aditya-me13 committed on
Commit 610152e · 1 Parent(s): 39ab179

integrated both the versions

.gitignore CHANGED
@@ -11,5 +11,6 @@ static/
  plots/
  uploads/
  downloads/
+ predictions/
 
 
app.py CHANGED
@@ -4,6 +4,8 @@ import os
  import json
  import traceback
  from pathlib import Path
+ import xarray as xr
+ import numpy as np
 
  from datetime import datetime, timedelta
  from werkzeug.utils import secure_filename
@@ -16,6 +18,15 @@ from interactive_plot_generator import InteractiveIndiaMapPlotter
  from cams_downloader import CAMSDownloader
  from constants import ALLOWED_EXTENSIONS, MAX_FILE_SIZE, COLOR_THEMES
 
+ # Aurora pipeline imports - with error handling for optional dependency
+ try:
+     from aurora import Batch, Metadata, AuroraAirPollution, rollout
+     from aurora_pipeline import AuroraPipeline
+     AURORA_AVAILABLE = True
+ except ImportError as e:
+     print(f"⚠️ Aurora model not available: {e}")
+     AURORA_AVAILABLE = False
+
  app = Flask(__name__)
  app.secret_key = 'your-secret-key-change-this-in-production' # Change this!
  app.config['DEBUG'] = False # Explicitly disable debug mode
@@ -33,8 +44,25 @@ downloader = CAMSDownloader()
  plotter = IndiaMapPlotter()
  interactive_plotter = InteractiveIndiaMapPlotter()
 
+ # Initialize Aurora pipeline if available
+ if AURORA_AVAILABLE:
+     # Check if we're in development/local mode
+     import socket
+     hostname = socket.gethostname()
+     is_local = any(local_indicator in hostname.lower()
+                    for local_indicator in ['local', 'macbook', 'laptop', 'desktop', 'dev'])
+
+     # Force CPU mode for local development to avoid GPU requirements
+     cpu_only = is_local or os.getenv('AURORA_CPU_ONLY', 'false').lower() == 'true'
+
+     aurora_pipeline = AuroraPipeline(cpu_only=cpu_only)
+     print(f"🔮 Aurora pipeline initialized ({'CPU-only' if cpu_only else 'GPU-enabled'} mode)")
+ else:
+     aurora_pipeline = None
+     print("⚠️ Aurora pipeline not available - missing dependencies")
+
  # Ensure directories exist
- for directory in ['uploads', 'downloads', 'plots', 'templates', 'static']:
+ for directory in ['uploads', 'downloads', 'plots', 'templates', 'static', 'predictions']:
      Path(directory).mkdir(exist_ok=True)
 
 
@@ -77,7 +105,8 @@ def index():
  downloaded_files=downloaded_files,
  cds_ready=downloader.is_client_ready(),
  current_date=current_date,
- recent_files=recent_files
+ recent_files=recent_files,
+ aurora_available=AURORA_AVAILABLE
  )
 
 
@@ -239,11 +268,13 @@ def get_pressure_levels(filename, variable):
  print(f"File path: {file_path}")
 
  processor = NetCDFProcessor(str(file_path))
- processor.load_dataset()
- processor.detect_variables()
-
- pressure_levels = processor.get_available_pressure_levels(variable)
- processor.close()
+ try:
+     processor.load_dataset()
+     processor.detect_variables()
+
+     pressure_levels = processor.get_available_pressure_levels(variable)
+ finally:
+     processor.close()
 
  return jsonify({
      'success': True,
@@ -271,11 +302,13 @@ def get_available_times(filename, variable):
  file_path = Path(app.config['UPLOAD_FOLDER']) / filename
 
  processor = NetCDFProcessor(str(file_path))
- processor.load_dataset()
- processor.detect_variables()
-
- available_times = processor.get_available_times(variable)
- processor.close()
+ try:
+     processor.load_dataset()
+     processor.detect_variables()
+
+     available_times = processor.get_available_times(variable)
+ finally:
+     processor.close()
 
  # Format times for display
  formatted_times = []
@@ -324,34 +357,37 @@ def visualize():
 
  # Process the data
  processor = NetCDFProcessor(str(file_path))
- processor.load_dataset()
- processor.detect_variables()
-
- # Convert pressure level to float if provided
- pressure_level_val = None
- if pressure_level and pressure_level != 'None':
-     try:
-         pressure_level_val = float(pressure_level)
-     except ValueError:
-         pressure_level_val = None
-
- time_index_val = request.form.get('time_index')
- # Extract data
- data_values, metadata = processor.extract_data(
-     variable,
-     time_index = int(time_index_val) if time_index_val and time_index_val != 'None' else 0,
-     pressure_level=pressure_level_val
- )
-
- # Generate plot
- plot_path = plotter.create_india_map(
-     data_values,
-     metadata,
-     color_theme=color_theme,
-     save_plot=True
- )
-
- processor.close()
+ try:
+     processor.load_dataset()
+     processor.detect_variables()
+
+     # Convert pressure level to float if provided
+     pressure_level_val = None
+     if pressure_level and pressure_level != 'None':
+         try:
+             pressure_level_val = float(pressure_level)
+         except ValueError:
+             pressure_level_val = None
+
+     time_index_val = request.form.get('time_index')
+     # Extract data
+     data_values, metadata = processor.extract_data(
+         variable,
+         time_index = int(time_index_val) if time_index_val and time_index_val != 'None' else 0,
+         pressure_level=pressure_level_val
+     )
+
+     # Generate plot
+     plot_path = plotter.create_india_map(
+         data_values,
+         metadata,
+         color_theme=color_theme,
+         save_plot=True
+     )
+
+ finally:
+     # Always close the processor
+     processor.close()
 
  if plot_path:
      plot_filename = Path(plot_path).name
@@ -412,34 +448,37 @@ def visualize_interactive():
 
  # Process the data
  processor = NetCDFProcessor(str(file_path))
- processor.load_dataset()
- processor.detect_variables()
-
- # Convert pressure level to float if provided
- pressure_level_val = None
- if pressure_level and pressure_level != 'None':
-     try:
-         pressure_level_val = float(pressure_level)
-     except ValueError:
-         pressure_level_val = None
-
- time_index_val = request.form.get('time_index')
- # Extract data
- data_values, metadata = processor.extract_data(
-     variable,
-     time_index = int(time_index_val) if time_index_val and time_index_val != 'None' else 0,
-     pressure_level=pressure_level_val
- )
-
- # Generate interactive plot
- result = interactive_plotter.create_india_map(
-     data_values,
-     metadata,
-     color_theme=color_theme,
-     save_plot=True
- )
+ try:
+     processor.load_dataset()
+     processor.detect_variables()
+
+     # Convert pressure level to float if provided
+     pressure_level_val = None
+     if pressure_level and pressure_level != 'None':
+         try:
+             pressure_level_val = float(pressure_level)
+         except ValueError:
+             pressure_level_val = None
+
+     time_index_val = request.form.get('time_index')
+     # Extract data
+     data_values, metadata = processor.extract_data(
+         variable,
+         time_index = int(time_index_val) if time_index_val and time_index_val != 'None' else 0,
+         pressure_level=pressure_level_val
+     )
+
+     # Generate interactive plot
+     result = interactive_plotter.create_india_map(
+         data_values,
+         metadata,
+         color_theme=color_theme,
+         save_plot=True
+     )
 
- processor.close()
+ finally:
+     # Always close the processor
+     processor.close()
 
  if result and result.get('html_content'):
      # Prepare metadata for display
@@ -765,10 +804,228 @@ def health_check():
  return jsonify({
      'status': 'healthy',
      'timestamp': datetime.now().isoformat(),
-     'cds_ready': downloader.is_client_ready()
+     'cds_ready': downloader.is_client_ready(),
+     'aurora_available': AURORA_AVAILABLE
  })
 
 
+ @app.route('/api/aurora_status')
+ def aurora_status():
+     """API endpoint to check Aurora readiness and get system info"""
+     status = {
+         'available': AURORA_AVAILABLE,
+         'cpu_only': False,
+         'estimated_time': {
+             'cpu': {'1_step': 5, '2_steps': 10},
+             'gpu': {'4_steps': 3, '6_steps': 4, '10_steps': 6}
+         }
+     }
+
+     if AURORA_AVAILABLE and aurora_pipeline:
+         status['cpu_only'] = getattr(aurora_pipeline, 'cpu_only', False)
+
+     return jsonify(status)
+
+
+ # Aurora ML Prediction Routes
+ @app.route('/aurora_predict', methods=['GET', 'POST'])
+ def aurora_predict():
+     """Aurora prediction form and handler"""
+     if not AURORA_AVAILABLE:
+         flash('Aurora model is not available. Please install required dependencies.', 'error')
+         return redirect(url_for('index'))
+
+     if request.method == 'GET':
+         current_date = datetime.now().strftime('%Y-%m-%d')
+         return render_template('aurora_predict.html', current_date=current_date)
+
+     # POST: Run the pipeline
+     date_str = request.form.get('date')
+     steps = int(request.form.get('steps', 2)) # Default to 2 steps for CPU-friendly execution
+
+     # Limit steps for local/CPU execution
+     if hasattr(aurora_pipeline, 'cpu_only') and aurora_pipeline.cpu_only:
+         steps = min(steps, 2) # Max 2 steps for CPU
+         if steps != int(request.form.get('steps', 2)):
+             flash(f'Steps reduced to {steps} for CPU mode optimization', 'info')
+
+     if not date_str:
+         flash('Please select a valid date.', 'error')
+         return redirect(url_for('aurora_predict'))
+
+     cams_date = date_str
+     cams_time = "12:00" # Always use 12:00 UTC for Aurora
+
+     try:
+         print(f"🚀 Starting Aurora prediction pipeline for {cams_date}")
+
+         # 1. Download CAMS data for the selected date
+         print("📥 Step 1/5: Downloading CAMS atmospheric data...")
+         try:
+             zip_path = downloader.download_cams_data(cams_date)
+         except Exception as e:
+             error_msg = f"Failed to download CAMS data: {str(e)}"
+             if "error response" in str(e).lower():
+                 error_msg += " The CAMS API may have returned an error. Please try a different date or check your CDS API credentials."
+             elif "zip" in str(e).lower():
+                 error_msg += " The downloaded file is corrupted. Please try again."
+             flash(error_msg, 'error')
+             print(f"❌ Download error: {traceback.format_exc()}")
+             return redirect(url_for('aurora_predict'))
+
+         try:
+             extracted_files = downloader.extract_cams_files(zip_path)
+             print("✅ CAMS data downloaded and extracted")
+         except Exception as e:
+             error_msg = f"Failed to extract CAMS data: {str(e)}"
+             if "not a zip file" in str(e).lower():
+                 error_msg += " The downloaded file appears to be corrupted or is an error response from the CAMS API."
+             elif "html" in str(e).lower() or "error" in str(e).lower():
+                 error_msg += " The CAMS API returned an error page instead of data."
+             flash(error_msg, 'error')
+             print(f"❌ Extraction error: {traceback.format_exc()}")
+             return redirect(url_for('aurora_predict'))
+
+         # 2. Run Aurora pipeline (batch creation, model, prediction, save NetCDF)
+         print("🔮 Step 2/5: Initializing Aurora ML pipeline...")
+         output_nc = f"predictions_{cams_date}_{cams_time.replace(':','')}.nc"
+
+         print(f"🧠 Step 3/5: Loading Aurora model (this may take a few minutes)...")
+         print(f"⚡ Step 4/5: Running {steps} prediction steps...")
+
+         try:
+             predictions = aurora_pipeline.run_pipeline(
+                 date_str=cams_date,
+                 Batch=Batch,
+                 Metadata=Metadata,
+                 AuroraAirPollution=AuroraAirPollution,
+                 rollout=rollout,
+                 steps=steps,
+                 output_path=Path('predictions') / output_nc
+             )
+         except Exception as e:
+             error_msg = f"Aurora model execution failed: {str(e)}"
+             if "map_location" in str(e):
+                 error_msg += " This appears to be a compatibility issue with the Aurora model version."
+             elif "checkpoint" in str(e).lower():
+                 error_msg += " Failed to load the Aurora model. Please check if the model files are properly installed."
+             elif "memory" in str(e).lower() or "cuda" in str(e).lower():
+                 error_msg += " Insufficient memory or GPU issues. Try reducing the number of prediction steps."
+             flash(error_msg, 'error')
+             print(f"❌ Aurora model error: {traceback.format_exc()}")
+             return redirect(url_for('aurora_predict'))
+
+         print("💾 Step 5/5: Saving results and preparing visualization...")
+         print(f"✅ Aurora predictions completed for {cams_date} 12:00 UTC")
+
+         flash(f'🔮 Aurora predictions generated successfully for {cams_date} ({steps} steps)', 'success')
+         return redirect(url_for('visualize_prediction', filename=output_nc))
+
+     except Exception as e:
+         # Catch-all for any other unexpected errors
+         error_msg = f'Unexpected error in Aurora pipeline: {str(e)}'
+         flash(error_msg, 'error')
+         print(f"❌ Unexpected Aurora pipeline error: {traceback.format_exc()}")
+         return redirect(url_for('aurora_predict'))
+
+
+ @app.route('/visualize_prediction/<filename>', methods=['GET', 'POST'])
+ def visualize_prediction(filename):
+     """Visualize or download Aurora prediction output with variable and step selection"""
+     file_path = Path('predictions') / filename
+     if not file_path.exists():
+         flash('Prediction file not found', 'error')
+         return redirect(url_for('index'))
+
+     ds = xr.open_dataset(file_path)
+     variables = list(ds.data_vars.keys())
+     steps = ds['step'].values if 'step' in ds else np.arange(ds[variables[0]].shape[0])
+
+     # Handle form submission
+     if request.method == 'POST':
+         var_name = request.form.get('variable')
+         step = int(request.form.get('step', 0))
+         color_theme = request.form.get('color_theme', 'viridis')
+     else:
+         var_name = variables[0]
+         step = 0
+         color_theme = 'viridis'
+
+     # Prepare data for plotting
+     data = ds[var_name].values
+     if data.ndim == 3:
+         data_to_plot = data[step]
+     elif data.ndim == 2:
+         data_to_plot = data
+     else:
+         flash('Prediction data shape not supported for plotting', 'error')
+         return redirect(url_for('index'))
+
+     # Extract lat/lon
+     lats = ds['lat'].values if 'lat' in ds else ds['latitude'].values
+     lons = ds['lon'].values if 'lon' in ds else ds['longitude'].values
+
+     # Prepare metadata for plot
+     from constants import NETCDF_VARIABLES
+     var_info = NETCDF_VARIABLES.get(var_name, {})
+     display_name = var_info.get('name', var_name)
+     units = ds[var_name].attrs.get('units', var_info.get('units', ''))
+     # Use user-selected color theme, fallback to variable default, then viridis
+     if 'color_theme' not in locals():
+         color_theme = var_info.get('cmap', 'viridis')
+
+     metadata = {
+         'variable_name': var_name,
+         'display_name': display_name,
+         'units': units,
+         'lats': lats,
+         'lons': lons,
+         'pressure_level': None,
+         'timestamp_str': str(steps[step]) if len(steps) > step else '',
+     }
+
+     # Generate plot
+     plot_path = plotter.create_india_map(
+         data_to_plot,
+         metadata,
+         color_theme=color_theme,
+         save_plot=True,
+         custom_title=f"Aurora Prediction: {display_name} (step {step})"
+     )
+
+     if plot_path:
+         plot_filename = Path(plot_path).name
+
+         # Provide download link for NetCDF
+         download_url = url_for('download_prediction_netcdf', filename=filename)
+
+         return render_template(
+             'aurora_prediction_plot.html',
+             plot_filename=plot_filename,
+             var_name=var_name,
+             step=step,
+             variables=variables,
+             steps=range(len(steps)),
+             filename=filename,
+             download_url=download_url,
+             color_themes=COLOR_THEMES,
+             current_color_theme=color_theme
+         )
+     else:
+         flash('Error generating prediction plot', 'error')
+         return redirect(url_for('index'))
+
+
+ @app.route('/download_prediction_netcdf/<filename>')
+ def download_prediction_netcdf(filename):
+     """Download the Aurora prediction NetCDF file"""
+     file_path = Path('predictions') / filename
+     if not file_path.exists():
+         flash('Prediction file not found', 'error')
+         return redirect(url_for('index'))
+     return send_file(str(file_path), as_attachment=True, download_name=filename)
+
+
  @app.errorhandler(413)
  def too_large(e):
      """Handle file too large error"""
@@ -802,4 +1059,4 @@ if __name__ == '__main__':
  print("🔧 CDS API Ready:", downloader.is_client_ready())
 
  # Run the Flask app
- app.run(debug=debug_mode, host='0.0.0.0', port=port)
+ app.run(debug=True, host='0.0.0.0', port=port)
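
For reference, a minimal client sketch for the new status endpoint added above. The base URL and port are assumptions (they depend on how the Space or server is deployed); only the /api/aurora_status route itself comes from this diff.

import json
import urllib.request

BASE_URL = "http://localhost:7860"  # assumed host/port; adjust to your deployment

# Query Aurora readiness and the rough per-step runtime estimates returned by aurora_status().
with urllib.request.urlopen(f"{BASE_URL}/api/aurora_status") as resp:
    status = json.load(resp)

print("Aurora available:", status["available"])
print("CPU-only mode:", status["cpu_only"])
print("Estimated minutes per run:", status["estimated_time"])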
aurora_pipeline.py ADDED
@@ -0,0 +1,471 @@
+ # aurora_pipeline.py
+ # End-to-end pipeline for CAMS data → Aurora model → predictions → NetCDF
+ import subprocess
+ import os
+
+ def get_freest_cuda_device_id():
+     """Get the freest CUDA device ID if available, otherwise return None for CPU mode"""
+     try:
+         # Check if nvidia-smi exists first
+         result = subprocess.run(
+             ['nvidia-smi', '--query-gpu=memory.free', '--format=csv,nounits,noheader'],
+             stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding='utf-8'
+         )
+         if result.returncode == 0:
+             memory_free = [int(x) for x in result.stdout.strip().split('\n')]
+             device_id = memory_free.index(max(memory_free))
+             print(f"🎮 GPU available - using device {device_id}")
+             return str(device_id)
+         else:
+             print("⚠️ nvidia-smi returned error, using CPU mode")
+             return None
+     except (FileNotFoundError, subprocess.SubprocessError, Exception) as e:
+         print(f"💻 No GPU detected, using CPU mode: {e}")
+         return None
+
+ # Set CUDA_VISIBLE_DEVICES only if GPU is available
+ gpu_device = get_freest_cuda_device_id()
+ if gpu_device is not None:
+     os.environ["CUDA_VISIBLE_DEVICES"] = gpu_device
+ else:
+     # Force CPU mode
+     os.environ["CUDA_VISIBLE_DEVICES"] = ""
+     print("🔧 Configured for CPU-only execution")
+
+
+ import torch
+ import xarray as xr
+ import pickle
+ from pathlib import Path
+ import numpy as np
+ import zipfile
+ import cdsapi
+ from huggingface_hub import hf_hub_download
+ import matplotlib.pyplot as plt
+ import cartopy.crs as ccrs
+ import cartopy.feature as cfeature
+ from datetime import datetime, timedelta
+ from aurora import Batch, Metadata, AuroraAirPollution, rollout
+
+
+ class AuroraPipeline:
+     def __init__(self,
+                  extracted_dir="downloads/extracted",
+                  static_path="static_vars.pkl",
+                  model_ckpt="aurora-0.4-air-pollution.ckpt",
+                  model_repo="microsoft/aurora",
+                  device=None,
+                  cpu_only=False):
+
+         # Device selection with CPU fallback
+         if cpu_only or device == "cpu":
+             self.device = "cpu"
+             print("💻 Aurora configured for CPU-only execution")
+         elif device is None:
+             # Auto-detect: prefer CPU for local testing, GPU for production
+             if torch.cuda.is_available() and not cpu_only:
+                 self.device = "cuda:0"
+                 print(f"🎮 Aurora using GPU: {self.device}")
+             else:
+                 self.device = "cpu"
+                 print("💻 Aurora using CPU (GPU not available or CPU forced)")
+         else:
+             self.device = device
+             print(f"🔧 Aurora using specified device: {self.device}")
+
+         self.extracted_dir = Path(extracted_dir)
+         self.static_path = Path(static_path)
+         self.model_ckpt = model_ckpt
+         self.model_repo = model_repo
+         self.cpu_only = cpu_only or (self.device == "cpu")
+
+         # Load static variables with error handling
+         try:
+             self.static_vars = self._load_static_vars()
+         except Exception as e:
+             print(f"⚠️ Warning: Could not load static variables: {e}")
+             self.static_vars = {}
+
+         self.model = None
+
+     def _load_static_vars(self):
+         """Load static variables from Hugging Face Hub"""
+         static_path = hf_hub_download(
+             repo_id="microsoft/aurora",
+             filename="aurora-0.4-air-pollution-static.pickle",
+         )
+         if not Path(static_path).exists():
+             raise FileNotFoundError(f"Static variables file not found: {static_path}")
+         with open(static_path, "rb") as f:
+             static_vars = pickle.load(f)
+         return static_vars
+
+     def create_batch(self, date_str, Batch, Metadata):
+         """Create a batch for Aurora model from CAMS data"""
+         surface_path = self.extracted_dir / f"{date_str}-cams-surface.nc"
+         atmos_path = self.extracted_dir / f"{date_str}-cams-atmospheric.nc"
+         if not surface_path.exists() or not atmos_path.exists():
+             raise FileNotFoundError(f"Missing CAMS files for {date_str} in {self.extracted_dir}")
+
+         surf_vars_ds = xr.open_dataset(surface_path, engine="netcdf4", decode_timedelta=True)
+         atmos_vars_ds = xr.open_dataset(atmos_path, engine="netcdf4", decode_timedelta=True)
+
+         # Select zero-hour forecast
+         surf_vars_ds = surf_vars_ds.isel(forecast_period=0)
+         atmos_vars_ds = atmos_vars_ds.isel(forecast_period=0)
+
+         batch = Batch(
+             surf_vars={
+                 "2t": torch.from_numpy(surf_vars_ds["t2m"].values[None]),
+                 "10u": torch.from_numpy(surf_vars_ds["u10"].values[None]),
+                 "10v": torch.from_numpy(surf_vars_ds["v10"].values[None]),
+                 "msl": torch.from_numpy(surf_vars_ds["msl"].values[None]),
+                 "pm1": torch.from_numpy(surf_vars_ds["pm1"].values[None]),
+                 "pm2p5": torch.from_numpy(surf_vars_ds["pm2p5"].values[None]),
+                 "pm10": torch.from_numpy(surf_vars_ds["pm10"].values[None]),
+                 "tcco": torch.from_numpy(surf_vars_ds["tcco"].values[None]),
+                 "tc_no": torch.from_numpy(surf_vars_ds["tc_no"].values[None]),
+                 "tcno2": torch.from_numpy(surf_vars_ds["tcno2"].values[None]),
+                 "gtco3": torch.from_numpy(surf_vars_ds["gtco3"].values[None]),
+                 "tcso2": torch.from_numpy(surf_vars_ds["tcso2"].values[None]),
+             },
+             static_vars={k: torch.from_numpy(v) for k, v in self.static_vars.items()},
+             atmos_vars={
+                 "t": torch.from_numpy(atmos_vars_ds["t"].values[None]),
+                 "u": torch.from_numpy(atmos_vars_ds["u"].values[None]),
+                 "v": torch.from_numpy(atmos_vars_ds["v"].values[None]),
+                 "q": torch.from_numpy(atmos_vars_ds["q"].values[None]),
+                 "z": torch.from_numpy(atmos_vars_ds["z"].values[None]),
+                 "co": torch.from_numpy(atmos_vars_ds["co"].values[None]),
+                 "no": torch.from_numpy(atmos_vars_ds["no"].values[None]),
+                 "no2": torch.from_numpy(atmos_vars_ds["no2"].values[None]),
+                 "go3": torch.from_numpy(atmos_vars_ds["go3"].values[None]),
+                 "so2": torch.from_numpy(atmos_vars_ds["so2"].values[None]),
+             },
+             metadata=Metadata(
+                 lat=torch.from_numpy(atmos_vars_ds.latitude.values),
+                 lon=torch.from_numpy(atmos_vars_ds.longitude.values),
+                 time=(atmos_vars_ds.valid_time.values.astype("datetime64[s]").tolist()[-1],),
+                 atmos_levels=tuple(int(level) for level in atmos_vars_ds.pressure_level.values),
+             ),
+         )
+         return batch
+     def load_model(self, AuroraAirPollution):
+         """Load Aurora model with CPU/GPU optimization"""
+         import gc
+
+         print(f"🔄 Loading Aurora model on {self.device}")
+
+         # Memory check for GPU
+         if self.device != "cpu" and torch.cuda.is_available():
+             print(f"📊 GPU Memory BEFORE loading model:")
+             print(f" Allocated: {torch.cuda.memory_allocated(0) / 1024**3:.2f} GB")
+             print(f" Reserved: {torch.cuda.memory_reserved(0) / 1024**3:.2f} GB")
+             print(f" Free: {(torch.cuda.get_device_properties(0).total_memory - torch.cuda.memory_reserved(0)) / 1024**3:.2f} GB")
+
+         # Clear cache
+         if torch.cuda.is_available():
+             torch.cuda.empty_cache()
+         gc.collect()
+
+         # Initialize model with CPU-friendly settings
+         if self.cpu_only:
+             print("💻 Initializing model for CPU execution...")
+             # Set CPU-friendly torch settings
+             torch.set_num_threads(2) # Limit CPU threads for local testing
+             model = AuroraAirPollution()
+         else:
+             model = AuroraAirPollution()
+
+         # Load checkpoint with device mapping
+         try:
+             if self.cpu_only:
+                 print("📝 Loading checkpoint for CPU execution...")
+                 # For CPU mode, we may need to handle device mapping differently
+                 model.load_checkpoint(self.model_repo, self.model_ckpt)
+             else:
+                 print("📝 Loading checkpoint for GPU execution...")
+                 model.load_checkpoint(self.model_repo, self.model_ckpt)
+         except Exception as e:
+             print(f"⚠️ Checkpoint loading failed: {e}")
+             print("🔄 Trying alternative loading method...")
+             try:
+                 # Alternative: try loading without any special parameters
+                 model.load_checkpoint(self.model_repo, self.model_ckpt)
+                 print("✅ Checkpoint loaded successfully with fallback method")
+             except Exception as e2:
+                 print(f"❌ All loading methods failed: {e2}")
+                 # Set device to CPU as last resort
+                 self.device = "cpu"
+                 self.cpu_only = True
+                 raise RuntimeError(f"Failed to load Aurora model: {e2}")
+
+         model.eval()
+
+         # Move to device
+         model = model.to(self.device)
+
+         # Memory check after loading
+         if self.device != "cpu" and torch.cuda.is_available():
+             print(f"📊 GPU Memory AFTER model load:")
+             print(f" Allocated: {torch.cuda.memory_allocated(0) / 1024**3:.2f} GB")
+             print(f" Reserved: {torch.cuda.memory_reserved(0) / 1024**3:.2f} GB")
+
+         self.model = model
+         print(f"✅ Aurora model loaded on {self.device}")
+
+         if self.cpu_only:
+             print("⚠️ WARNING: CPU mode will be slower than GPU. Consider using fewer steps for faster inference.")
+
+         return model
+
+     def predict(self, batch, rollout, steps=4):
+         """Run model prediction with CPU/GPU optimization"""
+         if self.model is None:
+             raise RuntimeError("Model not loaded. Call load_model() first.")
+
+         # Limit steps for CPU to avoid memory issues
+         if self.cpu_only and steps > 2:
+             print(f"⚠️ CPU mode: reducing steps from {steps} to 2 for memory efficiency")
+             steps = 2
+
+         print(f"🔄 Running {steps} prediction steps on {self.device}...")
+
+         # Move batch to device
+         batch = batch.to(self.device)
+
+         # CPU-friendly inference settings
+         if self.cpu_only:
+             torch.set_grad_enabled(False) # Disable gradients for inference
+
+         with torch.inference_mode():
+             predictions = []
+             for step in range(steps):
+                 print(f" Step {step + 1}/{steps}...")
+                 if step == 0:
+                     # First prediction from initial batch
+                     pred_generator = rollout(self.model, batch, steps=1)
+                     pred = next(pred_generator)
+                 else:
+                     # Subsequent predictions from previous output
+                     pred_generator = rollout(self.model, pred, steps=1)
+                     pred = next(pred_generator)
+
+                 # Move to CPU immediately to save memory
+                 predictions.append(pred.to("cpu"))
+
+                 # Clear GPU cache after each step if using GPU
+                 if not self.cpu_only and torch.cuda.is_available():
+                     torch.cuda.empty_cache()
+
+         print(f"✅ Completed {len(predictions)} prediction steps")
+         return predictions
+
+     def save_predictions_to_netcdf(self, predictions, output_path):
+         """Save all prediction steps to a single NetCDF file compatible with visualization pipeline"""
+         output_path = Path(output_path)
+         output_path.parent.mkdir(parents=True, exist_ok=True)
+
+         print(f"💾 Saving {len(predictions)} prediction steps to {output_path}")
+
+         try:
+             # Try the new single-file method
+             return self._save_predictions_single_file(predictions, output_path)
+         except Exception as e:
+             print(f"⚠️ Single file method failed: {e}")
+             print(f"🔄 Falling back to original method...")
+             return self._save_predictions_original_method(predictions, output_path)
+
+     def _save_predictions_single_file(self, predictions, output_path):
+         """Save all prediction steps to a single NetCDF file (new method)"""
+         # Get metadata from first prediction
+         first_pred = predictions[0]
+         metadata = first_pred.metadata
+
+         # Extract coordinates
+         lats = metadata.lat.cpu().numpy() if hasattr(metadata.lat, 'cpu') else metadata.lat.numpy()
+         lons = metadata.lon.cpu().numpy() if hasattr(metadata.lon, 'cpu') else metadata.lon.numpy()
+
+         # Create step coordinate
+         steps = np.arange(len(predictions))
+
+         # Prepare data variables
+         data_vars = {}
+         coords = {
+             'step': ('step', steps),
+             'lat': ('lat', lats),
+             'lon': ('lon', lons)
+         }
+
+         # Add surface variables
+         surf_var_names = list(first_pred.surf_vars.keys())
+         for var in surf_var_names:
+             # Stack predictions along step dimension
+             var_data_list = []
+             for pred in predictions:
+                 var_tensor = pred.surf_vars[var]
+                 # Move to CPU and convert to numpy
+                 var_data = var_tensor.cpu().numpy() if hasattr(var_tensor, 'cpu') else var_tensor.numpy()
+
+                 # Robust dimension handling: squeeze all singleton dimensions and keep only last 2 (lat, lon)
+                 var_data = np.squeeze(var_data) # Remove all singleton dimensions
+
+                 # Ensure we have exactly 2 dimensions (lat, lon) for surface variables
+                 if var_data.ndim > 2:
+                     # Take the last 2 dimensions as lat, lon
+                     var_data = var_data[..., :, :]
+                     # If still more than 2D, take the first slice of extra dimensions
+                     while var_data.ndim > 2:
+                         var_data = var_data[0]
+                 elif var_data.ndim < 2:
+                     raise ValueError(f"Surface variable {var} has insufficient dimensions: {var_data.shape}")
+
+                 var_data_list.append(var_data)
+
+             # Stack along step dimension: (steps, lat, lon)
+             arr = np.stack(var_data_list, axis=0)
+             data_vars[var] = (['step', 'lat', 'lon'], arr)
+
+         # Add atmospheric variables if present
+         if hasattr(first_pred, 'atmos_vars') and first_pred.atmos_vars:
+             atmos_levels = list(metadata.atmos_levels) if hasattr(metadata, 'atmos_levels') else None
+             if atmos_levels:
+                 coords['pressure_level'] = ('pressure_level', atmos_levels)
+
+             atmos_var_names = list(first_pred.atmos_vars.keys())
+             for var in atmos_var_names:
+                 var_data_list = []
+                 for pred in predictions:
+                     var_tensor = pred.atmos_vars[var]
+                     # Move to CPU and convert to numpy
+                     var_data = var_tensor.cpu().numpy() if hasattr(var_tensor, 'cpu') else var_tensor.numpy()
+
+                     # Robust dimension handling: squeeze singleton dimensions but keep 3D structure
+                     var_data = np.squeeze(var_data) # Remove singleton dimensions
+
+                     # Ensure we have exactly 3 dimensions (levels, lat, lon) for atmospheric variables
+                     if var_data.ndim > 3:
+                         # Take the last 3 dimensions as levels, lat, lon
+                         var_data = var_data[..., :, :, :]
+                         # If still more than 3D, take the first slice of extra dimensions
+                         while var_data.ndim > 3:
+                             var_data = var_data[0]
+                     elif var_data.ndim < 3:
+                         raise ValueError(f"Atmospheric variable {var} has insufficient dimensions: {var_data.shape}")
+
+                     var_data_list.append(var_data)
+
+                 # Stack along step dimension: (steps, levels, lat, lon)
+                 arr = np.stack(var_data_list, axis=0)
+                 data_vars[f"{var}_atmos"] = (['step', 'pressure_level', 'lat', 'lon'], arr)
+
+         # Create dataset
+         ds = xr.Dataset(data_vars, coords=coords)
+
+         # Add global attributes
+         ds.attrs.update({
+             'title': 'Aurora Air Pollution Model Predictions',
+             'source': 'Aurora model by Microsoft Research',
+             'creation_date': datetime.now().isoformat(),
+             'forecast_steps': len(predictions),
+             'spatial_resolution': f"{abs(lons[1] - lons[0]):.3f} degrees",
+             'conventions': 'CF-1.8'
+         })
+
+         # Add variable attributes for better visualization
+         var_attrs = {
+             '2t': {'long_name': '2 metre temperature', 'units': 'K'},
+             '10u': {'long_name': '10 metre U wind component', 'units': 'm s-1'},
+             '10v': {'long_name': '10 metre V wind component', 'units': 'm s-1'},
+             'msl': {'long_name': 'Mean sea level pressure', 'units': 'Pa'},
+             'pm1': {'long_name': 'Particulate matter d < 1 um', 'units': 'kg m-3'},
+             'pm2p5': {'long_name': 'Particulate matter d < 2.5 um', 'units': 'kg m-3'},
+             'pm10': {'long_name': 'Particulate matter d < 10 um', 'units': 'kg m-3'},
+             'tcco': {'long_name': 'Total column carbon monoxide', 'units': 'kg m-2'},
+             'tc_no': {'long_name': 'Total column nitrogen monoxide', 'units': 'kg m-2'},
+             'tcno2': {'long_name': 'Total column nitrogen dioxide', 'units': 'kg m-2'},
+             'gtco3': {'long_name': 'Total column ozone', 'units': 'kg m-2'},
+             'tcso2': {'long_name': 'Total column sulphur dioxide', 'units': 'kg m-2'}
+         }
+
+         for var_name, attrs in var_attrs.items():
+             if var_name in ds.data_vars:
+                 ds[var_name].attrs.update(attrs)
+
+         # Save to NetCDF
+         ds.to_netcdf(output_path, format='NETCDF4')
+         print(f"✅ Predictions saved to {output_path}")
+         print(f" Variables: {list(ds.data_vars.keys())}")
+         print(f" Steps: {len(steps)}")
+         print(f" Spatial grid: {len(lats)}x{len(lons)}")
+
+         return output_path
+
+     def _save_predictions_original_method(self, predictions, output_path):
+         """Fallback: Save predictions using the original method (separate files per step)"""
+         output_dir = Path(output_path)
+         output_dir.mkdir(exist_ok=True)
+
+         for step, pred in enumerate(predictions):
+             # Create xarray dataset for surface variables
+             surf_data = {}
+             for var_name, var_data in pred.surf_vars.items():
+                 surf_data[var_name] = (
+                     ["time", "batch", "lat", "lon"],
+                     var_data.cpu().numpy() if hasattr(var_data, 'cpu') else var_data.numpy()
+                 )
+
+             # Create xarray dataset for atmospheric variables
+             atmos_data = {}
+             for var_name, var_data in pred.atmos_vars.items():
+                 atmos_data[var_name] = (
+                     ["time", "batch", "level", "lat", "lon"],
+                     var_data.cpu().numpy() if hasattr(var_data, 'cpu') else var_data.numpy()
+                 )
+
+             # Create surface dataset
+             surf_ds = xr.Dataset(
+                 surf_data,
+                 coords={
+                     "time": [pred.metadata.time[0]],
+                     "batch": [0],
+                     "lat": pred.metadata.lat.cpu().numpy() if hasattr(pred.metadata.lat, 'cpu') else pred.metadata.lat.numpy(),
+                     "lon": pred.metadata.lon.cpu().numpy() if hasattr(pred.metadata.lon, 'cpu') else pred.metadata.lon.numpy(),
+                 }
+             )
+
+             # Create atmospheric dataset
+             atmos_ds = xr.Dataset(
+                 atmos_data,
+                 coords={
+                     "time": [pred.metadata.time[0]],
+                     "batch": [0],
+                     "level": list(pred.metadata.atmos_levels),
+                     "lat": pred.metadata.lat.cpu().numpy() if hasattr(pred.metadata.lat, 'cpu') else pred.metadata.lat.numpy(),
+                     "lon": pred.metadata.lon.cpu().numpy() if hasattr(pred.metadata.lon, 'cpu') else pred.metadata.lon.numpy(),
+                 }
+             )
+
+             # Save to NetCDF
+             surf_filename = f"step_{step:02d}_surface.nc"
+             atmos_filename = f"step_{step:02d}_atmospheric.nc"
+
+             surf_ds.to_netcdf(output_dir / surf_filename)
+             atmos_ds.to_netcdf(output_dir / atmos_filename)
+
+             print(f"Saved step {step} predictions (fallback method)")
+
+         return output_dir
+
+     def run_pipeline(self, date_str, Batch, Metadata, AuroraAirPollution, rollout, steps=4, output_path=None):
+         """Full pipeline: batch creation, model loading, prediction, save output"""
+         batch = self.create_batch(date_str, Batch, Metadata)
+         self.load_model(AuroraAirPollution)
+         predictions = self.predict(batch, rollout, steps=steps)
+         if output_path:
+             self.save_predictions_to_netcdf(predictions, output_path)
+         return predictions
+
+ # Example usage (not run on import)
+ if __name__ == "__main__":
+     pass
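
A minimal standalone sketch of driving the pipeline above outside Flask, mirroring the run_pipeline() call made in app.py; the date is illustrative and the matching CAMS files are assumed to already exist under downloads/extracted.

from pathlib import Path

from aurora import Batch, Metadata, AuroraAirPollution, rollout
from aurora_pipeline import AuroraPipeline

date_str = "2024-06-01"  # example date; CAMS files for it must already be extracted
pipeline = AuroraPipeline(cpu_only=True)  # CPU-only keeps the sketch runnable without a GPU

predictions = pipeline.run_pipeline(
    date_str=date_str,
    Batch=Batch,
    Metadata=Metadata,
    AuroraAirPollution=AuroraAirPollution,
    rollout=rollout,
    steps=2,  # small step count, matching the CPU limits enforced above
    output_path=Path("predictions") / f"predictions_{date_str}_1200.nc",
)
print(f"Generated {len(predictions)} prediction steps")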
cams_downloader.py CHANGED
@@ -157,6 +157,7 @@ class CAMSDownloader:
 
  try:
      # Make the API request
+     print("📡 Requesting data from CAMS API...")
      self.client.retrieve(
          "cams-global-atmospheric-composition-forecasts",
          {
@@ -171,12 +172,30 @@
      str(filepath),
  )
 
- print(f"✅ Successfully downloaded: {filename}")
- return str(filepath)
+ # Validate the downloaded file
+ if filepath.exists():
+     file_size = filepath.stat().st_size
+     print(f"📁 Downloaded file size: {file_size / 1024 / 1024:.2f} MB")
+
+     # Basic validation - CAMS files should be reasonably large
+     if file_size < 10000: # Less than 10KB is suspicious
+         print(f"⚠️ Warning: Downloaded file is very small ({file_size} bytes)")
+         # Read first few bytes to check for error messages
+         with open(filepath, 'rb') as f:
+             header = f.read(200)
+         if b'error' in header.lower() or b'html' in header.lower():
+             filepath.unlink()
+             raise Exception("CAMS API returned an error response instead of data")
+
+     print(f"✅ Successfully downloaded: {filename}")
+     return str(filepath)
+ else:
+     raise Exception("Download completed but file was not created")
 
  except Exception as e:
      # Clean up partial download
      if filepath.exists():
+         print(f"🗑️ Cleaning up failed download: {filepath}")
          filepath.unlink()
      raise Exception(f"Error downloading CAMS data: {str(e)}")
 
@@ -194,6 +213,33 @@
  if not zip_path.exists():
      raise FileNotFoundError(f"ZIP file not found: {zip_path}")
 
+ # Validate file is actually a ZIP file
+ try:
+     # Check file size first
+     file_size = zip_path.stat().st_size
+     if file_size < 1000: # Less than 1KB is probably an error response
+         print(f"⚠️ Downloaded file is too small ({file_size} bytes), likely an error response")
+         # Try to read first few bytes to see what we got
+         with open(zip_path, 'rb') as f:
+             header = f.read(100)
+         if b'html' in header.lower() or b'error' in header.lower():
+             raise Exception("Downloaded file appears to be an HTML error page, not ZIP data")
+
+     # Test if it's a valid ZIP file
+     if not zipfile.is_zipfile(zip_path):
+         print(f"❌ File is not a valid ZIP file: {zip_path}")
+         # Try to read first few lines to diagnose
+         with open(zip_path, 'r', errors='ignore') as f:
+             first_lines = f.read(200)
+         print(f"File contents preview: {first_lines[:100]}...")
+         raise Exception(f"Downloaded file is not a valid ZIP archive. File size: {file_size} bytes")
+
+ except Exception as e:
+     if "ZIP" in str(e) or "zip" in str(e):
+         raise e
+     else:
+         raise Exception(f"Error validating ZIP file: {str(e)}")
+
  # Extract date from filename
  date_str = zip_path.stem.replace("-cams.nc", "")
 
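A small sketch of the download-and-extract flow that the validation code above hardens, using the same CAMSDownloader calls app.py makes; the date is illustrative and valid CDS API credentials are assumed.

from cams_downloader import CAMSDownloader

downloader = CAMSDownloader()

# download_cams_data() now validates the file size and rejects API error responses.
zip_path = downloader.download_cams_data("2024-06-01")  # illustrative date

# extract_cams_files() now refuses files that are not real ZIP archives.
extracted_files = downloader.extract_cams_files(zip_path)
print(extracted_files)
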
constants.py CHANGED
@@ -1,6 +1,7 @@
- # Air pollution variables and their properties
+ # NetCDF variables and their properties (includes air pollution, meteorological, and other variables)
 
- AIR_POLLUTION_VARIABLES = {
+ NETCDF_VARIABLES = {
+     # === AIR POLLUTION VARIABLES ===
      # PM2.5
      'pm2p5': {'units': 'µg/m³', 'name': 'PM2.5', 'cmap': 'YlOrRd', 'vmax_percentile': 95, 'type': 'surface'},
      'pm25': {'units': 'µg/m³', 'name': 'PM2.5', 'cmap': 'YlOrRd', 'vmax_percentile': 95, 'type': 'surface'},
@@ -78,8 +79,76 @@ AIR_POLLUTION_VARIABLES = {
      'aod550': {'units': '', 'name': 'AOD 550nm', 'cmap': 'plasma', 'vmax_percentile': 95, 'type': 'surface'},
      'aod': {'units': '', 'name': 'AOD', 'cmap': 'plasma', 'vmax_percentile': 95, 'type': 'surface'},
      'aerosol_optical_depth': {'units': '', 'name': 'AOD', 'cmap': 'plasma', 'vmax_percentile': 95, 'type': 'surface'},
+
+     # === METEOROLOGICAL VARIABLES ===
+
+     # Temperature variables
+     '2t': {'units': 'K', 'name': '2m Temperature', 'cmap': 'RdYlBu', 'vmax_percentile': 95, 'type': 'surface'},
+     't2m': {'units': 'K', 'name': '2m Temperature', 'cmap': 'RdYlBu', 'vmax_percentile': 95, 'type': 'surface'},
+     'temperature': {'units': 'K', 'name': 'Temperature', 'cmap': 'RdYlBu', 'vmax_percentile': 95, 'type': 'atmospheric'},
+     't': {'units': 'K', 'name': 'Temperature', 'cmap': 'RdYlBu', 'vmax_percentile': 95, 'type': 'atmospheric'},
+     'air_temperature': {'units': 'K', 'name': 'Air Temperature', 'cmap': 'RdYlBu', 'vmax_percentile': 95, 'type': 'atmospheric'},
+     'sst': {'units': 'K', 'name': 'Sea Surface Temperature', 'cmap': 'coolwarm', 'vmax_percentile': 95, 'type': 'surface'},
+
+     # Wind variables
+     'u': {'units': 'm/s', 'name': 'U-Wind Component', 'cmap': 'coolwarm', 'vmax_percentile': 95, 'type': 'atmospheric'},
+     'v': {'units': 'm/s', 'name': 'V-Wind Component', 'cmap': 'coolwarm', 'vmax_percentile': 95, 'type': 'atmospheric'},
+     'u10': {'units': 'm/s', 'name': '10m U-Wind', 'cmap': 'coolwarm', 'vmax_percentile': 95, 'type': 'surface'},
+     'v10': {'units': 'm/s', 'name': '10m V-Wind', 'cmap': 'coolwarm', 'vmax_percentile': 95, 'type': 'surface'},
+     'u_component_of_wind': {'units': 'm/s', 'name': 'U-Wind Component', 'cmap': 'coolwarm', 'vmax_percentile': 95, 'type': 'atmospheric'},
+     'v_component_of_wind': {'units': 'm/s', 'name': 'V-Wind Component', 'cmap': 'coolwarm', 'vmax_percentile': 95, 'type': 'atmospheric'},
+     'wind_speed': {'units': 'm/s', 'name': 'Wind Speed', 'cmap': 'viridis', 'vmax_percentile': 95, 'type': 'atmospheric'},
+     'ws': {'units': 'm/s', 'name': 'Wind Speed', 'cmap': 'viridis', 'vmax_percentile': 95, 'type': 'atmospheric'},
+
+     # Pressure and geopotential
+     'msl': {'units': 'Pa', 'name': 'Mean Sea Level Pressure', 'cmap': 'RdYlBu', 'vmax_percentile': 95, 'type': 'surface'},
+     'sp': {'units': 'Pa', 'name': 'Surface Pressure', 'cmap': 'RdYlBu', 'vmax_percentile': 95, 'type': 'surface'},
+     'pressure': {'units': 'Pa', 'name': 'Pressure', 'cmap': 'RdYlBu', 'vmax_percentile': 95, 'type': 'atmospheric'},
+     'z': {'units': 'm²/s²', 'name': 'Geopotential', 'cmap': 'Spectral', 'vmax_percentile': 95, 'type': 'atmospheric'},
+     'gh': {'units': 'm', 'name': 'Geopotential Height', 'cmap': 'Spectral', 'vmax_percentile': 95, 'type': 'atmospheric'},
+
+     # Humidity variables
+     'r': {'units': '%', 'name': 'Relative Humidity', 'cmap': 'Blues', 'vmax_percentile': 95, 'type': 'atmospheric'},
+     'rh': {'units': '%', 'name': 'Relative Humidity', 'cmap': 'Blues', 'vmax_percentile': 95, 'type': 'atmospheric'},
+     'q': {'units': 'kg/kg', 'name': 'Specific Humidity', 'cmap': 'Blues', 'vmax_percentile': 95, 'type': 'atmospheric'},
+     'sh': {'units': 'kg/kg', 'name': 'Specific Humidity', 'cmap': 'Blues', 'vmax_percentile': 95, 'type': 'atmospheric'},
+     'd2m': {'units': 'K', 'name': '2m Dewpoint Temperature', 'cmap': 'Blues', 'vmax_percentile': 95, 'type': 'surface'},
+
+     # Precipitation and cloud variables
+     'tp': {'units': 'm', 'name': 'Total Precipitation', 'cmap': 'Blues', 'vmax_percentile': 98, 'type': 'surface'},
+     'precipitation': {'units': 'm', 'name': 'Precipitation', 'cmap': 'Blues', 'vmax_percentile': 98, 'type': 'surface'},
+     'tcc': {'units': '%', 'name': 'Total Cloud Cover', 'cmap': 'Blues', 'vmax_percentile': 95, 'type': 'surface'},
+     'lcc': {'units': '%', 'name': 'Low Cloud Cover', 'cmap': 'Blues', 'vmax_percentile': 95, 'type': 'surface'},
+     'mcc': {'units': '%', 'name': 'Medium Cloud Cover', 'cmap': 'Blues', 'vmax_percentile': 95, 'type': 'surface'},
+     'hcc': {'units': '%', 'name': 'High Cloud Cover', 'cmap': 'Blues', 'vmax_percentile': 95, 'type': 'surface'},
+
+     # Radiation variables
+     'ssrd': {'units': 'J/m²', 'name': 'Surface Solar Radiation', 'cmap': 'YlOrRd', 'vmax_percentile': 95, 'type': 'surface'},
+     'strd': {'units': 'J/m²', 'name': 'Surface Thermal Radiation', 'cmap': 'inferno', 'vmax_percentile': 95, 'type': 'surface'},
+     'tsr': {'units': 'J/m²', 'name': 'Top Solar Radiation', 'cmap': 'YlOrRd', 'vmax_percentile': 95, 'type': 'surface'},
+     'ttr': {'units': 'J/m²', 'name': 'Top Thermal Radiation', 'cmap': 'inferno', 'vmax_percentile': 95, 'type': 'surface'},
+
+     # Vertical motion
+     'w': {'units': 'Pa/s', 'name': 'Vertical Velocity (Pressure)', 'cmap': 'coolwarm', 'vmax_percentile': 95, 'type': 'atmospheric'},
+     'omega': {'units': 'Pa/s', 'name': 'Vertical Velocity (Omega)', 'cmap': 'coolwarm', 'vmax_percentile': 95, 'type': 'atmospheric'},
+
+     # Surface variables
+     'skt': {'units': 'K', 'name': 'Skin Temperature', 'cmap': 'RdYlBu', 'vmax_percentile': 95, 'type': 'surface'},
+     'swvl1': {'units': 'm³/m³', 'name': 'Soil Water Level 1', 'cmap': 'Blues', 'vmax_percentile': 95, 'type': 'surface'},
+     'stl1': {'units': 'K', 'name': 'Soil Temperature Level 1', 'cmap': 'RdYlBu', 'vmax_percentile': 95, 'type': 'surface'},
+     'lsm': {'units': '0-1', 'name': 'Land-Sea Mask', 'cmap': 'viridis', 'vmax_percentile': 100, 'type': 'surface'},
+
+     # === OTHER COMMON NETCDF VARIABLES ===
+
+     # Generic/Unknown variables (fallback)
+     'var': {'units': 'unknown', 'name': 'Variable', 'cmap': 'viridis', 'vmax_percentile': 95, 'type': 'unknown'},
+     'data': {'units': 'unknown', 'name': 'Data', 'cmap': 'viridis', 'vmax_percentile': 95, 'type': 'unknown'},
+     'field': {'units': 'unknown', 'name': 'Field', 'cmap': 'viridis', 'vmax_percentile': 95, 'type': 'unknown'},
  }
 
+ # Maintain backward compatibility
+ AIR_POLLUTION_VARIABLES = {k: v for k, v in NETCDF_VARIABLES.items() if 'pollution' in k.lower() or any(pollutant in k.lower() for pollutant in ['pm', 'no', 'so', 'o3', 'co', 'nh3', 'aod', 'column'])}
+
  # Available color themes for plotting
  COLOR_THEMES = {
      'YlOrRd': 'Yellow-Orange-Red',
data_processor.py CHANGED
@@ -13,7 +13,7 @@ from pathlib import Path
  from datetime import datetime
 
  # Imports from our Modules
- from constants import AIR_POLLUTION_VARIABLES, PRESSURE_LEVELS
+ from constants import NETCDF_VARIABLES, AIR_POLLUTION_VARIABLES, PRESSURE_LEVELS
  warnings.filterwarnings('ignore')
 
  class NetCDFProcessor:
@@ -98,7 +98,7 @@ class NetCDFProcessor:
  return True
 
  def detect_variables(self):
-     """Detect air pollution variables in all loaded datasets"""
+     """Detect all supported variables (pollution, meteorological, etc.) in all loaded datasets"""
      self.detected_variables = {}
 
      # Check surface dataset
@@ -119,60 +119,90 @@
  return self.detected_variables
 
  def _detect_variables_in_dataset(self, dataset, dataset_type):
-     """Detect air pollution variables in a specific dataset"""
+     """Detect all supported variables in a specific dataset"""
      detected = {}
 
      for var_name in dataset.data_vars:
          var_name_lower = var_name.lower()
 
-         # Check exact matches first
-         if var_name in AIR_POLLUTION_VARIABLES:
-             detected[var_name] = AIR_POLLUTION_VARIABLES[var_name].copy()
+         # Check exact matches first in NETCDF_VARIABLES
+         if var_name in NETCDF_VARIABLES:
+             detected[var_name] = NETCDF_VARIABLES[var_name].copy()
              detected[var_name]['original_name'] = var_name
             detected[var_name]['dataset_type'] = dataset_type
             detected[var_name]['shape'] = dataset[var_name].shape
             detected[var_name]['dims'] = list(dataset[var_name].dims)
 
-         elif var_name_lower in AIR_POLLUTION_VARIABLES:
-             detected[var_name] = AIR_POLLUTION_VARIABLES[var_name_lower].copy()
+         elif var_name_lower in NETCDF_VARIABLES:
+             detected[var_name] = NETCDF_VARIABLES[var_name_lower].copy()
             detected[var_name]['original_name'] = var_name
             detected[var_name]['dataset_type'] = dataset_type
             detected[var_name]['shape'] = dataset[var_name].shape
             detected[var_name]['dims'] = list(dataset[var_name].dims)
 
         else:
-             # Check for partial matches
+             # Auto-detect unknown variables by examining their attributes
             var_info = dataset[var_name]
             long_name = getattr(var_info, 'long_name', '').lower()
             standard_name = getattr(var_info, 'standard_name', '').lower()
+             units = getattr(var_info, 'units', 'unknown')
 
-             # Check for keywords
-             pollution_keywords = {
-                 'pm2.5': {'units': 'µg/m³', 'name': 'PM2.5', 'cmap': 'YlOrRd', 'vmax_percentile': 95, 'type': 'surface'},
-                 'pm10': {'units': 'µg/m³', 'name': 'PM10', 'cmap': 'Oranges', 'vmax_percentile': 95, 'type': 'surface'},
-                 'pm1': {'units': 'µg/m³', 'name': 'PM1', 'cmap': 'Reds', 'vmax_percentile': 95, 'type': 'surface'},
-                 'no2': {'units': 'µg/m³', 'name': 'NO₂', 'cmap': 'Reds', 'vmax_percentile': 90, 'type': 'atmospheric'},
-                 'nitrogen dioxide': {'units': 'µg/m³', 'name': 'NO₂', 'cmap': 'Reds', 'vmax_percentile': 90, 'type': 'atmospheric'},
-                 'so2': {'units': 'µg/m³', 'name': 'SO₂', 'cmap': 'Purples', 'vmax_percentile': 90, 'type': 'atmospheric'},
-                 'sulphur dioxide': {'units': 'µg/m³', 'name': 'SO₂', 'cmap': 'Purples', 'vmax_percentile': 90, 'type': 'atmospheric'},
-                 'sulfur dioxide': {'units': 'µg/m³', 'name': 'SO₂', 'cmap': 'Purples', 'vmax_percentile': 90, 'type': 'atmospheric'},
-                 'ozone': {'units': 'µg/m³', 'name': 'O₃', 'cmap': 'Blues', 'vmax_percentile': 90, 'type': 'atmospheric'},
-                 'carbon monoxide': {'units': 'mg/m³', 'name': 'CO', 'cmap': 'Greens', 'vmax_percentile': 90, 'type': 'atmospheric'},
-                 'nitrogen monoxide': {'units': 'µg/m³', 'name': 'NO', 'cmap': 'Oranges', 'vmax_percentile': 90, 'type': 'atmospheric'},
-                 'ammonia': {'units': 'µg/m³', 'name': 'NH₃', 'cmap': 'viridis', 'vmax_percentile': 90, 'type': 'atmospheric'},
-                 'particulate': {'units': 'µg/m³', 'name': 'Particulate Matter', 'cmap': 'YlOrRd', 'vmax_percentile': 95, 'type': 'surface'},
-             }
-
-             for keyword, properties in pollution_keywords.items():
-                 if (keyword in var_name_lower or
-                     keyword in long_name or
-                     keyword in standard_name):
+             # Try to match against any known variable in NETCDF_VARIABLES by keywords
+             matched = False
+             for known_var, properties in NETCDF_VARIABLES.items():
+                 if (known_var in var_name_lower or
+                     known_var in long_name or
+                     known_var in standard_name or
+                     properties['name'].lower() in var_name_lower or
+                     properties['name'].lower() in long_name):
                     detected[var_name] = properties.copy()
                     detected[var_name]['original_name'] = var_name
                     detected[var_name]['dataset_type'] = dataset_type
                     detected[var_name]['shape'] = dataset[var_name].shape
                     detected[var_name]['dims'] = list(dataset[var_name].dims)
+                     if units != 'unknown':
+                         detected[var_name]['units'] = units # Use actual units from file
+                     matched = True
                     break
+
+             # If still no match, create a generic entry for any 2D+ variable
+             if not matched and len(dataset[var_name].dims) >= 2:
+                 # Check if it has lat/lon dimensions
+                 dims = list(dataset[var_name].dims)
+                 has_spatial = any(dim in ['lat', 'lon', 'latitude', 'longitude', 'x', 'y']
+                                   for dim in [d.lower() for d in dims])
+
+                 if has_spatial:
+                     # Determine variable type based on dimensions
+                     var_type = 'surface'
+                     if any(dim in ['level', 'plev', 'pressure', 'height'] for dim in [d.lower() for d in dims]):
+                         var_type = 'atmospheric'
+
+                     # Auto-determine color scheme based on variable name or units
+                     cmap = 'viridis' # default
+                     if 'temp' in var_name_lower or 'temperature' in long_name:
+                         cmap = 'RdYlBu'
+                     elif any(word in var_name_lower for word in ['wind', 'u', 'v']):
+                         cmap = 'coolwarm'
+                     elif any(word in var_name_lower for word in ['precip', 'rain', 'cloud', 'humid']):
+                         cmap = 'Blues'
+                     elif 'pressure' in var_name_lower or 'pressure' in long_name:
+                         cmap = 'RdYlBu'
+                     elif any(word in var_name_lower for word in ['radiation', 'solar']):
+                         cmap = 'YlOrRd'
+
+                     detected[var_name] = {
+                         'units': units,
+                         'name': long_name.title() if long_name else var_name.replace('_', ' ').title(),
+                         'cmap': cmap,
+                         'vmax_percentile': 95,
+                         'type': var_type,
+                         'original_name': var_name,
+                         'dataset_type': dataset_type,
+                         'shape': dataset[var_name].shape,
+                         'dims': dims,
+                         'auto_detected': True # Flag to indicate this was auto-detected
+                     }
 
  return detected
 
@@ -476,13 +506,27 @@
  return PRESSURE_LEVELS # Default pressure levels
 
  def close(self):
-     """Close all open datasets"""
-     if self.dataset is not None:
-         self.dataset.close()
-     if self.surface_dataset is not None:
-         self.surface_dataset.close()
-     if self.atmospheric_dataset is not None:
-         self.atmospheric_dataset.close()
+     """Close all open datasets safely"""
+     try:
+         if self.dataset is not None:
+             self.dataset.close()
+             self.dataset = None
+     except (RuntimeError, OSError):
+         pass # Dataset already closed or invalid
+
+     try:
+         if self.surface_dataset is not None:
+             self.surface_dataset.close()
+             self.surface_dataset = None
+     except (RuntimeError, OSError):
+         pass # Dataset already closed or invalid
+
+     try:
+         if self.atmospheric_dataset is not None:
+             self.atmospheric_dataset.close()
+             self.atmospheric_dataset = None
+     except (RuntimeError, OSError):
+         pass # Dataset already closed or invalid
 
 
  def analyze_netcdf_file(file_path):
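Below is a minimal usage sketch (not part of the commit) of the detection and cleanup path touched above; the file path is a placeholder and the printed fields are illustrative.

from data_processor import NetCDFProcessor

processor = NetCDFProcessor("uploads/example_cams.nc")  # placeholder path
try:
    processor.load_dataset()
    detected = processor.detect_variables()
    for name, info in detected.items():
        # 'units' comes from the file when available; 'auto_detected' marks generic entries
        print(name, info.get('units'), info.get('auto_detected', False))
finally:
    processor.close()  # safe to call repeatedly now that close() swallows already-closed errors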
diagnose_cams.py ADDED
@@ -0,0 +1,131 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ CAMS Download Diagnostic Tool
4
+ Helps troubleshoot issues with CAMS data downloads
5
+ """
6
+
7
+ import os
8
+ import zipfile
9
+ from pathlib import Path
10
+ from datetime import datetime, timedelta
11
+
12
+ def diagnose_cams_downloads():
13
+ """Diagnose CAMS download issues"""
14
+ print("๐Ÿ” CAMS Download Diagnostic Tool")
15
+ print("=" * 50)
16
+
17
+ # Check downloads directory
18
+ downloads_dir = Path("downloads")
19
+ if not downloads_dir.exists():
20
+ print("โŒ Downloads directory doesn't exist")
21
+ return
22
+
23
+ print(f"๐Ÿ“ Downloads directory: {downloads_dir.absolute()}")
24
+
25
+ # List all files in downloads
26
+ all_files = list(downloads_dir.glob("*"))
27
+ if not all_files:
28
+ print("๐Ÿ“‚ Downloads directory is empty")
29
+ return
30
+
31
+ print(f"\n๐Ÿ“‹ Found {len(all_files)} files:")
32
+
33
+ for file_path in all_files:
34
+ print(f"\n๐Ÿ“„ File: {file_path.name}")
35
+ print(f" Size: {file_path.stat().st_size} bytes ({file_path.stat().st_size / 1024:.1f} KB)")
36
+
37
+ # Check if it's supposed to be a ZIP file
38
+ if file_path.suffix.lower() == '.zip' or 'cams' in file_path.name.lower():
39
+ print(f" Expected: ZIP file")
40
+
41
+ # Test if it's actually a ZIP
42
+ if zipfile.is_zipfile(file_path):
43
+ print(f" โœ… Valid ZIP file")
44
+ try:
45
+ with zipfile.ZipFile(file_path, 'r') as zf:
46
+ contents = zf.namelist()
47
+ print(f" ๐Ÿ“ฆ Contains {len(contents)} files:")
48
+ for content in contents[:5]: # Show first 5 files
49
+ print(f" - {content}")
50
+ if len(contents) > 5:
51
+ print(f" ... and {len(contents) - 5} more")
52
+ except Exception as e:
53
+ print(f" โš ๏ธ Error reading ZIP: {e}")
54
+ else:
55
+ print(f" โŒ NOT a valid ZIP file")
56
+
57
+ # Try to read first few bytes to see what it actually is
58
+ try:
59
+ with open(file_path, 'rb') as f:
60
+ header = f.read(100)
61
+ print(f" ๐Ÿ” File header (first 100 bytes): {header[:50]}...")
62
+
63
+ # Check for common error patterns
64
+ header_str = header.decode('utf-8', errors='ignore').lower()
65
+ if 'html' in header_str:
66
+ print(f" ๐Ÿšจ Appears to be HTML (likely an error page)")
67
+ elif 'error' in header_str:
68
+ print(f" ๐Ÿšจ Contains 'error' - likely an error response")
69
+ elif 'json' in header_str:
70
+ print(f" ๐Ÿšจ Appears to be JSON (likely an API error)")
71
+ elif header.startswith(b'PK'):
72
+ print(f" ๐Ÿค” Has ZIP signature but zipfile module rejects it")
73
+ else:
74
+ print(f" โ“ Unknown file format")
75
+
76
+ except Exception as e:
77
+ print(f" โŒ Error reading file: {e}")
78
+
79
+ def test_cds_connection():
80
+ """Test CDS API connection"""
81
+ print("\n๐ŸŒ Testing CDS API Connection")
82
+ print("-" * 30)
83
+
84
+ try:
85
+ import cdsapi
86
+
87
+ # Check for .cdsapirc file
88
+ cdsapirc_path = Path.home() / '.cdsapirc'
89
+ if cdsapirc_path.exists():
90
+ print("โœ… .cdsapirc file found")
91
+
92
+ # Try to initialize client
93
+ try:
94
+ client = cdsapi.Client()
95
+ print("โœ… CDS API client initialized successfully")
96
+
97
+ # Test a simple info request (doesn't download data)
98
+ print("๐Ÿ”„ Testing API connection...")
99
+ # Note: This is just a connection test, not actually downloading
100
+ print("โœ… CDS API connection appears to be working")
101
+ print("๐Ÿ’ก If downloads fail, it may be due to:")
102
+ print(" - Invalid date range")
103
+ print(" - CAMS service temporary issues")
104
+ print(" - Account limitations")
105
+
106
+ except Exception as e:
107
+ print(f"โŒ CDS API client initialization failed: {e}")
108
+
109
+ else:
110
+ print("โŒ .cdsapirc file not found")
111
+ print("๐Ÿ’ก Create ~/.cdsapirc with your CDS API credentials")
112
+
113
+ except ImportError:
114
+ print("โŒ cdsapi module not installed")
115
+ print("๐Ÿ’ก Install with: pip install cdsapi")
116
+
117
+ def suggest_solutions():
118
+ """Suggest solutions for common issues"""
119
+ print("\n๐Ÿ’ก Common Solutions")
120
+ print("-" * 20)
121
+ print("1. ๐Ÿ”„ Try a different date (some dates may not have data)")
122
+ print("2. ๐Ÿ• Wait and retry (CAMS servers may be busy)")
123
+ print("3. ๐Ÿ”‘ Check CDS API credentials in ~/.cdsapirc")
124
+ print("4. ๐Ÿ—‘๏ธ Clear downloads directory and retry")
125
+ print("5. ๐Ÿ“… Use more recent dates (last 30 days usually work)")
126
+ print("6. ๐ŸŒ Check CDS website status: https://cds.climate.copernicus.eu/")
127
+
128
+ if __name__ == "__main__":
129
+ diagnose_cams_downloads()
130
+ test_cds_connection()
131
+ suggest_solutions()
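The diagnostic above flags empty, non-ZIP, or error-page downloads; below is a small sketch (not part of the commit) of how the same checks could gate extraction before a file is processed. The helper name and path are placeholders.

import zipfile
from pathlib import Path

def looks_like_valid_cams_zip(path: Path) -> bool:
    # Reject missing or empty files, non-ZIP payloads (e.g. HTML/JSON error pages),
    # and archives that contain nothing.
    if not path.exists() or path.stat().st_size == 0:
        return False
    if not zipfile.is_zipfile(path):
        return False
    with zipfile.ZipFile(path) as zf:
        return len(zf.namelist()) > 0

print(looks_like_valid_cams_zip(Path("downloads/example_cams.zip")))  # placeholder path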
requirements.txt CHANGED
@@ -1,7 +1,7 @@
  flask==2.3.3
  numpy==1.24.3
- pandas==2.0.3
- matplotlib==3.7.2
  cartopy==0.22.0
  xarray==2023.8.0
  netcdf4==1.6.4
@@ -12,4 +12,8 @@ python-dateutil==2.8.2
  plotly==6.3.0
  kaleido
  geopandas
- shapely

  flask==2.3.3
  numpy==1.24.3
+ pandas==2.1.4
+ matplotlib==3.8.2
  cartopy==0.22.0
  xarray==2023.8.0
  netcdf4==1.6.4

  plotly==6.3.0
  kaleido
  geopandas
+ shapely
+ fiona
+ torch
+ huggingface-hub
+ microsoft-aurora
templates/aurora_predict.html ADDED
@@ -0,0 +1,586 @@
1
+ <!DOCTYPE html>
2
+ <html lang="en">
3
+ <head>
4
+ <meta charset="UTF-8">
5
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
6
+ <title>Aurora ML Predictions - CAMS Pollution Dashboard</title>
7
+ <style>
8
+ * {
9
+ margin: 0;
10
+ padding: 0;
11
+ box-sizing: border-box;
12
+ }
13
+
14
+ body {
15
+ font-family: 'Arial', sans-serif;
16
+ background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
17
+ min-height: 100vh;
18
+ color: #333;
19
+ }
20
+
21
+ .container {
22
+ max-width: 800px;
23
+ margin: 0 auto;
24
+ padding: 20px;
25
+ }
26
+
27
+ .header {
28
+ text-align: center;
29
+ margin-bottom: 40px;
30
+ color: white;
31
+ }
32
+
33
+ .header h1 {
34
+ font-size: 2.5em;
35
+ margin-bottom: 10px;
36
+ text-shadow: 2px 2px 4px rgba(0,0,0,0.3);
37
+ }
38
+
39
+ .header p {
40
+ font-size: 1.2em;
41
+ opacity: 0.9;
42
+ }
43
+
44
+ .form-container {
45
+ background: rgba(255, 255, 255, 0.95);
46
+ border-radius: 15px;
47
+ padding: 40px;
48
+ box-shadow: 0 15px 35px rgba(0, 0, 0, 0.1);
49
+ backdrop-filter: blur(10px);
50
+ }
51
+
52
+ .form-group {
53
+ margin-bottom: 25px;
54
+ }
55
+
56
+ .form-group label {
57
+ display: block;
58
+ margin-bottom: 8px;
59
+ font-weight: bold;
60
+ color: #555;
61
+ }
62
+
63
+ .form-group input, .form-group select {
64
+ width: 100%;
65
+ padding: 15px;
66
+ border: 2px solid #e1e1e1;
67
+ border-radius: 8px;
68
+ font-size: 16px;
69
+ transition: border-color 0.3s ease;
70
+ }
71
+
72
+ .form-group input:focus, .form-group select:focus {
73
+ outline: none;
74
+ border-color: #667eea;
75
+ box-shadow: 0 0 0 3px rgba(102, 126, 234, 0.1);
76
+ }
77
+
78
+ .info-box {
79
+ background: #f8f9ff;
80
+ border: 2px solid #e3e7ff;
81
+ border-radius: 10px;
82
+ padding: 20px;
83
+ margin-bottom: 25px;
84
+ }
85
+
86
+ .info-box h3 {
87
+ color: #4c63d2;
88
+ margin-bottom: 10px;
89
+ }
90
+
91
+ .info-box ul {
92
+ margin-left: 20px;
93
+ color: #666;
94
+ }
95
+
96
+ .info-box li {
97
+ margin-bottom: 5px;
98
+ }
99
+
100
+ .btn {
101
+ background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
102
+ color: white;
103
+ padding: 15px 30px;
104
+ border: none;
105
+ border-radius: 8px;
106
+ font-size: 18px;
107
+ cursor: pointer;
108
+ transition: transform 0.2s ease, box-shadow 0.2s ease;
109
+ width: 100%;
110
+ }
111
+
112
+ .btn:hover {
113
+ transform: translateY(-2px);
114
+ box-shadow: 0 8px 25px rgba(102, 126, 234, 0.3);
115
+ }
116
+
117
+ .btn:active {
118
+ transform: translateY(0);
119
+ }
120
+
121
+ .btn:disabled {
122
+ background: #bdc3c7;
123
+ cursor: not-allowed;
124
+ transform: none;
125
+ }
126
+
127
+ /* Loading Animation Styles */
128
+ .loading-overlay {
129
+ display: none;
130
+ position: fixed;
131
+ top: 0;
132
+ left: 0;
133
+ width: 100%;
134
+ height: 100%;
135
+ background: rgba(0, 0, 0, 0.8);
136
+ z-index: 9999;
137
+ justify-content: center;
138
+ align-items: center;
139
+ flex-direction: column;
140
+ }
141
+
142
+ .loading-content {
143
+ background: white;
144
+ padding: 40px;
145
+ border-radius: 20px;
146
+ text-align: center;
147
+ max-width: 500px;
148
+ box-shadow: 0 20px 60px rgba(0, 0, 0, 0.3);
149
+ }
150
+
151
+ .loading-spinner {
152
+ width: 80px;
153
+ height: 80px;
154
+ border: 8px solid #f3f3f3;
155
+ border-top: 8px solid #667eea;
156
+ border-radius: 50%;
157
+ animation: spin 1s linear infinite;
158
+ margin: 0 auto 20px;
159
+ }
160
+
161
+ @keyframes spin {
162
+ 0% { transform: rotate(0deg); }
163
+ 100% { transform: rotate(360deg); }
164
+ }
165
+
166
+ .progress-bar {
167
+ width: 100%;
168
+ height: 20px;
169
+ background: #f0f0f0;
170
+ border-radius: 10px;
171
+ overflow: hidden;
172
+ margin: 20px 0;
173
+ }
174
+
175
+ .progress-fill {
176
+ height: 100%;
177
+ background: linear-gradient(90deg, #667eea, #764ba2);
178
+ border-radius: 10px;
179
+ width: 0%;
180
+ transition: width 0.3s ease;
181
+ animation: pulse 2s infinite;
182
+ }
183
+
184
+ @keyframes pulse {
185
+ 0%, 100% { opacity: 1; }
186
+ 50% { opacity: 0.7; }
187
+ }
188
+
189
+ .loading-steps {
190
+ text-align: left;
191
+ margin-top: 20px;
192
+ }
193
+
194
+ .loading-step {
195
+ padding: 8px 0;
196
+ display: flex;
197
+ align-items: center;
198
+ font-size: 14px;
199
+ }
200
+
201
+ .step-icon {
202
+ width: 20px;
203
+ height: 20px;
204
+ border-radius: 50%;
205
+ margin-right: 10px;
206
+ display: flex;
207
+ align-items: center;
208
+ justify-content: center;
209
+ font-size: 12px;
210
+ }
211
+
212
+ .step-pending {
213
+ background: #f0f0f0;
214
+ color: #999;
215
+ }
216
+
217
+ .step-active {
218
+ background: #667eea;
219
+ color: white;
220
+ animation: pulse 1s infinite;
221
+ }
222
+
223
+ .step-complete {
224
+ background: #28a745;
225
+ color: white;
226
+ }
227
+
228
+ .aurora-icon {
229
+ font-size: 3em;
230
+ margin-bottom: 15px;
231
+ animation: float 3s ease-in-out infinite;
232
+ }
233
+
234
+ @keyframes float {
235
+ 0%, 100% { transform: translateY(0px); }
236
+ 50% { transform: translateY(-10px); }
237
+ }
238
+
239
+ .back-link {
240
+ display: inline-block;
241
+ margin-bottom: 20px;
242
+ color: white;
243
+ text-decoration: none;
244
+ font-size: 16px;
245
+ transition: opacity 0.3s ease;
246
+ }
247
+
248
+ .back-link:hover {
249
+ opacity: 0.8;
250
+ }
251
+
252
+ .back-link::before {
253
+ content: "โ† ";
254
+ }
255
+
256
+ .warning-box {
257
+ background: #fff8e1;
258
+ border: 2px solid #ffcc02;
259
+ border-radius: 10px;
260
+ padding: 20px;
261
+ margin-bottom: 25px;
262
+ }
263
+
264
+ .warning-box h3 {
265
+ color: #f57c00;
266
+ margin-bottom: 10px;
267
+ }
268
+
269
+ @media (max-width: 768px) {
270
+ .container {
271
+ padding: 10px;
272
+ }
273
+
274
+ .form-container {
275
+ padding: 20px;
276
+ }
277
+
278
+ .header h1 {
279
+ font-size: 2em;
280
+ }
281
+ }
282
+ </style>
283
+ </head>
284
+ <body>
285
+ <div class="container">
286
+ <a href="{{ url_for('index') }}" class="back-link">Back to Main Dashboard</a>
287
+
288
+ <div class="header">
289
+ <h1>๐Ÿ”ฎ Aurora ML Predictions</h1>
290
+ <p>Generate AI-powered air pollution forecasts using Microsoft's Aurora model</p>
291
+ </div>
292
+
293
+ <div class="form-container">
294
+ <div class="info-box">
295
+ <h3>๐Ÿš€ About Aurora Predictions</h3>
296
+ <ul>
297
+ <li><strong>AI-Powered Forecasting:</strong> Uses Microsoft's Aurora foundation model for atmospheric predictions</li>
298
+ <li><strong>Multi-Step Forecasting:</strong> Generate predictions for up to 10 time steps ahead</li>
299
+ <li><strong>Real CAMS Data:</strong> Downloads actual atmospheric data for the selected date</li>
300
+ <li><strong>Multiple Variables:</strong> Predicts PM2.5, PM10, O₃, NO₂, CO, SO₂ and meteorological variables</li>
301
+ </ul>
302
+ </div>
303
+
304
+ <div class="warning-box">
305
+ <h3>โš ๏ธ Performance Notes</h3>
306
+ <p><strong>CPU Mode:</strong> Aurora will run on CPU for local testing. This is slower but doesn't require GPU.</p>
307
+ <p><strong>GPU Mode:</strong> If CUDA GPU is available, Aurora will use it for faster predictions.</p>
308
+ <p><strong>Processing Time:</strong> CPU: 5-15 minutes per step | GPU: 1-3 minutes total</p>
309
+ <p><strong>Memory:</strong> CPU mode automatically limits to 2 steps to prevent memory issues.</p>
310
+ </div>
311
+
312
+ <form method="POST">
313
+ <div class="form-group">
314
+ <label for="date">๐Ÿ“… Select Date for Initial Conditions:</label>
315
+ <input type="date"
316
+ id="date"
317
+ name="date"
318
+ value="{{ current_date }}"
319
+ min="2015-01-01"
320
+ max="{{ current_date }}"
321
+ required>
322
+ <small style="color: #666; font-size: 14px;">
323
+ Aurora will download CAMS data for this date and generate forecasts
324
+ </small>
325
+ </div>
326
+
327
+ <div class="form-group">
328
+ <label for="steps">๐Ÿ”ข Number of Forecast Steps:</label>
329
+ <select id="steps" name="steps" required>
330
+ <option value="1">1 step (6 hours) - Fastest</option>
331
+ <option value="2" selected>2 steps (12 hours) - CPU Friendly</option>
332
+ <option value="4">4 steps (24 hours) - Standard</option>
333
+ <option value="6">6 steps (36 hours) - GPU Recommended</option>
334
+ <option value="8">8 steps (48 hours) - GPU Required</option>
335
+ <option value="10">10 steps (60 hours) - GPU Required</option>
336
+ </select>
337
+ <small style="color: #666; font-size: 14px;">
338
+ Each step represents 6 hours. CPU mode automatically limits to 2 steps maximum.
339
+ </small>
340
+ </div>
341
+
342
+ <button type="submit" class="btn" id="predictBtn">
343
+ ๐Ÿš€ Generate Aurora Predictions
344
+ </button>
345
+ </form>
346
+
347
+ <!-- Loading Overlay -->
348
+ <div class="loading-overlay" id="loadingOverlay">
349
+ <div class="loading-content">
350
+ <div class="aurora-icon">๐Ÿ”ฎ</div>
351
+ <h2 style="color: #667eea; margin-bottom: 10px;">Aurora AI Processing</h2>
352
+ <p style="color: #666; margin-bottom: 20px;">Generating atmospheric predictions using Microsoft's Aurora model...</p>
353
+
354
+ <div class="loading-spinner"></div>
355
+
356
+ <div class="progress-bar">
357
+ <div class="progress-fill" id="progressFill"></div>
358
+ </div>
359
+
360
+ <div id="currentStep" style="font-weight: bold; color: #667eea; margin-bottom: 20px;">
361
+ Initializing Aurora pipeline...
362
+ </div>
363
+
364
+ <div class="loading-steps">
365
+ <div class="loading-step">
366
+ <div class="step-icon step-pending" id="step1">1</div>
367
+ <span>Downloading CAMS atmospheric data</span>
368
+ </div>
369
+ <div class="loading-step">
370
+ <div class="step-icon step-pending" id="step2">2</div>
371
+ <span>Loading Aurora ML model</span>
372
+ </div>
373
+ <div class="loading-step">
374
+ <div class="step-icon step-pending" id="step3">3</div>
375
+ <span>Processing initial conditions</span>
376
+ </div>
377
+ <div class="loading-step">
378
+ <div class="step-icon step-pending" id="step4">4</div>
379
+ <span>Running AI predictions</span>
380
+ </div>
381
+ <div class="loading-step">
382
+ <div class="step-icon step-pending" id="step5">5</div>
383
+ <span>Saving results and preparing visualization</span>
384
+ </div>
385
+ </div>
386
+
387
+ <p style="margin-top: 20px; font-size: 12px; color: #999;">
388
+ <strong>Estimated time:</strong> <span id="estimatedTime">2-5 minutes</span><br>
389
+ This may take longer on CPU-only systems.
390
+ </p>
391
+ </div>
392
+ </div>
393
+
394
+ <div style="margin-top: 30px; padding: 20px; background: #f5f5f5; border-radius: 10px;">
395
+ <h3 style="color: #555; margin-bottom: 15px;">๐Ÿ“Š What You'll Get:</h3>
396
+ <ul style="color: #666; margin-left: 20px;">
397
+ <li>Interactive visualization of predicted air pollution concentrations</li>
398
+ <li>Step-by-step forecast evolution over time</li>
399
+ <li>Downloadable NetCDF files with all prediction data</li>
400
+ <li>Support for all major pollutants and meteorological variables</li>
401
+ </ul>
402
+ </div>
403
+ </div>
404
+ </div>
405
+
406
+ <script>
407
+ // Aurora Prediction Loading Animation
408
+ class AuroraLoadingManager {
409
+ constructor() {
410
+ this.form = document.querySelector('form');
411
+ this.predictBtn = document.getElementById('predictBtn');
412
+ this.overlay = document.getElementById('loadingOverlay');
413
+ this.progressFill = document.getElementById('progressFill');
414
+ this.currentStep = document.getElementById('currentStep');
415
+ this.estimatedTime = document.getElementById('estimatedTime');
416
+
417
+ this.steps = [
418
+ { id: 'step1', text: 'Downloading CAMS atmospheric data...', duration: 20000 },
419
+ { id: 'step2', text: 'Loading Aurora ML model into memory...', duration: 30000 },
420
+ { id: 'step3', text: 'Processing initial atmospheric conditions...', duration: 15000 },
421
+ { id: 'step4', text: 'Running AI predictions (this may take a while)...', duration: 60000 },
422
+ { id: 'step5', text: 'Saving results and preparing visualization...', duration: 10000 }
423
+ ];
424
+
425
+ this.currentStepIndex = 0;
426
+ this.startTime = null;
427
+
428
+ this.init();
429
+ }
430
+
431
+ init() {
432
+ this.form.addEventListener('submit', (e) => {
433
+ this.startLoading();
434
+ });
435
+ }
436
+
437
+ startLoading() {
438
+ this.startTime = Date.now();
439
+ this.overlay.style.display = 'flex';
440
+ this.predictBtn.disabled = true;
441
+
442
+ // Estimate time based on selected steps
443
+ const steps = parseInt(document.getElementById('steps').value);
444
+ const isCPU = this.detectCPUMode();
445
+ this.updateEstimatedTime(steps, isCPU);
446
+
447
+ // Start progress simulation
448
+ this.simulateProgress();
449
+ }
450
+
451
+ detectCPUMode() {
452
+ // Simple heuristic - if user selected fewer steps, likely CPU mode
453
+ const steps = parseInt(document.getElementById('steps').value);
454
+ return steps <= 2;
455
+ }
456
+
457
+ updateEstimatedTime(steps, isCPU) {
458
+ // Fetch actual system capabilities for better estimates
459
+ fetch('/api/aurora_status')
460
+ .then(response => response.json())
461
+ .then(data => {
462
+ if (data.available) {
463
+ const mode = data.cpu_only ? 'cpu' : 'gpu';
464
+ let estimatedMinutes;
465
+
466
+ if (mode === 'cpu') {
467
+ estimatedMinutes = steps <= 1 ? 5 : 8;
468
+ } else {
469
+ estimatedMinutes = Math.max(2, steps * 0.5 + 2);
470
+ }
471
+
472
+ this.estimatedTime.textContent = `${estimatedMinutes}-${estimatedMinutes + 2} minutes (${mode.toUpperCase()} mode)`;
473
+ } else {
474
+ this.estimatedTime.textContent = 'Aurora not available';
475
+ }
476
+ })
477
+ .catch(() => {
478
+ // Fallback to original logic
479
+ let baseTime = steps * (isCPU ? 5 : 1);
480
+ baseTime += 2;
481
+ this.estimatedTime.textContent = `${baseTime}-${baseTime + 2} minutes`;
482
+ });
483
+ }
484
+
485
+ simulateProgress() {
486
+ let totalDuration = this.steps.reduce((sum, step) => sum + step.duration, 0);
487
+ let elapsed = 0;
488
+
489
+ this.progressSteps(0);
490
+ }
491
+
492
+ progressSteps(stepIndex) {
493
+ if (stepIndex >= this.steps.length) {
494
+ return; // Let the actual response handle completion
495
+ }
496
+
497
+ const step = this.steps[stepIndex];
498
+ const stepElement = document.getElementById(step.id);
499
+
500
+ // Mark previous steps as complete
501
+ for (let i = 0; i < stepIndex; i++) {
502
+ const prevStep = document.getElementById(this.steps[i].id);
503
+ prevStep.className = 'step-icon step-complete';
504
+ prevStep.innerHTML = '✓';
505
+ }
506
+
507
+ // Mark current step as active
508
+ stepElement.className = 'step-icon step-active';
509
+ this.currentStep.textContent = step.text;
510
+
511
+ // Update progress bar
512
+ const progress = ((stepIndex + 1) / this.steps.length) * 100;
513
+ this.progressFill.style.width = `${progress}%`;
514
+
515
+ // Move to next step after duration
516
+ setTimeout(() => {
517
+ this.progressSteps(stepIndex + 1);
518
+ }, step.duration);
519
+ }
520
+
521
+ // Call this when the actual response is received
522
+ completeLoading() {
523
+ // Mark all steps as complete
524
+ this.steps.forEach((step, index) => {
525
+ const stepElement = document.getElementById(step.id);
526
+ stepElement.className = 'step-icon step-complete';
527
+ stepElement.innerHTML = '✓';
528
+ });
529
+
530
+ this.progressFill.style.width = '100%';
531
+ this.currentStep.textContent = 'Complete! Redirecting to results...';
532
+
533
+ // Hide overlay after a short delay
534
+ setTimeout(() => {
535
+ this.overlay.style.display = 'none';
536
+ this.predictBtn.disabled = false;
537
+ }, 2000);
538
+ }
539
+ }
540
+
541
+ // Initialize loading manager when page loads
542
+ document.addEventListener('DOMContentLoaded', function() {
543
+ window.auroraLoader = new AuroraLoadingManager();
544
+
545
+ // Handle form validation
546
+ const form = document.querySelector('form');
547
+ const dateInput = document.getElementById('date');
548
+ const stepsSelect = document.getElementById('steps');
549
+
550
+ form.addEventListener('submit', function(e) {
551
+ if (!dateInput.value) {
552
+ e.preventDefault();
553
+ alert('Please select a date for the prediction.');
554
+ return;
555
+ }
556
+
557
+ const selectedDate = new Date(dateInput.value);
558
+ const today = new Date();
559
+ const minDate = new Date('2015-01-01');
560
+
561
+ if (selectedDate > today || selectedDate < minDate) {
562
+ e.preventDefault();
563
+ alert('Please select a date between 2015-01-01 and today.');
564
+ return;
565
+ }
566
+ });
567
+
568
+ // Update step recommendations based on selection
569
+ stepsSelect.addEventListener('change', function() {
570
+ const steps = parseInt(this.value);
571
+ const recommendations = {
572
+ 1: 'Fastest option - good for testing',
573
+ 2: 'CPU-friendly - recommended for local development',
574
+ 4: 'Standard forecast - good for GPU systems',
575
+ 6: 'Extended forecast - GPU recommended',
576
+ 8: 'Long-range forecast - GPU required',
577
+ 10: 'Maximum forecast - GPU required'
578
+ };
579
+
580
+ const small = this.parentNode.querySelector('small');
581
+ small.textContent = `Each step represents 6 hours. ${recommendations[steps] || 'Custom selection'}`;
582
+ });
583
+ });
584
+ </script>
585
+ </body>
586
+ </html>
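The template above polls /api/aurora_status and reads the `available` and `cpu_only` fields from its JSON response; the sketch below (not part of the commit) shows one possible shape for that endpoint, where the torch-based capability check is an assumption.

from flask import Flask, jsonify

app = Flask(__name__)

@app.route('/api/aurora_status')
def aurora_status():
    # Report whether the Aurora dependencies are importable and whether only CPU is available.
    try:
        import torch
        return jsonify({'available': True, 'cpu_only': not torch.cuda.is_available()})
    except ImportError:
        return jsonify({'available': False, 'cpu_only': True})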
templates/aurora_prediction_plot.html ADDED
@@ -0,0 +1,392 @@
1
+ <!DOCTYPE html>
2
+ <html lang="en">
3
+ <head>
4
+ <meta charset="UTF-8">
5
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
6
+ <title>Aurora Prediction Visualization - CAMS Dashboard</title>
7
+ <style>
8
+ * {
9
+ margin: 0;
10
+ padding: 0;
11
+ box-sizing: border-box;
12
+ }
13
+
14
+ body {
15
+ font-family: 'Arial', sans-serif;
16
+ background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
17
+ min-height: 100vh;
18
+ color: #333;
19
+ }
20
+
21
+ .container {
22
+ max-width: 1200px;
23
+ margin: 0 auto;
24
+ padding: 20px;
25
+ }
26
+
27
+ .header {
28
+ text-align: center;
29
+ margin-bottom: 30px;
30
+ color: white;
31
+ }
32
+
33
+ .header h1 {
34
+ font-size: 2.5em;
35
+ margin-bottom: 10px;
36
+ text-shadow: 2px 2px 4px rgba(0,0,0,0.3);
37
+ }
38
+
39
+ .controls-container {
40
+ background: rgba(255, 255, 255, 0.95);
41
+ border-radius: 15px;
42
+ padding: 30px;
43
+ margin-bottom: 30px;
44
+ box-shadow: 0 15px 35px rgba(0, 0, 0, 0.1);
45
+ backdrop-filter: blur(10px);
46
+ }
47
+
48
+ .controls-row {
49
+ display: grid;
50
+ grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
51
+ gap: 20px;
52
+ margin-bottom: 20px;
53
+ }
54
+
55
+ .form-group {
56
+ display: flex;
57
+ flex-direction: column;
58
+ }
59
+
60
+ .form-group label {
61
+ margin-bottom: 8px;
62
+ font-weight: bold;
63
+ color: #555;
64
+ }
65
+
66
+ .form-group select {
67
+ padding: 12px;
68
+ border: 2px solid #e1e1e1;
69
+ border-radius: 8px;
70
+ font-size: 16px;
71
+ transition: border-color 0.3s ease;
72
+ }
73
+
74
+ .form-group select:focus {
75
+ outline: none;
76
+ border-color: #667eea;
77
+ box-shadow: 0 0 0 3px rgba(102, 126, 234, 0.1);
78
+ }
79
+
80
+ .btn {
81
+ background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
82
+ color: white;
83
+ padding: 12px 25px;
84
+ border: none;
85
+ border-radius: 8px;
86
+ font-size: 16px;
87
+ cursor: pointer;
88
+ transition: transform 0.2s ease, box-shadow 0.2s ease;
89
+ }
90
+
91
+ .btn:hover {
92
+ transform: translateY(-2px);
93
+ box-shadow: 0 8px 25px rgba(102, 126, 234, 0.3);
94
+ }
95
+
96
+ .plot-container {
97
+ background: rgba(255, 255, 255, 0.95);
98
+ border-radius: 15px;
99
+ padding: 30px;
100
+ margin-bottom: 30px;
101
+ box-shadow: 0 15px 35px rgba(0, 0, 0, 0.1);
102
+ backdrop-filter: blur(10px);
103
+ text-align: center;
104
+ }
105
+
106
+ .plot-image {
107
+ max-width: 100%;
108
+ height: auto;
109
+ border-radius: 10px;
110
+ box-shadow: 0 10px 30px rgba(0, 0, 0, 0.2);
111
+ }
112
+
113
+ .info-grid {
114
+ display: grid;
115
+ grid-template-columns: repeat(auto-fit, minmax(250px, 1fr));
116
+ gap: 20px;
117
+ margin-bottom: 30px;
118
+ }
119
+
120
+ .info-card {
121
+ background: rgba(255, 255, 255, 0.95);
122
+ border-radius: 15px;
123
+ padding: 25px;
124
+ box-shadow: 0 15px 35px rgba(0, 0, 0, 0.1);
125
+ backdrop-filter: blur(10px);
126
+ }
127
+
128
+ .info-card h3 {
129
+ color: #667eea;
130
+ margin-bottom: 15px;
131
+ font-size: 1.3em;
132
+ }
133
+
134
+ .info-card p {
135
+ margin-bottom: 10px;
136
+ color: #666;
137
+ }
138
+
139
+ .download-section {
140
+ background: rgba(255, 255, 255, 0.95);
141
+ border-radius: 15px;
142
+ padding: 30px;
143
+ margin-bottom: 30px;
144
+ box-shadow: 0 15px 35px rgba(0, 0, 0, 0.1);
145
+ backdrop-filter: blur(10px);
146
+ text-align: center;
147
+ }
148
+
149
+ .download-btn {
150
+ background: linear-gradient(135deg, #28a745 0%, #20c997 100%);
151
+ color: white;
152
+ padding: 15px 30px;
153
+ border: none;
154
+ border-radius: 8px;
155
+ font-size: 18px;
156
+ text-decoration: none;
157
+ display: inline-block;
158
+ transition: transform 0.2s ease, box-shadow 0.2s ease;
159
+ margin: 10px;
160
+ }
161
+
162
+ .download-btn:hover {
163
+ transform: translateY(-2px);
164
+ box-shadow: 0 8px 25px rgba(40, 167, 69, 0.3);
165
+ text-decoration: none;
166
+ color: white;
167
+ }
168
+
169
+ .back-link {
170
+ display: inline-block;
171
+ margin-bottom: 20px;
172
+ color: white;
173
+ text-decoration: none;
174
+ font-size: 16px;
175
+ transition: opacity 0.3s ease;
176
+ }
177
+
178
+ .back-link:hover {
179
+ opacity: 0.8;
180
+ }
181
+
182
+ .back-link::before {
183
+ content: "โ† ";
184
+ }
185
+
186
+ .step-indicator {
187
+ background: #f8f9ff;
188
+ border: 2px solid #e3e7ff;
189
+ border-radius: 10px;
190
+ padding: 15px;
191
+ margin-bottom: 20px;
192
+ text-align: center;
193
+ }
194
+
195
+ .step-indicator h3 {
196
+ color: #4c63d2;
197
+ margin-bottom: 5px;
198
+ }
199
+
200
+ @media (max-width: 768px) {
201
+ .container {
202
+ padding: 10px;
203
+ }
204
+
205
+ .controls-container, .plot-container, .info-card, .download-section {
206
+ padding: 20px;
207
+ }
208
+
209
+ .header h1 {
210
+ font-size: 2em;
211
+ }
212
+
213
+ .controls-row {
214
+ grid-template-columns: 1fr;
215
+ }
216
+ }
217
+
218
+ /* Mini loading indicator for form updates */
219
+ .mini-loading {
220
+ display: none;
221
+ width: 20px;
222
+ height: 20px;
223
+ border: 2px solid #f3f3f3;
224
+ border-top: 2px solid #667eea;
225
+ border-radius: 50%;
226
+ animation: spin 1s linear infinite;
227
+ margin-left: 10px;
228
+ }
229
+
230
+ .form-updating {
231
+ opacity: 0.7;
232
+ pointer-events: none;
233
+ }
234
+ </style>
235
+ </head>
236
+ <body>
237
+ <div class="container">
238
+ <a href="{{ url_for('index') }}" class="back-link">Back to Main Dashboard</a>
239
+
240
+ <div class="header">
241
+ <h1>๐Ÿ”ฎ Aurora ML Prediction Results</h1>
242
+ <p>AI-powered atmospheric forecasting visualization</p>
243
+ </div>
244
+
245
+ <div class="step-indicator">
246
+ <h3>๐Ÿ“Š Current View: Step {{ step }} of {{ steps|length - 1 }}</h3>
247
+ <p>Forecast time: {{ (step * 6) }} hours ahead</p>
248
+ </div>
249
+
250
+ <form method="POST" id="predictionForm">
251
+ <div class="controls-container" id="controlsContainer">
252
+ <div class="controls-row">
253
+ <div class="form-group">
254
+ <label for="variable">๐Ÿงช Variable:</label>
255
+ <select id="variable" name="variable">
256
+ {% for var in variables %}
257
+ <option value="{{ var }}" {% if var == var_name %}selected{% endif %}>
258
+ {{ var }}
259
+ </option>
260
+ {% endfor %}
261
+ </select>
262
+ </div>
263
+
264
+ <div class="form-group">
265
+ <label for="step">โฐ Forecast Step:</label>
266
+ <select id="step" name="step">
267
+ {% for s in steps %}
268
+ <option value="{{ s }}" {% if s == step %}selected{% endif %}>
269
+ Step {{ s }} ({{ s * 6 }}h ahead)
270
+ </option>
271
+ {% endfor %}
272
+ </select>
273
+ </div>
274
+
275
+ <div class="form-group">
276
+ <label for="color_theme">๐ŸŽจ Color Theme:</label>
277
+ <select id="color_theme" name="color_theme">
278
+ {% for theme_id, theme_name in color_themes.items() %}
279
+ <option value="{{ theme_id }}" {% if theme_id == current_color_theme %}selected{% endif %}>
280
+ {{ theme_name }}
281
+ </option>
282
+ {% endfor %}
283
+ </select>
284
+ </div>
285
+
286
+ <div class="form-group">
287
+ <label>&nbsp;</label>
288
+ <button type="submit" class="btn" id="updateBtn">
289
+ ๐Ÿ”„ Update View
290
+ <div class="mini-loading" id="miniLoading"></div>
291
+ </button>
292
+ </div>
293
+ </div>
294
+ </div>
295
+ </form>
296
+
297
+ <div class="plot-container">
298
+ <img src="{{ url_for('serve_plot', filename=plot_filename) }}"
299
+ alt="Aurora Prediction Plot"
300
+ class="plot-image">
301
+ </div>
302
+
303
+ <div class="info-grid">
304
+ <div class="info-card">
305
+ <h3>๐Ÿ”ฌ Variable Information</h3>
306
+ <p><strong>Variable:</strong> {{ var_name }}</p>
307
+ <p><strong>Forecast Step:</strong> {{ step }}</p>
308
+ <p><strong>Time Ahead:</strong> {{ step * 6 }} hours</p>
309
+ <p><strong>Color Theme:</strong> {{ color_themes[current_color_theme] }}</p>
310
+ </div>
311
+
312
+ <div class="info-card">
313
+ <h3>๐Ÿค– Model Information</h3>
314
+ <p><strong>Model:</strong> Microsoft Aurora Air Pollution</p>
315
+ <p><strong>Version:</strong> 0.4</p>
316
+ <p><strong>Type:</strong> Foundation Model</p>
317
+ <p><strong>Resolution:</strong> Global atmospheric prediction</p>
318
+ </div>
319
+
320
+ <div class="info-card">
321
+ <h3>๐Ÿ“ˆ Forecast Details</h3>
322
+ <p><strong>Total Steps:</strong> {{ steps|length }}</p>
323
+ <p><strong>Step Interval:</strong> 6 hours</p>
324
+ <p><strong>Max Forecast:</strong> {{ (steps|length - 1) * 6 }} hours</p>
325
+ <p><strong>Variables:</strong> {{ variables|length }} predicted</p>
326
+ </div>
327
+ </div>
328
+
329
+ <div class="download-section">
330
+ <h3 style="color: #667eea; margin-bottom: 20px;">๐Ÿ“ Download Prediction Data</h3>
331
+ <p style="margin-bottom: 20px; color: #666;">
332
+ Download the complete NetCDF file containing all forecast steps and variables
333
+ </p>
334
+ <a href="{{ download_url }}" class="download-btn">
335
+ ๐Ÿ’พ Download NetCDF File
336
+ </a>
337
+ <a href="{{ url_for('aurora_predict') }}" class="download-btn" style="background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);">
338
+ ๐Ÿ”ฎ Generate New Prediction
339
+ </a>
340
+ </div>
341
+ </div>
342
+
343
+ <script>
344
+ // Enhanced form submission with loading indicators
345
+ document.addEventListener('DOMContentLoaded', function() {
346
+ const form = document.getElementById('predictionForm');
347
+ const updateBtn = document.getElementById('updateBtn');
348
+ const miniLoading = document.getElementById('miniLoading');
349
+ const controlsContainer = document.getElementById('controlsContainer');
350
+ const selects = document.querySelectorAll('select');
351
+
352
+ // Add change event listeners to selects
353
+ selects.forEach(select => {
354
+ select.addEventListener('change', function() {
355
+ showMiniLoading();
356
+
357
+ // Add a small delay to prevent rapid submissions
358
+ setTimeout(() => {
359
+ form.submit();
360
+ }, 100);
361
+ });
362
+ });
363
+
364
+ // Handle manual form submission
365
+ form.addEventListener('submit', function(e) {
366
+ showMiniLoading();
367
+ });
368
+
369
+ function showMiniLoading() {
370
+ miniLoading.style.display = 'inline-block';
371
+ controlsContainer.classList.add('form-updating');
372
+ updateBtn.disabled = true;
373
+ updateBtn.textContent = '๐Ÿ”„ Updating...';
374
+ }
375
+
376
+ // Auto-hide loading indicator if page doesn't redirect within 10 seconds
377
+ setTimeout(() => {
378
+ if (miniLoading.style.display === 'inline-block') {
379
+ hideMiniLoading();
380
+ }
381
+ }, 10000);
382
+
383
+ function hideMiniLoading() {
384
+ miniLoading.style.display = 'none';
385
+ controlsContainer.classList.remove('form-updating');
386
+ updateBtn.disabled = false;
387
+ updateBtn.innerHTML = '๐Ÿ”„ Update View<div class="mini-loading" id="miniLoading"></div>';
388
+ }
389
+ });
390
+ </script>
391
+ </body>
392
+ </html>
templates/index.html CHANGED
@@ -257,6 +257,36 @@
  </form>
  </div>

  <div class="container">
  <h2>📋 How to Use</h2>
  <ol style="line-height: 1.8;">

  </form>
  </div>

+ <div class="container">
+ <div class="method-section" style="border-left: 4px solid #9b59b6;">
+ <h2>🔮 Method 4: Aurora ML Predictions</h2>
+ <p>Generate AI-powered air pollution forecasts using Microsoft's Aurora foundation model</p>
+
+ <div style="background: #f8f9ff; padding: 15px; border-radius: 8px; margin: 15px 0; border: 2px solid #e3e7ff;">
+ <p style="margin-bottom: 10px;"><strong>🚀 What is Aurora?</strong></p>
+ <ul style="margin-left: 20px; color: #666;">
+ <li>Microsoft's state-of-the-art atmospheric foundation model</li>
+ <li>Trained on massive amounts of global weather and atmospheric data</li>
+ <li>Generates multi-step forecasts for air pollution and meteorology</li>
+ <li>Provides predictions up to 60 hours ahead with 6-hour intervals</li>
+ </ul>
+ </div>
+
+ {% if aurora_available is defined and aurora_available %}
+ <a href="{{ url_for('aurora_predict') }}" class="btn" style="background: linear-gradient(135deg, #9b59b6 0%, #8e44ad 100%);">
+ 🔮 Generate Aurora Predictions
+ </a>
+ {% else %}
+ <button class="btn" disabled style="background: #bdc3c7; cursor: not-allowed;">
+ 🔮 Aurora Model Not Available
+ </button>
+ <p style="margin-top: 10px; font-size: 14px; color: #721c24;">
+ Aurora dependencies not installed. Requires PyTorch and the microsoft-aurora package.
+ </p>
+ {% endif %}
+ </div>
+ </div>
+
  <div class="container">
  <h2>📋 How to Use</h2>
  <ol style="line-height: 1.8;">
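The new block renders only when `aurora_available` is supplied to the template; below is a hedged sketch (not part of the commit) of how that flag might be derived. The AURORA_AVAILABLE name is hypothetical and the actual wiring in app.py is not shown here.

try:
    from aurora import AuroraAirPollution  # same import the Aurora pipeline relies on
    AURORA_AVAILABLE = True
except ImportError:
    AURORA_AVAILABLE = False

# then passed from the index view, e.g.:
# render_template('index.html', aurora_available=AURORA_AVAILABLE, ...)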
test_aurora_fix.py ADDED
@@ -0,0 +1,61 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ Quick test for Aurora pipeline generator fix
4
+ """
5
+
6
+ import sys
7
+ import os
8
+ sys.path.append('.')
9
+
10
+ def test_aurora_fix():
11
+ """Test the Aurora pipeline generator fix"""
12
+ print("๐Ÿงช Testing Aurora Pipeline Generator Fix")
13
+ print("=" * 45)
14
+
15
+ try:
16
+ from aurora_pipeline import AuroraPipeline
17
+ print("โœ… Aurora pipeline imported successfully")
18
+
19
+ # Initialize pipeline
20
+ pipeline = AuroraPipeline()
21
+ print("โœ… Pipeline initialized")
22
+
23
+ # Test with minimal configuration
24
+ date = '2022-10-14'
25
+ steps = 1
26
+
27
+ print(f"\n๐Ÿš€ Running test prediction:")
28
+ print(f" ๐Ÿ“… Date: {date}")
29
+ print(f" ๐Ÿ”„ Steps: {steps}")
30
+
31
+ result = pipeline.run_pipeline(date, steps=steps)
32
+
33
+ print("โœ… SUCCESS! Aurora pipeline completed without errors")
34
+
35
+ if isinstance(result, dict):
36
+ print(f"๐Ÿ“Š Result contains: {list(result.keys())}")
37
+
38
+ # Check if we have expected outputs
39
+ if 'air_pollution' in result:
40
+ pollution_data = result['air_pollution']
41
+ print(f"๐ŸŒฌ๏ธ Air pollution predictions: {pollution_data.shape if hasattr(pollution_data, 'shape') else type(pollution_data)}")
42
+
43
+ if 'metadata' in result:
44
+ metadata = result['metadata']
45
+ print(f"๐Ÿ“‹ Metadata: {metadata}")
46
+
47
+ print("\n๐ŸŽ‰ Aurora pipeline is now working correctly!")
48
+
49
+ except Exception as e:
50
+ print(f"โŒ Error during test: {e}")
51
+ import traceback
52
+ print("\n๐Ÿ” Full traceback:")
53
+ traceback.print_exc()
54
+
55
+ print("\n๐Ÿ’ก Potential solutions:")
56
+ print("1. Check if CAMS data exists for the test date")
57
+ print("2. Verify Aurora model downloaded correctly")
58
+ print("3. Ensure all dependencies are installed")
59
+
60
+ if __name__ == "__main__":
61
+ test_aurora_fix()
test_cpu_mode.py ADDED
@@ -0,0 +1,124 @@
1
+ #!/usr/bin/env python3
2
+ """
3
+ CPU-friendly test for local development without GPU requirements
4
+ """
5
+
6
+ def test_system_readiness():
7
+ """Test if the basic system is ready for local development"""
8
+ print("๐Ÿ”ฌ Testing System for Local Development")
9
+ print("=" * 50)
10
+
11
+ issues = []
12
+
13
+ # Test 1: Basic Python imports
14
+ try:
15
+ import sys
16
+ print(f"โœ… Python version: {sys.version.split()[0]}")
17
+ except Exception as e:
18
+ issues.append(f"Python: {e}")
19
+
20
+ # Test 2: Core dependencies
21
+ try:
22
+ import numpy as np
23
+ print(f"โœ… NumPy: {np.__version__}")
24
+ except ImportError:
25
+ issues.append("NumPy not found")
26
+
27
+ try:
28
+ import xarray as xr
29
+ print(f"โœ… xarray: {xr.__version__}")
30
+ except ImportError:
31
+ issues.append("xarray not found")
32
+
33
+ try:
34
+ import matplotlib
35
+ print(f"โœ… Matplotlib: {matplotlib.__version__}")
36
+ except ImportError:
37
+ issues.append("Matplotlib not found")
38
+
39
+ try:
40
+ import flask
41
+ print(f"โœ… Flask: {flask.__version__}")
42
+ except ImportError:
43
+ issues.append("Flask not found")
44
+
45
+ # Test 3: Optional Aurora dependencies
46
+ aurora_ready = True
47
+ try:
48
+ import torch
49
+ print(f"โœ… PyTorch: {torch.__version__}")
50
+ print(f" CUDA available: {torch.cuda.is_available()}")
51
+ print(f" CPU cores: {torch.get_num_threads()}")
52
+ except ImportError:
53
+ print("โš ๏ธ PyTorch not found (Aurora unavailable)")
54
+ aurora_ready = False
55
+
56
+ try:
57
+ from huggingface_hub import hf_hub_download
58
+ print("โœ… Hugging Face Hub available")
59
+ except ImportError:
60
+ print("โš ๏ธ Hugging Face Hub not found (Aurora unavailable)")
61
+ aurora_ready = False
62
+
63
+ # Test 4: Aurora model
64
+ try:
65
+ from aurora import Batch, Metadata, AuroraAirPollution, rollout
66
+ print("โœ… Aurora model available")
67
+ except ImportError:
68
+ print("โš ๏ธ Aurora model not found (predictions unavailable)")
69
+ aurora_ready = False
70
+
71
+ # Test 5: Custom modules
72
+ try:
73
+ from data_processor import NetCDFProcessor
74
+ from plot_generator import IndiaMapPlotter
75
+ from constants import NETCDF_VARIABLES
76
+ print("โœ… Custom modules loaded")
77
+ except ImportError as e:
78
+ issues.append(f"Custom modules: {e}")
79
+
80
+ # Test 6: GPU vs CPU detection
81
+ gpu_info = "None"
82
+ try:
83
+ import subprocess
84
+ result = subprocess.run(['nvidia-smi', '--query-gpu=name', '--format=csv,noheader,nounits'],
85
+ capture_output=True, text=True, timeout=5)
86
+ if result.returncode == 0:
87
+ gpu_info = result.stdout.strip()
88
+ print(f"๐ŸŽฎ GPU detected: {gpu_info}")
89
+ else:
90
+ print("๐Ÿ’ป No GPU detected (CPU mode)")
91
+ except:
92
+ print("๐Ÿ’ป No GPU utilities found (CPU mode)")
93
+
94
+ print("\n" + "=" * 50)
95
+ print("๐Ÿ“Š System Assessment:")
96
+
97
+ if issues:
98
+ print("โŒ Critical Issues Found:")
99
+ for issue in issues:
100
+ print(f" - {issue}")
101
+ print("\n๐Ÿ”ง Please install missing dependencies")
102
+ else:
103
+ print("โœ… Core system ready!")
104
+
105
+ if aurora_ready:
106
+ print("๐Ÿ”ฎ Aurora ML predictions: Available")
107
+ if "CPU mode" in gpu_info or gpu_info == "None":
108
+ print("๐Ÿ’ป Recommended: Use CPU mode with max 2 steps")
109
+ else:
110
+ print("๐ŸŽฎ GPU available: Can use more prediction steps")
111
+ else:
112
+ print("โš ๏ธ Aurora ML predictions: Not available")
113
+ print(" Install: torch, huggingface_hub, aurora-forecast")
114
+
115
+ return len(issues) == 0
116
+
117
+ if __name__ == "__main__":
118
+ success = test_system_readiness()
119
+
120
+ if success:
121
+ print("\n๐Ÿš€ Ready to run CAMS visualization system!")
122
+ print("๐Ÿ’ก Start with: python app.py")
123
+ else:
124
+ print("\nโŒ Please resolve issues before running the system")