diff --git a/config.toml b/config.toml
index 5b15b0a..5f423fe 100755
--- a/config.toml
+++ b/config.toml
@@ -1,7 +1,7 @@
 # NOTE: this config is provided as a TEMPLATE, it should be changed by the user before using it.
 [simulation]
     name = "syria_new" # name of the simulation.
-    experiment_path = "cases/" # path where experiment folder should be placed
+    experiment_path = "WITOIL_iMagine/cases/" # path where experiment folder should be placed
     start_datetime = 2021-08-22T03:43:00 # start date of the simulation
     sim_length = 24.0 # length of the simulation in HOURS
     spill_lat = [35.25] # lat of oil spill (deg N)
@@ -10,11 +10,11 @@
     spill_rate = [27.78] # spill rate TONS/HOUR
     slick_age = [0.0] # age of oil slick in HOURS
     oil = [28] # either oil api (number) or name (string), e.g. ["Ragusa"]. Please, consider that name must be exact.
-    area_spill = true
+    area_spill = false
     area_vertex = false # comprehends thre levels of lists. 1st: all slicks. 2nd: individual slick. 3rd: Coordinates of each vertex in each individual slick
     multiple_slick = false
     advanced_parameters = false # if = true, user must provide parameters.toml file
-    advanced_parameters_path = "src/parameters.toml" # this path shuld be provided only if "advanced_parameters" is = true
+    advanced_parameters_path = "WITOIL_iMagine/src/parameters.toml" # this path should be provided only if "advanced_parameters" = true
 [download]
     download_data = true # = true if data are not provided by the user
     download_curr = true # = true : OCE data are downloaded from Copernicus Marine Service
@@ -29,8 +29,8 @@
     delta = [0.75] # default domain length in degrees (applied to both lon/lat), to download or crop data
     # note: delta is used only if set_domain = false
 [input_files.dtm]
-    bathymetry_path = "data/gebco/GEBCO_2023.nc" # GEBCO 2023 bathymetry file
-    coastline_path = "data/gshhs/f/GSHHS_f_L1.shp" # coastline shapefile gshhg
+    bathymetry_path = "WITOIL_iMagine/data/gebco/GEBCO_2023.nc" # GEBCO 2023 bathymetry file
+    coastline_path = "WITOIL_iMagine/data/gshhs/f/GSHHS_f_L1.shp" # coastline shapefile gshhg
 [input_files.metoce]
     oce_data_path = false # to provide if dowload_curr = false
     met_data_path = false # to provide if dowload_wind = false
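
All paths in the template above are now given relative to the parent directory of WITOIL_iMagine/, so the toolbox is expected to be launched from that parent directory. A minimal sketch of loading the template from there, assuming Python 3.11's standard tomllib (the loader actually used is not part of this diff):

import tomllib  # assumption: any TOML reader works; use the tomli package on older Pythons

# Run from the directory that contains WITOIL_iMagine/.
with open("WITOIL_iMagine/config.toml", "rb") as f:
    config = tomllib.load(f)

# Prints "WITOIL_iMagine/cases/", which is resolved against the working directory.
print(config["simulation"]["experiment_path"])
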
diff --git a/download/download_copernicus_parser.py b/download/download_copernicus_parser.py
deleted file mode 100644
index a2db398..0000000
--- a/download/download_copernicus_parser.py
+++ /dev/null
@@ -1,138 +0,0 @@
-import copernicusmarine
-import argparse
-import os
-import subprocess
-import datetime
-import pandas as pd
-import xarray as xr
-
-# Functions outside this script
-from src.utils.utils import *
-
-def download_copernicus(min_lat,max_lat,min_lon,max_lon,min_depth,max_depth,
-                        start_time,end_time,
-                        region,output_path,output_name,
-                        user,password):
-
-    if region == 'global':
-
-        if end_time < pd.to_datetime('2019-01-01'):
-            dataset_id = "cmems_mod_glo_phy_my_0.083deg_P1D-m"
-            output_name = output_name.format('reanalysis')
-        else:
-            dataset_id = "cmems_mod_glo_phy_anfc_0.083deg_PT1H-m"
-            output_name = output_name.format('analysis')
-
-        copernicusmarine.subset(
-            dataset_id=dataset_id,
-            variables=["uo", "vo","thetao"],
-            minimum_longitude=min_lon,
-            maximum_longitude=max_lon,
-            minimum_latitude=min_lat,
-            maximum_latitude=max_lat,
-            start_datetime=start_time,
-            end_datetime=end_time,
-            minimum_depth=min_depth,
-            maximum_depth=max_depth,
-            output_filename = "temp.nc",
-            output_directory = output_path,
-            username=user,
-            password=password,
-            force_download=True
-        )
-
-        #Transform to medslik standards
-        ds = xr.open_mfdataset(f'{output_path}temp.nc')
-
-        # Rename variables only if they exist in the dataset
-        ds = Utils.rename_netcdf_variables_mdk3(ds)
-
-        #Selecting only 4 layers
-        ds = ds.sel(depth=[0,10,30,120],method='nearest')
-        #Modifying labels to simplfy drop in temperature columns
-        ds['depth'] = [0,10,30,120]
-
-        #Selecting only the relavent variables
-        ds = ds[['uo','vo','thetao']]
-
-        #saves the daily current or temperature netcdf in the case dir
-        ds.to_netcdf(output_name)
-
-        #remove the temporary files
-        subprocess.run([f'rm -rf {output_path}temp.nc'],shell=True)
-
-    else:
-
-        if end_time < pd.to_datetime('2021-11-01'):
-            dataset_id_curr = "med-cmcc-cur-rean-h"
-            dataset_id_temp = "med-cmcc-tem-rean-d"
-            output_name = output_name.format('reanalysis')
-        else:
-            dataset_id_curr = "cmems_mod_med_phy-cur_anfc_4.2km-2D_PT1H-m"
-            dataset_id_temp = "cmems_mod_med_phy-tem_anfc_4.2km-2D_PT1H-m"
-            output_name = output_name.format('analysis')
-
-        files = []
-        for dataset in [dataset_id_curr,dataset_id_temp]:
-
-            if 'cur' in dataset:
-                copernicusmarine.subset(
-                    dataset_id=dataset_id_curr,
-                    variables=["uo", "vo"],
-                    minimum_longitude=min_lon,
-                    maximum_longitude=max_lon,
-                    minimum_latitude=min_lat,
-                    maximum_latitude=max_lat,
-                    start_datetime=start_time,
-                    end_datetime=end_time,
-                    minimum_depth=min_depth,
-                    maximum_depth=max_depth,
-                    output_filename = "curr.nc",
-                    output_directory = output_path,
-                    username=user,
-                    password=password,
-                    force_download=True
-                )
-
-                files.append(output_path+'curr.nc')
-            else:
-                copernicusmarine.subset(
-                    dataset_id=dataset_id_temp,
-                    variables=["thetao"],
-                    minimum_longitude=min_lon,
-                    maximum_longitude=max_lon,
-                    minimum_latitude=min_lat,
-                    maximum_latitude=max_lat,
-                    start_datetime=start_time,
-                    end_datetime=end_time,
-                    output_filename = "temp.nc",
-                    output_directory = output_path,
-                    username=user,
-                    password=password,
-                    force_download=True
-                )
-
-                files.append(output_path+'temp.nc')
-
-        #Transform to medslik standards
-        ds = xr.open_mfdataset(files)
-
-        # Rename variables only if they exist in the dataset
-        ds = Utils.rename_netcdf_variables_mdk3(ds)
-
-        #Selecting only 4 layers
-        try:
-            ds = ds.sel(depth=[0,10,30,120],method='nearest')
-            #Modifying labels to simplfy drop in temperature columns
-            ds['depth'] = [0,10,30,120]
-        except:
-            ds = ds.expand_dims(dim={"depth": [0,10,30,120]})
-
-        #Selecting only the relavent variables
-        ds = ds[['uo','vo','thetao']]
-
-        #saves the daily current or temperature netcdf in the case dir
-        ds.to_netcdf(output_name)
-
-        #remove the temporary files
-        subprocess.run([f'rm -rf {output_path}/curr.nc {output_path}/temp.nc'],shell=True)
\ No newline at end of file
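
The depth logic in the file deleted above (presumably retained in its src/download/ counterpart, whose diff further below shows only the import change) snaps whatever depth axis CMEMS returns onto the four layers MEDSLIK-II expects, then relabels them so current and temperature columns line up exactly. A self-contained sketch of that xarray pattern, with a synthetic dataset standing in for a real Copernicus download:

import numpy as np
import xarray as xr

# Synthetic stand-in for a downloaded CMEMS current field.
ds = xr.Dataset(
    {"uo": ("depth", np.random.rand(50))},
    coords={"depth": np.linspace(0.5, 200.0, 50)},
)

# Snap to the nearest available model layers, then relabel them to the
# nominal 0/10/30/120 m depths so later column drops match exactly.
ds = ds.sel(depth=[0, 10, 30, 120], method="nearest")
ds["depth"] = [0, 10, 30, 120]
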
diff --git a/download/download_era5_parser.py b/download/download_era5_parser.py
deleted file mode 100644
index dce153a..0000000
--- a/download/download_era5_parser.py
+++ /dev/null
@@ -1,114 +0,0 @@
-import numpy as np
-import pdb
-import datetime
-import os
-import cdsapi
-import argparse
-import subprocess
-import time
-
-# Functions outside this script
-from src.utils.utils import *
-
-def get_era5(xmin,xmax,ymin,ymax,start_date,end_date,output_path,output_name):
-    server = cdsapi.Client()
-
-    days = (end_date-start_date).days
-
-    print(ymin,ymax,xmin,xmax)
-
-    outputs = []
-
-    for i in range(0,days):
-
-        date = start_date + datetime.timedelta(days=i)
-
-        print(date)
-
-        outputname = output_path + f'temp_{str(date.year)+str(date.month).zfill(2)+str(date.day).zfill(2)}.nc'
-
-        server.retrieve(
-            'reanalysis-era5-single-levels',
-            {
-                'product_type': ['reanalysis'],
-                'data_format': 'netcdf',
-                'download_format' : 'unarchived',
-                'variable': [
-                    '10m_u_component_of_wind', '10m_v_component_of_wind',
-                ],
-                'year' : [str(date.year)],
-                'month': [str(date.month).zfill(2)],
-                'day' : [str(date.day).zfill(2)],
-                'time' : [
-                    '00:00', '01:00', '02:00',
-                    '03:00', '04:00', '05:00',
-                    '06:00', '07:00', '08:00',
-                    '09:00', '10:00', '11:00',
-                    '12:00', '13:00', '14:00',
-                    '15:00', '16:00', '17:00',
-                    '18:00', '19:00', '20:00',
-                    '21:00', '22:00', '23:00',
-                ],
-                'area': [
-                    ymax, xmin, ymin,
-                    xmax
-                ],
-            },
-            outputname)
-
-def process_era5(output_path,output_name):
-
-    met = xr.open_mfdataset('data/ERA5/temp*.nc')
-    met = Utils.rename_netcdf_variables_mdk3(met)
-
-    met.to_netcdf(output_name)
-
-    #remove the temporary files
-    subprocess.run([f'rm -rf {output_path}/temp*.nc'],shell=True)
-
-
-if __name__ == '__main__':
-
-    # Script to download daily ERA-5 files
-
-    parser = argparse.ArgumentParser(description='Download wind fields for a specific area and time window.')
-    parser.add_argument('lat_min', type=float, help='Minimum Latitude value')
-    parser.add_argument('lon_min', type=float, help='Minimum Longitude value')
-    parser.add_argument('lat_max', type=float, help='Maximum Latitude value')
-    parser.add_argument('lon_max', type=float, help='Maximum Longitude value')
-    parser.add_argument('date_min', type=str, help='Start date in yyyy-mm-dd format')
-    parser.add_argument('date_max', type=str, help='End date in yyyy-mm-dd format')
-    parser.add_argument('output_path', type=str, default='./', help='Output path (default: current directory)')
-    args = parser.parse_args()
-
-    #Set your area of interest
-    xmin = float(args.lon_min)
-    xmax = float(args.lon_max)
-    ymin = float(args.lat_min)
-    ymax = float(args.lat_max)
-
-    # Set your period of interest
-    start_date=args.date_min
-    end_date=args.date_max
-
-    print('********************************************')
-    print('PREPARING ERA5 WIND DATA - MEDSLIK II FORMAT')
-    print('Start date :' + start_date)
-    print('End date :' + end_date)
-    print('********************************************')
-
-
-    get_era5(args.lon_min,args.lon_max,args.lat_min,args.lat_max,start_date,end_date,args.output_path)
-
-# os.system('cdo -b F64 mergetime ' + out_folder + file1 + ' ' + out_folder + file2 + ' ' + out_folder + 'output.nc')
-#
-# string1 = dDate.strftime('%Y') + '-' + dDate.strftime('%m') + '-' + dDate.strftime('%d') + 'T01:00:00'
-# string2 = fDate.strftime('%Y') + '-' + fDate.strftime('%m') + '-' + fDate.strftime('%d') + 'T00:00:00'
-#
-# os.system('cdo seldate,' + string1 + ',' + string2 + ' ' + out_folder + '/output.nc ' + out_folder + '/' + file1[4::])
-# os.system('ncrename -O -d longitude,lon -d latitude,lat -v longitude,lon -v latitude,lat -v u10,U10M -v v10,V10M ' + out_folder + '/' + file1[4::])
-#
-# os.system('rm ' + out_folder + '/output.nc')
-#
-#os.system('rm ' + out_folder + '/pre_*.nc')
-
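
For reference, the deleted script's command-line interface took positional bounds, then dates, then the output path; the src/download/ replacement below is only partially shown in this diff, so its interface may differ. With illustrative bounds around the Syria case it would have been invoked as:

python download/download_era5_parser.py 34.5 35.0 36.0 36.5 2021-08-21 2021-08-23 data/ERA5/

i.e. lat_min lon_min lat_max lon_max date_min date_max output_path.
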
diff --git a/src/download/download_copernicus_parser.py b/src/download/download_copernicus_parser.py
index 6eac370..07eedf7 100644
--- a/src/download/download_copernicus_parser.py
+++ b/src/download/download_copernicus_parser.py
@@ -7,7 +7,7 @@
 import xarray as xr
 
 # Functions outside this script
-from src.utils.utils import *
+from WITOIL_iMagine.src.utils.utils import *
 
 
 def download_copernicus(
diff --git a/src/download/download_era5_parser.py b/src/download/download_era5_parser.py
index cd8543f..b23c1ef 100644
--- a/src/download/download_era5_parser.py
+++ b/src/download/download_era5_parser.py
@@ -8,7 +8,7 @@
 import time
 
 # Functions outside this script
-from src.utils.utils import *
+from WITOIL_iMagine.src.utils.utils import *
 
 def write_cds(token):
     """
@@ -73,13 +73,13 @@ def get_era5(xmin,xmax,ymin,ymax,start_date,end_date,output_path,output_name):
 
 def process_era5(output_path,output_name):
 
-    met = xr.open_mfdataset('data/ERA5/temp*.nc')
+    met = xr.open_mfdataset('WITOIL_iMagine/data/ERA5/temp*.nc')
     met = Utils.rename_netcdf_variables_mdk3(met)
 
     met.to_netcdf(output_name)
 
     #remove the temporary files
-    subprocess.run([f'rm -rf {output_path}/temp*.nc'],shell=True)
+    subprocess.run([f'rm -rf {output_path}/temp*.nc'],shell=True)
 
 
 if __name__ == '__main__':
diff --git a/src/plot/plot_mdk3.py b/src/plot/plot_mdk3.py
index b8329fd..f897f30 100644
--- a/src/plot/plot_mdk3.py
+++ b/src/plot/plot_mdk3.py
@@ -192,7 +192,7 @@ def plot_pyngl(
     """
     config = self.config
     current_folder = os.path.dirname(os.path.abspath(__file__))
-    path_to_plotspill = 'src/plot/plotngl.py'
+    path_to_plotspill = 'WITOIL_iMagine/src/plot/plotngl.py'
     root_directory = self.root_directory
     spill_lon = config["simulation"]["spill_lon"][0]
     spill_lat = config["simulation"]["spill_lat"][0]
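
Utils.rename_netcdf_variables_mdk3 itself is outside this diff. Judging from the ncrename call left commented out at the end of the deleted ERA5 script (longitude to lon, latitude to lat, u10 to U10M, v10 to V10M), a plausible sketch of the wind-side renaming follows; the function name here is hypothetical and the real helper may cover more names:

import xarray as xr

def rename_netcdf_variables_mdk3_sketch(ds: xr.Dataset) -> xr.Dataset:
    # Rename variables only if they exist in the dataset.
    mapping = {"longitude": "lon", "latitude": "lat", "u10": "U10M", "v10": "V10M"}
    present = {old: new for old, new in mapping.items()
               if old in ds.variables or old in ds.dims}
    return ds.rename(present)
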
diff --git a/src/preprocessing/preprocessing_mdk3.py b/src/preprocessing/preprocessing_mdk3.py
index 35c4d89..425692f 100644
--- a/src/preprocessing/preprocessing_mdk3.py
+++ b/src/preprocessing/preprocessing_mdk3.py
@@ -9,8 +9,8 @@
 from numpy.typing import NDArray
 from datetime import datetime, timedelta, date
 
-from src.utils.utils import Utils
-from src.utils.config import Config
+from WITOIL_iMagine.src.utils.utils import Utils
+from WITOIL_iMagine.src.utils.config import Config
 
 import logging
 
@@ -56,9 +56,8 @@ def process_currents(self, oce_path: str = None):
         logger.info("Pre processing currents")
         lon_min, lon_max, lat_min, lat_max = self.domain
         # opening all files in the directory and concatenating them automatically through open_mfdataset
-        if oce_path is None:
-            oce_path = f"{self.exp_folder}/oce_files/"
-            oce_path = os.path.join(oce_path, "*.nc")
+        oce_path = f"{self.exp_folder}/oce_files/*.nc"
+        if glob(oce_path) == []: oce_path = f"{self.exp_folder}/oce_files/*.nc"
 
         concat = xr.open_mfdataset(oce_path, combine="nested", engine="netcdf4")
 
@@ -77,9 +76,8 @@ def process_winds(self, met_path: str = None):
         logger.info("Pre processing winds")
         lon_min, lon_max, lat_min, lat_max = self.domain
         # opening all files in the directory and concatenating them automatically through open_mfdataset
-        if met_path is None:
-            met_path = f"{self.exp_folder}/met_files/*.nc"
-            met_path = os.path.join(met_path, "*.nc")
+        met_path = f"{self.exp_folder}/met_files/*.nc"
+        if glob(met_path) == []: met_path = f"{self.exp_folder}/met_files/*.nc"
 
         concat = xr.open_mfdataset(met_path, combine="nested", engine="netcdf4")
 
@@ -221,14 +219,14 @@ def process_medslik_memmory_array(self):
 
         med_for = f'{self.exp_folder}/xp_files/medslik_II.for'
 
-        subprocess.run([f'cp src/templates/medslik_II_template.for {med_for}'],shell=True)
+        subprocess.run([f'cp WITOIL_iMagine/src/templates/medslik_II_template.for {med_for}'],shell=True)
 
         # Replacing NMAX in medslik fortran with a python function
         Utils.search_and_replace(med_for, 'NMAX', str(nmax))
 
     def configuration_parameters(self):
 
-        subprocess.run([f'cp src/templates/config2.txt {self.exp_folder}/xp_files/config2.txt'],shell=True)
+        subprocess.run([f'cp WITOIL_iMagine/src/templates/config2.txt {self.exp_folder}/xp_files/config2.txt'],shell=True)
 
     def common_grid(self):
 
@@ -270,11 +268,11 @@ def write_config_files(self,
         print("...config1.txt...")
         # Iterating through slicks or doing for single simulation
         if separate_slicks == False:
-            config_file = f"cases/{simname}/xp_files/config1.txt"
+            config_file = f"WITOIL_iMagine/cases/{simname}/xp_files/config1.txt"
         else:
-            config_file = f"cases/{simname}/xp_files/slick{s_num+1}/config1.txt"
+            config_file = f"WITOIL_iMagine/cases/{simname}/xp_files/slick{s_num+1}/config1.txt"
         subprocess.run(
-            [f"cp src/templates/config1_template_0.txt {config_file}"], shell=True
+            [f"cp WITOIL_iMagine/src/templates/config1_template_0.txt {config_file}"], shell=True
         )
         # adding spill Name - Add slick number if separate slicks
         if separate_slicks == False:
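
Utils.search_and_replace is likewise not part of this diff; given how it is called above (file path, token to find, replacement text), a minimal equivalent under that assumption, enough to patch NMAX into the copied Fortran template (the function name here is hypothetical):

def search_and_replace_sketch(file_path: str, old: str, new: str) -> None:
    # In-place textual substitution, e.g. 'NMAX' -> str(nmax).
    with open(file_path) as f:
        text = f.read()
    with open(file_path, "w") as f:
        f.write(text.replace(old, new))
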