9 changes: 9 additions & 0 deletions .streamlit/config.toml
@@ -0,0 +1,9 @@
[server]
headless = true
address = "0.0.0.0"
port = 5000
enableCORS = true
enableXsrfProtection = true

[browser]
gatherUsageStats = false
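Note: the --server.address and --server.port flags passed in the Dockerfile CMD below take precedence over this file at runtime (Streamlit resolves command-line flags before config.toml), so the effective bind address and port come from the CMD; keeping both at 0.0.0.0:5000 simply keeps the two in agreement.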
11 changes: 11 additions & 0 deletions Dockerfile
@@ -0,0 +1,11 @@
FROM mambaorg/micromamba:2.0.4

WORKDIR /app
COPY ogs-env.yml /tmp/ogs-env.yml

RUN micromamba env create -y -f /tmp/ogs-env.yml && \
micromamba clean --all --yes

COPY . /app
EXPOSE 5000
CMD ["micromamba", "run", "-n", "ogs_env", "streamlit", "run", "rom.py", "--server.address=0.0.0.0", "--server.port=5000"]
151 changes: 106 additions & 45 deletions fetch_data.py
@@ -1,25 +1,51 @@
import os
import copernicusmarine
import numpy as np
from datetime import datetime, timedelta
import netCDF4 as nc
from scipy.interpolate import RegularGridInterpolator, NearestNDInterpolator
import matplotlib.pyplot as plt
from scipy.ndimage import gaussian_filter
from pathlib import Path
import shutil

BASE_DIR = Path(__file__).resolve().parent
REQUIRED_DIR = BASE_DIR / "required_data"
DAILY_DIR = BASE_DIR / "daily_data"
BACKUP_DIR = BASE_DIR / "daily_backup"

DAILY_DIR.mkdir(exist_ok=True)
BACKUP_DIR.mkdir(exist_ok=True)

def clear_directory(directory: Path) -> None:
for item in directory.iterdir():
if item.is_dir():
shutil.rmtree(item)
else:
item.unlink()

# Back up yesterday's outputs, then clear daily_data so the exists()
# checks below actually trigger a fresh download for today.
existing_daily_files = list(DAILY_DIR.glob("*"))
if existing_daily_files:
    clear_directory(BACKUP_DIR)

    for src in existing_daily_files:
        dst = BACKUP_DIR / src.name
        if src.is_dir():
            shutil.copytree(src, dst)
        else:
            shutil.copy2(src, dst)

    clear_directory(DAILY_DIR)

masks_red = np.load(REQUIRED_DIR / "masks_red.npy")
masks = np.load(REQUIRED_DIR / "masks.npy")
print(masks_red.shape)

latitude_red = np.load(REQUIRED_DIR / "latitude_red.npy")
longitude_red = np.load(REQUIRED_DIR / "longitude_red.npy")

yesterday = datetime.today() - timedelta(days=1)
formatted_date = yesterday.strftime('%Y-%m-%d')

depths = [0, 10, 30, 60, 100]

# (lat, lon) query points for the reduced grid, shape (n_lat * n_lon, 2)
c = np.concatenate(
    [x.reshape(latitude_red.shape[0], longitude_red.shape[0], 1)
     for x in np.meshgrid(latitude_red, longitude_red, indexing="ij")],
    axis=2,
)
c = c.reshape(-1, 2)
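For illustration only (not part of this diff), the meshgrid-and-concatenate construction above is equivalent to stacking the meshgrid outputs along a new last axis; a minimal sketch with toy coordinates:

import numpy as np

lat = np.array([42.0, 42.1])
lon = np.array([11.0, 11.1, 11.2])

# stack the two meshgrid outputs along the last axis, then flatten:
# row k of pts holds the (latitude, longitude) of grid cell k
pts = np.stack(np.meshgrid(lat, lon, indexing="ij"), axis=-1).reshape(-1, 2)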
@@ -34,60 +60,95 @@

}
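(The collapsed part of this hunk defines the data dict; judging from its usage below, each entry maps a short variable name to a pair of (netCDF variable name, Copernicus Marine dataset_id).)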

depths_nc_path = DAILY_DIR / "depths.nc"
if not depths_nc_path.exists():
    copernicusmarine.subset(
        dataset_id="cmems_mod_med_phy_anfc_4.2km_static",
        minimum_longitude=11,
        maximum_longitude=18,
        minimum_latitude=42,
        maximum_latitude=47,
        minimum_depth=0,
        maximum_depth=120,
        start_datetime=formatted_date,
        end_datetime=formatted_date,
        output_filename=str(depths_nc_path),
        force_download=True,
    )

d_dataset = nc.Dataset(str(depths_nc_path)).variables["depth"][:]

vars_list = ["Chla", "N1p", "N3n", "S", "T"]

for i, depth in enumerate(depths):
    for var in vars_list:
        # model depth value closest to the requested depth
        # (np.abs is needed: argmin of the raw difference always picks the shallowest level)
        depth_tmp = d_dataset[np.argmin(np.abs(d_dataset - depth))]

        nc_filename = DAILY_DIR / f"{var}_{depth}.nc"
        npy_filename = DAILY_DIR / f"{var}_{depth}.npy"
        png_filename = DAILY_DIR / f"miao_{var}_{depth}.png"

        if not nc_filename.exists():
            copernicusmarine.subset(
                dataset_id=data[var][1],
                variables=[data[var][0]],
                minimum_longitude=11,
                maximum_longitude=18,
                minimum_latitude=42,
                maximum_latitude=47,
                start_datetime=formatted_date,
                end_datetime=formatted_date,
                minimum_depth=depth_tmp,
                maximum_depth=depth_tmp,
                output_filename=str(nc_filename),
                force_download=True,
            )
        a = nc.Dataset(str(nc_filename))
        longitude = a.variables["longitude"][:].data
        latitude = a.variables["latitude"][:].data
        tmp = a.variables[data[var][0]][0, 0].data
        a.close()

tmp[tmp > 1e10] = np.nan

rg = RegularGridInterpolator(
[latitude, longitude],
tmp,
method="nearest",
fill_value=np.nan,
)
miao = rg(c).reshape(latitude_red.shape[0], longitude_red.shape[0])

        # fill NaN cells (land / missing data) with the nearest valid value
        valid_mask = ~np.isnan(miao)
        valid_pts = c[valid_mask.ravel()]
        valid_vals = miao[valid_mask]
        nn = NearestNDInterpolator(valid_pts, valid_vals)
        miao = nn(c).reshape(latitude_red.shape[0], longitude_red.shape[0])

        miao = gaussian_filter(miao, sigma=1.5, mode='nearest')
        plt.imshow(np.ma.masked_array(miao, masks_red[i]))
        plt.savefig(png_filename)
        plt.close()

        miao_masked_flat = miao.reshape(-1)[np.logical_not(masks_red[i]).reshape(-1)]
        np.save(npy_filename, miao_masked_flat)
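
For illustration only (outside this diff), the NaN-filling step above behaves like this on a toy grid: cells that are NaN after the regular-grid interpolation take the value of their nearest valid neighbor.

import numpy as np
from scipy.interpolate import NearestNDInterpolator

field = np.array([[1.0, 2.0, 3.0],
                  [4.0, np.nan, 6.0],
                  [7.0, 8.0, 9.0]])
yy, xx = np.meshgrid(np.arange(3), np.arange(3), indexing="ij")
pts = np.stack([yy, xx], axis=-1).reshape(-1, 2)

valid = ~np.isnan(field)                  # mask of cells with data
nn = NearestNDInterpolator(pts[valid.ravel()], field[valid])
filled = nn(pts).reshape(3, 3)            # the NaN cell is now filled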

expected_files = [depths_nc_path]
for depth in depths:
    for var in vars_list:
        expected_files.append(DAILY_DIR / f"{var}_{depth}.nc")
        expected_files.append(DAILY_DIR / f"{var}_{depth}.npy")
        expected_files.append(DAILY_DIR / f"miao_{var}_{depth}.png")

for path in expected_files:
    if not path.exists():
        backup_path = BACKUP_DIR / path.name
        if backup_path.exists():
            shutil.copy2(backup_path, path)
            print(f"[RECOVER] Restored {path.name} from backup.")
        else:
            print(f"[WARNING] Missing {path.name} in both daily_data and daily_backup.")