Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 20 additions & 0 deletions src/CSET/cset_workflow/app/fetch_fcst/bin/fetch_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
import itertools
import logging
import os
import pickle
import ssl
import urllib.parse
import urllib.request
Expand All @@ -30,6 +31,8 @@

import isodate

from CSET.operators import read

logging.basicConfig(
level=os.getenv("LOGLEVEL", "INFO"), format="%(asctime)s %(levelname)s %(message)s"
)
Expand Down Expand Up @@ -294,6 +297,23 @@ def fetch_data(file_retriever: FileRetrieverABC):
if not any_files_found:
raise FileNotFoundError("No files found for model!")

# Create the load cache for this model.
prime_load_cache(cycle_data_dir)


def prime_load_cache(data_directory: str):
    """Create the pickled load cache for a model's data directory.

    Reads all cubes from ``data_directory`` via ``read.read_cubes`` (which
    applies the usual loading callbacks), strips the bookkeeping attribute
    that read_cubes adds, and pickles the resulting CubeList to a
    ``loadcache.pickle`` file inside the same directory so later loads can
    skip the expensive read step.

    Parameters
    ----------
    data_directory: str
        Directory containing the fetched model data; the cache file is
        written alongside the data.
    """
    # Load in the cubes, applying all the callbacks and such.
    logging.info("Reading in cubes for caching.")
    cubes = read.read_cubes(data_directory)
    # Remove the added cset_comparison_base attribute so the cached cubes
    # are neutral with respect to the comparison base. Use pop() rather
    # than del so a cube missing the attribute does not raise KeyError.
    for cube in cubes:
        cube.attributes.pop("cset_comparison_base", None)
    logging.info("Writing cache file.")
    # Pickle to a cache file.
    with open(Path(data_directory, "loadcache.pickle"), "wb") as fp:
        pickle.dump(cubes, fp)


def fetch_obs(obs_retriever: FileRetrieverABC):
"""Fetch the observations corresponding to a model run.
Expand Down
20 changes: 14 additions & 6 deletions src/CSET/operators/read.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
import glob
import itertools
import logging
import pickle
from pathlib import Path
from typing import Literal

Expand Down Expand Up @@ -218,12 +219,19 @@ def _load_model(
constraint: iris.Constraint | None,
) -> iris.cube.CubeList:
"""Load a single model's data into a CubeList."""
input_files = _check_input_files(paths)
# If unset, a constraint of None lets everything be loaded.
logging.debug("Constraint: %s", constraint)
cubes = iris.load(input_files, constraint, callback=_loading_callback)
# Make the UM's winds consistent with LFRic.
_fix_um_winds(cubes)
cache_file = Path(paths, "loadcache.pickle") if isinstance(paths, str) else None
if cache_file and cache_file.is_file():
# Load from pickled cache.
with open(cache_file, "rb") as fp:
all_cubes = pickle.load(fp)
cubes = all_cubes.extract(constraint)
else:
input_files = _check_input_files(paths)
# If unset, a constraint of None lets everything be loaded.
logging.debug("Constraint: %s", constraint)
cubes = iris.load(input_files, constraint, callback=_loading_callback)
# Make the UM's winds consistent with LFRic.
_fix_um_winds(cubes)

# Add model_name attribute to each cube to make it available at any further
# step without needing to pass it as function parameter.
Expand Down
Loading