# Source code for fmu.tools.rms.generate_petro_jobs_for_field_update

"""
Description:
    Implementation of a function to create new petrosim jobs, one per facies
    given an existing petrosim job using facies realization as input.

Summary:

    The current script is made to simplify the preparation step of the RMS project by
    creating the necessary petrosim jobs for a workflow supporting simultaneous update
    of both facies and petrophysical properties in ERT.
"""

import copy
import json
import pprint

import yaml

from fmu.tools import ROXAR

if ROXAR:
    try:
        import rmsapi
        import rmsapi.jobs as jobs
    except ModuleNotFoundError:
        import roxar as rmsapi
        import roxar.jobs as jobs
else:
    pass

from typing import Dict, List, Tuple, no_type_check

# Fixed global variables
# GRID_MODELS and GRID are components of the RMS job "owner" path
# ([GRID_MODELS, <grid name>, GRID]) used when looking up and creating jobs.
GRID_MODELS = "Grid models"
GRID = "Grid"
# RMS job type name of petrophysical modelling ("petrosim") jobs.
JOB_TYPE = "Petrophysical Modeling"
# Shared pretty-printer used for debug output of job argument dicts.
PP = pprint.PrettyPrinter(depth=7)


def main(config_file: str, debug: bool = False, report_unused: bool = False) -> None:
    """Generate new RMS petrosim jobs from an original petrosim job.

    The original petrosim job (single- or multi-zone) conditions petrophysical
    properties on an existing facies realization. For each facies named in the
    configuration, this function creates a new petrosim job that models the
    selected petrophysical variables as if all grid cells belong to that facies.
    These per-facies jobs support simultaneous update of facies and
    petrophysical properties as field parameters in ERT: their realizations are
    copied into the ERTBOX grid, exported to ERT, and later merged back into
    the geomodel grid using the facies realization as a filter, overwriting
    only the field parameters that ERT updates.

    Args:
        config_file: YAML file naming the original job, the grid model, and,
            per zone and facies, which petrophysical variables to use.
        debug: If True, print detailed debug information.
        report_unused: If True, report petrophysical variables of the original
            job that are not used in any generated job.
    """
    specification = read_specification_file(config_file)
    create_new_petro_job_per_facies(
        specification, debug_print=debug, report_unused=report_unused
    )
@no_type_check
def check_rms_project(project):
    """Raise RuntimeError unless *project* is a RoxAPI project instance.

    Args:
        project: Object expected to be an ``rmsapi.Project``.

    Raises:
        RuntimeError: If *project* is not an ``rmsapi.Project``, i.e. the
            code is not running inside an RMS/RoxAPI environment.
    """
    if not isinstance(project, rmsapi.Project):  # type: ignore
        # Fixed grammar of the original message ("must be ran").
        raise RuntimeError("This run must be run in a RoxAPI environment!")
def read_specification_file(config_file_name: str, check: bool = True) -> Dict:
    """Load the YAML job specification and optionally validate it.

    Args:
        config_file_name: Path to the YAML configuration file.
        check: When True, run consistency checks on the loaded specification.

    Returns:
        The specification as a dict.

    Raises:
        ValueError: If *check* is True and the specification is inconsistent.
    """
    with open(config_file_name, encoding="utf-8") as yml_file:
        specification = yaml.safe_load(yml_file)
    if check and not check_specification(specification):
        raise ValueError(f"Errors found in {config_file_name}")
    return specification
def check_specification(spec_dict: Dict) -> bool:
    """Check that petro variables per facies are consistent across zones.

    Every zone that uses a given facies must specify the same set of
    petrophysical variables for that facies. For example, with zones A, B, C
    where facies F1 occurs in A and C: if zone A uses P1, P2 for F1, then
    zone C must also use P1, P2 for F1.

    The original implementation compared every ordered pair of zones
    (O(zones² · facies²) with a four-level break cascade); this version does a
    single pass, remembering the first zone seen for each facies.

    Args:
        spec_dict: Specification dict with the "used_petro_var" mapping
            (zone name -> facies name -> list of petro variable names).

    Returns:
        True if the specification is consistent, otherwise False (the first
        detected mismatch is printed).
    """
    used_petro_var_dict = spec_dict["used_petro_var"]
    # facies name -> (first zone where it was seen, its petro variable list)
    first_seen: Dict[str, Tuple[str, List[str]]] = {}
    for zone_name, zone_dict in used_petro_var_dict.items():
        for facies_name, petro_list in zone_dict.items():
            if facies_name not in first_seen:
                first_seen[facies_name] = (zone_name, petro_list)
                continue
            prev_zone, prev_list = first_seen[facies_name]
            # Order does not matter, only the set of variable names.
            if set(prev_list) != set(petro_list):
                print(
                    "Petro variables to use must be "
                    "the same for all zones for facies: "
                    f"{facies_name}"
                )
                print(f" This is specified for zone {prev_zone}: {prev_list}")
                print(f" This is specified for zone {zone_name}: {petro_list}")
                return False
    return True
def define_new_variable_names_and_correlation_matrix(
    orig_var_names: List[str],
    facies_name: str,
    new_var_names: List[str],
    orig_corr_matrix: List[List[float]],
) -> Tuple[List[str], List[List[float]]]:
    """Order the kept variable names and extract their correlation sub-matrix.

    The kept names (facies-prefixed) are returned in the same relative order
    as the corresponding variables appear in *orig_var_names*. The returned
    correlation matrix is the lower-triangular sub-matrix of
    *orig_corr_matrix* restricted to the kept variables.

    Example: if the keep list is ["A", "C", "B"] and the original variable
    list is ["A", "D", "F", "B", "C"], the sorted keep list is ["A", "B", "C"].

    Args:
        orig_var_names: Variable names of the original job, in job order.
        facies_name: Facies used as prefix for the new names.
        new_var_names: Prefixed names of the variables to keep.
        orig_corr_matrix: Lower-triangular correlation matrix of the
            original variables.

    Returns:
        Tuple of (ordered prefixed names, reduced lower-triangular matrix).
        A single kept variable has no correlation matrix (empty list).
    """
    number_kept = len(new_var_names)
    if number_kept == 1:
        # No correlation matrix for a single variable.
        return new_var_names, []

    # Positions (in original order) of the kept variables, and their
    # prefixed names in that same original order.
    kept_positions: List[int] = []
    ordered_names: List[str] = []
    for position, orig_name in enumerate(orig_var_names):
        prefixed = facies_name + "_" + orig_name
        if prefixed in new_var_names:
            kept_positions.append(position)
            ordered_names.append(prefixed)

    # Lower-triangular rows of the reduced correlation matrix.
    reduced_matrix = [
        [orig_corr_matrix[kept_positions[row]][kept_positions[col]] for col in range(row)]
        for row in range(number_kept)
    ]
    return ordered_names, reduced_matrix
def sort_new_var_names(
    original_variable_names: List[str], facies_name: str, new_variable_names: List[str]
) -> List[str]:
    """Return the prefixed names ordered as in the original variable list.

    Args:
        original_variable_names: Variable names in original job order.
        facies_name: Facies used as prefix for new variable names.
        new_variable_names: Prefixed names of the variables to keep.

    Returns:
        The kept prefixed names, sorted by original variable order.
    """
    prefixed_in_order = (
        facies_name + "_" + varname for varname in original_variable_names
    )
    return [name for name in prefixed_in_order if name in new_variable_names]
def get_original_job_settings(
    owner_string_list: List[str], job_type: str, job_name: str
) -> dict:
    """Fetch the full argument dict (defaults included) of an existing job.

    Args:
        owner_string_list: Owner path of the job in the RMS project tree.
        job_type: RMS job type name.
        job_name: Name of the existing job.

    Returns:
        The job's arguments with defaults filled in.
    """
    existing_job = jobs.Job.get_job(owner_string_list, job_type, job_name)
    return existing_job.get_arguments(skip_defaults=False)
def create_copy_of_job(
    owner_string_list: List[str],
    job_type: str,
    original_job_arguments: Dict,
    new_job_name: str,
):
    """Create a new job with the given name and argument settings.

    Args:
        owner_string_list: Owner path of the job in the RMS project tree.
        job_type: RMS job type name.
        original_job_arguments: Argument dict to apply to the new job.
        new_job_name: Name of the job to create.

    Returns:
        The newly created (unsaved) job object.
    """
    copied_job = jobs.Job.create(owner_string_list, job_type, new_job_name)
    copied_job.set_arguments(original_job_arguments)
    return copied_job
def get_zone_names_per_facies(used_petro_per_zone_per_facies_dict: Dict) -> Dict:
    """Invert the zone -> facies mapping into facies -> list of zone names.

    Args:
        used_petro_per_zone_per_facies_dict: Mapping zone name -> facies
            name -> petro variable list.

    Returns:
        Mapping facies name -> list of zone names using that facies, in
        first-seen order.
    """
    facies_to_zones: Dict[str, List[str]] = {}
    for zone_name, facies_dict in used_petro_per_zone_per_facies_dict.items():
        for facies_name in facies_dict:
            facies_to_zones.setdefault(facies_name, []).append(zone_name)
    return facies_to_zones
def get_used_petro_names(used_petro_per_zone_per_facies_dict: Dict) -> List[str]:
    """Collect all unique petro variable names, preserving first-seen order.

    Args:
        used_petro_per_zone_per_facies_dict: Mapping zone name -> facies
            name -> petro variable list.

    Returns:
        All distinct petro variable names in the order first encountered.
    """
    # A dict doubles as an ordered set here (insertion order preserved).
    ordered: Dict[str, None] = {}
    for facies_map in used_petro_per_zone_per_facies_dict.values():
        for petro_list in facies_map.values():
            ordered.update(dict.fromkeys(petro_list))
    return list(ordered)
def set_new_var_name(facies_name: str, petro_name: str):
    """Build the facies-prefixed variable name used in generated jobs."""
    return f"{facies_name}_{petro_name}"
def report_unused_fields(
    owner_string_list: List[str],
    job_name: str,
    used_petro_per_zone_per_facies_dict: Dict,
):
    """Print petro variables of the original job not used in generated jobs.

    Walks the original job's zone/facies/variable models and reports every
    zone, facies, and variable that the specification does not use.

    Args:
        owner_string_list: Owner path of the original job.
        job_name: Name of the original petrosim job.
        used_petro_per_zone_per_facies_dict: Mapping zone name -> facies
            name -> petro variable list from the specification.
    """
    job_arguments = get_original_job_settings(owner_string_list, JOB_TYPE, job_name)
    # First report which field parameters from the original model
    # are not specified to be used.
    print("Report of unused petrophysical variables in generated jobs:")
    for zone_model in job_arguments["Zone Models"]:
        zone_name = zone_model["ZoneName"]
        if zone_name not in used_petro_per_zone_per_facies_dict:
            print(f" No field parameters are used from zone: {zone_name}")
            continue
        facies_usage = used_petro_per_zone_per_facies_dict[zone_name]
        for facies_model in zone_model["Facies Models"]:
            facies_name = facies_model["FaciesName"]
            if facies_name not in facies_usage:
                print(
                    " No field parameters are used for facies "
                    f"{facies_name} for zone {zone_name}"
                )
                continue
            used_names = facies_usage[facies_name]
            for petro_model in facies_model["Variable Models"]:
                var_name = petro_model["VariableName"]
                if var_name not in used_names:
                    print(
                        f" Field parameter {var_name} is not used "
                        f"for facies {facies_name} for zone {zone_name}"
                    )
    print("")
def check_consistency(
    owner_string_list: List[str],
    job_name: str,
    used_petro_per_zone_per_facies_dict: Dict,
    report_unused: bool = True,
) -> None:
    """Raise if the specification names petro variables unknown to the job.

    Args:
        owner_string_list: Owner path of the original job.
        job_name: Name of the original petrosim job.
        used_petro_per_zone_per_facies_dict: Mapping zone name -> facies
            name -> petro variable list from the specification.
        report_unused: Accepted for call compatibility; not used here.

    Raises:
        ValueError: If any specified petro variable does not exist anywhere
            in the original job (unknown names are printed first).
    """
    job_arguments = get_original_job_settings(owner_string_list, JOB_TYPE, job_name)

    # Every variable name that actually exists somewhere in the original job.
    known_names = {
        petro_model["VariableName"]
        for zone_model in job_arguments["Zone Models"]
        for facies_model in zone_model["Facies Models"]
        for petro_model in facies_model["Variable Models"]
    }

    specified_names = get_used_petro_names(used_petro_per_zone_per_facies_dict)
    unknown_names = [name for name in specified_names if name not in known_names]
    if unknown_names:
        print("Error in specification of used petrophysical variables.")
        print("Unknown petrophysical variables:")
        for name in unknown_names:
            print(f"{name}")
        raise ValueError("Unknown petrophysical variable names are specified.")
def create_new_petro_job_per_facies(
    spec_dict: Dict, debug_print: bool = False, report_unused: bool = False
) -> List[str]:
    """Create one new petrosim job per facies from the original job.

    For each facies named anywhere in the specification, a deep copy of the
    original job's arguments is made, trimmed down to the zones and the petro
    variables used for that facies, variables are renamed with the facies as
    prefix, and the correlation matrix is reduced to the kept variables. Each
    new job is checked and, if valid, saved under the name
    "<facies>_petro" (lower case).

    Args:
        spec_dict: Specification with keys "grid_name", "original_job_name"
            and "used_petro_var" (zone -> facies -> petro variable list).
        debug_print: If True, print the full argument dict of each new job.
        report_unused: If True, report original-job variables not used.

    Returns:
        The list of names of the new jobs (also for jobs that failed the
        check and were not saved).

    Raises:
        ValueError: If the specification is inconsistent with the original
            job (via check_consistency, or a facies name mismatch below).
    """
    grid_name = spec_dict["grid_name"]
    original_job_name = spec_dict["original_job_name"]
    used_petro_per_zone_per_facies_dict = spec_dict["used_petro_var"]
    if len(used_petro_per_zone_per_facies_dict.keys()) == 1:
        # Only one zone, set zone name to empty
        zone_name = list(used_petro_per_zone_per_facies_dict.keys())[0]
        tmp: Dict[str, Dict] = {}
        tmp[""] = used_petro_per_zone_per_facies_dict[zone_name]
        used_petro_per_zone_per_facies_dict = tmp

    # Original job parameter setting
    owner_string_list = [GRID_MODELS, grid_name, GRID]
    if report_unused:
        report_unused_fields(
            owner_string_list, original_job_name, used_petro_per_zone_per_facies_dict
        )
    check_consistency(
        owner_string_list,
        original_job_name,
        used_petro_per_zone_per_facies_dict,
        report_unused=report_unused,
    )
    orig_job_arguments = get_original_job_settings(
        owner_string_list, JOB_TYPE, original_job_name
    )
    zone_names_per_facies_dict = get_zone_names_per_facies(
        used_petro_per_zone_per_facies_dict
    )

    # For each facies used in any zone, find the zone models having the facies
    original_zone_models_list = orig_job_arguments["Zone Models"]
    new_job_name_list: List[str] = []
    for facies_name, zone_name_list in zone_names_per_facies_dict.items():
        if debug_print:
            print(f"Facies: {facies_name}")
        new_job_arguments_current = copy.deepcopy(orig_job_arguments)
        # Remove unused keys or keys that should be set to default for new job
        del new_job_arguments_current["InputFaciesProperty"]
        del new_job_arguments_current["PrefixOutputName"]

        # Only keep specification for zones having the facies
        new_job_arguments_current["Zone Models"] = []
        new_variable_names_all_zones: List[str] = []
        for zone_model_dict in original_zone_models_list:
            zone_name = zone_model_dict["ZoneName"]
            if zone_name in zone_name_list:
                zone_model_dict_current = copy.deepcopy(zone_model_dict)
                new_job_arguments_current["Zone Models"].append(zone_model_dict_current)
                # For this zone model remove all specifications not relevant
                # for current facies_name
                used_petro_var_list = used_petro_per_zone_per_facies_dict[zone_name][
                    facies_name
                ]
                # Loop over facies for this zone and keep only current facies
                tmp_list = zone_model_dict_current["Facies Models"]
                current_facies_model_dict = None
                for facies_model_dict in tmp_list:
                    if facies_model_dict["FaciesName"] == facies_name:
                        current_facies_model_dict = facies_model_dict
                        break
                # Here at least one facies model must exist in the tmp_list,
                # if not there is a consistency error in input dictionary
                # used_petro_per_zone_per_facies_dict related to the
                # specified original job.
                if current_facies_model_dict is None:
                    raise ValueError(
                        "There are some facies name errors in input dict"
                        " 'used_petro_per_zone_per_facies_dict'. "
                        "Check consistency with original job "
                        f"'{original_job_name}' and facies name '{facies_name}'"
                    )
                # Assign current facies model as only entry in the list
                zone_model_dict_current["Facies Models"] = [current_facies_model_dict]

                # Loop over petro variable for this facies model and only keep
                # the petro variables specified to be used
                new_petro_variable_model_list = []
                new_variable_names = []
                current_petro_variable_model_list = current_facies_model_dict[
                    "Variable Models"
                ]
                for petro_variable_dict in current_petro_variable_model_list:
                    petro_name = petro_variable_dict["VariableName"]
                    if petro_name in used_petro_var_list:
                        # Set a new name of the petro variables to keep
                        new_petro_name = set_new_var_name(facies_name, petro_name)
                        petro_variable_dict["VariableName"] = new_petro_name
                        new_variable_names.append(new_petro_name)
                        new_petro_variable_model_list.append(petro_variable_dict)
                current_facies_model_dict["Variable Models"] = (
                    new_petro_variable_model_list
                )
                original_variable_names = orig_job_arguments["VariableNames"]
                # NOTE(review): assumes "Correlation Model" always has exactly
                # one element — confirm against the rmsapi job schema.
                original_corr_model_dict = current_facies_model_dict[
                    "Correlation Model"
                ][0]
                original_corr_matrix = original_corr_model_dict["CorrelationMatrix"]
                new_variable_names, new_corr_matrix = (
                    define_new_variable_names_and_correlation_matrix(
                        original_variable_names,
                        facies_name,
                        new_variable_names,
                        original_corr_matrix,
                    )
                )
                original_corr_model_dict["CorrelationMatrix"] = new_corr_matrix

                # Add variable names for current zone which is not already
                # within the list
                for var_name in new_variable_names:
                    if var_name not in new_variable_names_all_zones:
                        new_variable_names_all_zones.append(var_name)
        new_job_arguments_current["VariableNames"] = new_variable_names_all_zones
        new_job_name = facies_name + "_petro"
        new_job_name = new_job_name.lower()
        new_job_name_list.append(new_job_name)
        print(f"Create job: {new_job_name}")
        if debug_print:
            print("-" * 100)
            PP.pprint(new_job_arguments_current)
            print("-" * 100)
        new_job = create_copy_of_job(
            owner_string_list, JOB_TYPE, new_job_arguments_current, new_job_name
        )
        # Validate the assembled job before saving; report diagnostics if bad.
        ok, err_msg_list, warn_msg_list = jobs.Job.check(new_job)
        if not ok:
            print("Error messages from created job object:")
            for err_msg in err_msg_list:
                print(f"{err_msg}")
            print("\n")
            print("Warnings from created job object:")
            for warn_msg in warn_msg_list:
                print(f"{warn_msg}")
            print(f"\nThe job with name {new_job_name} is not saved.")
        else:
            print(f"Save new job: {new_job_name}")
            new_job.save()
    return new_job_name_list
def write_petro_job_to_file(
    owner_string_list: List[str], job_type: str, job_name: str, filename: str
) -> None:
    """Dump the argument dict of an existing job to a file as pretty JSON.

    Args:
        owner_string_list: Owner path of the job in the RMS project tree.
        job_type: RMS job type name.
        job_name: Name of the existing job.
        filename: Path of the output JSON file.
    """
    job_instance = jobs.Job.get_job(
        owner=owner_string_list, type=job_type, name=job_name
    )
    arguments = job_instance.get_arguments(True)
    # Fixed: original printed a literal "(unknown)" placeholder instead of
    # the actual file name.
    print(f"Write file: {filename}")
    with open(filename, "w", encoding="utf-8") as outfile:
        outfile.write(json.dumps(arguments, sort_keys=True, indent=3))
        outfile.write("\n")
if __name__ == "__main__":
    # Default configuration file name when run as a standalone script.
    main("generate_petro_jobs.yml")