Generation of spe11x_performance_time_series_detailed.csv #29

Merged · 1 commit · Jan 31, 2024
4 changes: 1 addition & 3 deletions examples/finner_grids/spe11b.txt
@@ -52,6 +52,4 @@ PERM7 1e-5 PORO7 1e-6 THCONR7 2.00
1000 1000 0.1 1 0 10 1 0 10
25 5 0.1 1 0.035 10 1 0 10
25 5 0.1 1 0.035 10 1 0.035 10
50 25 0.1 1 0 10 1 0 10
400 50 0.1 1 0 10 1 0 10
500 100 0.1 1 0 10 1 0 10
950 5 0.1 1 0 10 1 0 10
8 changes: 4 additions & 4 deletions src/pyopmspe11/core/pyopmspe11.py
@@ -68,11 +68,11 @@ def pyopmspe11():
# Write used opm related files
opm_files(dic)

if dic["mode"] in ["all", "flow", "deck_flow", "deck_flow_data"]:
if dic["mode"] in ["all", "flow", "deck_flow", "flow_data", "deck_flow_data"]:
# Run the simulations
simulations(dic, dic["fol"].upper(), "flow")

if dic["mode"] in ["all", "data", "deck_flow_data", "data_plot"]:
if dic["mode"] in ["all", "data", "flow_data", "deck_flow_data", "data_plot"]:
# Write the data
if not os.path.exists(f"{dic['exe']}/{dic['fol']}/data"):
os.system(f"mkdir {dic['exe']}/{dic['fol']}/data")
@@ -103,8 +103,8 @@ def load_parser():
help="Run the whole framework ('all'), only create decks ('deck'), "
"only run flow ('flow'), only write benchmark data ('data'), "
"only create plots ('plot'), deck and run ('deck_flow'), "
"data and plot (data_plot), or deck, run, and data "
"(deck_flow_data) ('deck_flow' by default).",
"data and plot ('data_plot'), run and data ('flow_data'), or deck, "
"run, and data ('deck_flow_data') ('deck_flow' by default).",
)
parser.add_argument(
"-c",
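As a side note, a minimal standalone sketch of the stage selection implied by the two membership checks above; the set names and the helper function are illustrative, not part of pyopmspe11:

# Sketch only: the mode string selects which pipeline stages run.
# "flow_data" (added in this PR) runs flow and writes the benchmark
# data, but skips deck generation and plotting.
RUN_FLOW = {"all", "flow", "deck_flow", "flow_data", "deck_flow_data"}
WRITE_DATA = {"all", "data", "flow_data", "deck_flow_data", "data_plot"}

def stages_for(mode):
    """Return the pipeline stages enabled by a given mode value."""
    stages = []
    if mode in RUN_FLOW:
        stages.append("flow")  # run the simulations
    if mode in WRITE_DATA:
        stages.append("data")  # write the benchmark data
    return stages

print(stages_for("flow_data"))  # ['flow', 'data']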
39 changes: 37 additions & 2 deletions src/pyopmspe11/visualization/data.py
@@ -212,7 +212,7 @@ def opm_files(dig):


def performance(dig):
"""Write the performance within the benchmark format SECONDS_IN_YEAR"""
"""Write the performance within the benchmark format"""
dil = {"infosteps": []}
dil["times_data"] = np.linspace(
0, dig["times"][-1], round(dig["times"][-1] / dig["sparse_t"]) + 1
@@ -248,6 +248,7 @@ def performance(dig):
dil["tsteps"] = np.array(
[86400 * infostep[1] * infostep[11] for infostep in dil["infosteps"]]
)
dil["alltsteps"] = np.array([86400 * infostep[1] for infostep in dil["infosteps"]])
if dig["use"] == "opm":
fgip = dig["smspec"]["FGIP"]
times = 86400.0 * dig["smspec"]["TIME"] - dig["time_initial"]
@@ -293,6 +294,30 @@ def performance(dig):
encoding="utf8",
) as file:
file.write("\n".join(dil["text"]))
dil["text"] = []
dil["text"].append(
"# t [s], tstep [s], fsteps [-], mass [kg], dof [-], nliter [-], "
+ "nres [-], liniter [-], runtime [s], tlinsol [s]"
)
for j, time in enumerate(infotimes):
dil["text"].append(
f"{time:.3e}, "
+ f"{dil['alltsteps'][j]:.3e}, "
+ f"{dil['fsteps'][j]:.3e}, "
+ f"{interp_fgip(time):.3e}, "
+ f"{dig['dof'] * dig['nocellsa']:.3e}, "
+ f"{dil['nliters'][j]:.3e}, "
+ f"{dil['nress'][j]:.3e}, "
+ f"{dil['liniters'][j]:.3e}, "
+ f"{dil['runtimes'][j]:.3e}, "
+ f"{dil['tlinsols'][j]:.3e}"
)
with open(
f"{dig['where']}/{dig['case']}_performance_time_series_detailed.csv",
"w",
encoding="utf8",
) as file:
file.write("\n".join(dil["text"]))
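For reference, the detailed CSV written above carries one row per time step and can be read back with numpy in the same way plotting.py reads the coarse time series; a minimal sketch, with an illustrative file path:

import numpy as np

# Columns: t [s], tstep [s], fsteps [-], mass [kg], dof [-], nliter [-],
# nres [-], liniter [-], runtime [s], tlinsol [s]
csv = np.genfromtxt(
    "spe11b_performance_time_series_detailed.csv",  # illustrative path
    delimiter=",",
    skip_header=1,  # skip the "# t [s], tstep [s], ..." header line
)
times = csv[:, 0]     # report times in seconds
runtimes = csv[:, 8]  # per-step runtime in seconds
print(f"total runtime: {runtimes.sum():.3e} s over {csv.shape[0]} steps")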


def create_from_summary(dig, dil):
@@ -708,14 +733,24 @@ def generate_arrays(dig, dil, names, t_n):
dil["sgas_array"][dig["actind"]] = sgas
dil["gden_array"][dig["actind"]] = rhog * (sgas > 0.0)
dil["wden_array"][dig["actind"]] = rhow
dil["xco2_array"][dig["actind"]] = np.divide(co2_d, co2_d + h2o_l)
dil["tco2_array"][dig["actind"]] = co2_d + co2_g
dil = compute_xco2(dig, dil, co2_d, h2o_l)
if dig["case"] != "spe11a":
h2o_v = rvv * rhog * sgas * dig["porva"] * WAT_DEN_REF / GAS_DEN_REF
dil = compute_xh20(dig, dil, h2o_v, co2_g)
return dil


def compute_xco2(dig, dil, co2_d, h2o_l):
"""Compute the mass fraction of CO2 in liquid"""
mliq = co2_d + h2o_l
xco2 = 0.0 * co2_d
inds = mliq > 0.0
xco2[inds] = np.divide(co2_d[inds], mliq[inds])
dil["xco2_array"][dig["actind"]] = xco2
return dil
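A toy illustration of the zero-liquid guard introduced in compute_xco2 above; the input values are made up:

import numpy as np

# Cells with no liquid mass get x_CO2 = 0 instead of a 0/0 NaN.
co2_d = np.array([0.0, 2.0, 1.0])  # dissolved CO2 mass per cell
h2o_l = np.array([0.0, 8.0, 3.0])  # liquid water mass per cell
mliq = co2_d + h2o_l
xco2 = 0.0 * co2_d
inds = mliq > 0.0
xco2[inds] = np.divide(co2_d[inds], mliq[inds])
print(xco2)  # [0.   0.2  0.25]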


def compute_xh20(dig, dil, h2o_v, co2_g):
"""Compute the mass fraction of water in vapour"""
mgas = h2o_v + co2_g
103 changes: 49 additions & 54 deletions src/pyopmspe11/visualization/plotting.py
@@ -144,60 +144,55 @@ def plot_results(dic):

def performance(dic):
"""time solver plots"""
csv = np.genfromtxt(
f"{dic['exe']}/{dic['folders'][0]}/data/{dic['case']}_performance_time_series.csv",
delimiter=",",
skip_header=1,
)
times = [csv[i][0] for i in range(csv.shape[0])]
dic["fig"] = plt.figure(figsize=(40, 75))
plots = [
"tstep",
"fsteps",
"mass",
"dof",
"nliter",
"nres",
"liniter",
"runtime",
"tlinsol",
]
ylabels = ["s", "\\#", "kg", "\\#", "\\#", "\\#", "\\#", "s", "s"]
for k, (plot, ylabel) in enumerate(zip(plots, ylabels)):
axis = dic["fig"].add_subplot(9, 5, k + 1)
for nfol, fol in enumerate(dic["folders"]):
csv = np.genfromtxt(
f"{dic['exe']}/{fol}/data/{dic['case']}_performance_time_series.csv",
delimiter=",",
skip_header=1,
)
labels = [
f"sum={sum((csv[i][1] for i in range(csv.shape[0]))):.3e}",
f"sum={sum((csv[i][2] for i in range(csv.shape[0]))):.3e}",
f"max={max((csv[i][3] for i in range(csv.shape[0]))):.3e}",
f"max={csv[-1][4]:.3e}",
f"sum={sum((csv[i][5] for i in range(csv.shape[0]))):.3e}",
f"sum={sum((csv[i][6] for i in range(csv.shape[0]))):.3e}",
f"sum={sum((csv[i][7] for i in range(csv.shape[0]))):.3e}",
f"sum={sum((csv[i][8] for i in range(csv.shape[0]))):.3e}",
f"sum={sum((csv[i][9] for i in range(csv.shape[0]))):.3e}",
]
times = [csv[i][0] / dic["tscale"] for i in range(csv.shape[0])]
labels[k] += f" ({fol})"
axis.step(
times,
[csv[i][k + 1] for i in range(csv.shape[0])],
lw=2,
color=dic["colors"][nfol],
label=labels[k],
)
axis.set_title(plot + f", {dic['case']}")
axis.set_ylabel(ylabel)
axis.set_xlabel(f"Time [{dic['tlabel']}]")
axis.legend()
dic["fig"].savefig(
f"{dic['where']}/{dic['case']}_performance.png", bbox_inches="tight"
)
for kind in ["", "_detailed"]:
dic["fig"] = plt.figure(figsize=(40, 75))
plots = [
"tstep",
"fsteps",
"mass",
"dof",
"nliter",
"nres",
"liniter",
"runtime",
"tlinsol",
]
ylabels = ["s", "\\#", "kg", "\\#", "\\#", "\\#", "\\#", "s", "s"]
for k, (plot, ylabel) in enumerate(zip(plots, ylabels)):
axis = dic["fig"].add_subplot(9, 5, k + 1)
for nfol, fol in enumerate(dic["folders"]):
csv = np.genfromtxt(
f"{dic['exe']}/{fol}/data/{dic['case']}_performance_time_series{kind}.csv",
delimiter=",",
skip_header=1,
)
labels = [
f"sum={sum((csv[i][1] for i in range(csv.shape[0]))):.3e}",
f"sum={sum((csv[i][2] for i in range(csv.shape[0]))):.3e}",
f"max={max((csv[i][3] for i in range(csv.shape[0]))):.3e}",
f"max={csv[-1][4]:.3e}",
f"sum={sum((csv[i][5] for i in range(csv.shape[0]))):.3e}",
f"sum={sum((csv[i][6] for i in range(csv.shape[0]))):.3e}",
f"sum={sum((csv[i][7] for i in range(csv.shape[0]))):.3e}",
f"sum={sum((csv[i][8] for i in range(csv.shape[0]))):.3e}",
f"sum={sum((csv[i][9] for i in range(csv.shape[0]))):.3e}",
]
times = [csv[i][0] / dic["tscale"] for i in range(csv.shape[0])]
labels[k] += f" ({fol})"
axis.step(
times,
[csv[i][k + 1] for i in range(csv.shape[0])],
lw=2,
color=dic["colors"][nfol],
label=labels[k],
)
axis.set_title(plot + f", {dic['case']}")
axis.set_ylabel(ylabel)
axis.set_xlabel(f"Time [{dic['tlabel']}]")
axis.legend()
dic["fig"].savefig(
f"{dic['where']}/{dic['case']}_performance{kind}.png", bbox_inches="tight"
)


def sparse_data(dic):