import os

import matplotlib as mpl  # used by scatter_hist (mpl.colors.LogNorm)
import matplotlib.pyplot as plt
import numpy as np
import osyris
from numba import jit, prange
from scipy.spatial import KDTree

# Unit shorthands used throughout the module
pc = osyris.units("pc")
year = osyris.units("year")

def recenter(array, center, periodicity=[0, 1, 2]):
    """Return positions relative to ``center`` in pc, wrapping the periodic axes.

    Components larger than 500 pc are wrapped back by 1000 pc (one box length)
    along the axes listed in ``periodicity``.
    """
    radial_vector = (array - center).to(pc)
    radial_vector = np.array(
        [radial_vector.x.values, radial_vector.y.values, radial_vector.z.values]
    ).T
    for i in periodicity:
        mask = np.abs(radial_vector[:, i]) > 500
        radial_vector[:, i][mask] -= np.sign(radial_vector[:, i][mask]) * 1000
    radial_vector = osyris.Vector(
        x=radial_vector[:, 0], y=radial_vector[:, 1], z=radial_vector[:, 2], unit=pc
    )
    return radial_vector

def find_tracer_index(data, tracer_id):
    """Return the particle index of the tracer with identity ``tracer_id``."""
    mask_tracer = data["part"]["family"].values == 0
    tracer_index = np.nonzero((data["part"]["identity"].values == tracer_id) & mask_tracer)[0]
    return int(tracer_index[0])

def find_closest_tracer(data, position):
    """Return the identity of the tracer particle closest to ``position``."""
    mask_tracer = data["part"]["family"].values == 0
    distance = (data["part"]["position"] - position).norm.to("pc").values[mask_tracer]
    closest_tracer_index = np.argmin(distance)
    closest_tracer_id = int(data["part"]["identity"][mask_tracer][closest_tracer_index].values)
    return closest_tracer_id

def compute_derived_quantities(data, center=None, tracer=None, find_tracer=False, periodicity=[0, 1, 2]):
    """Add derived fields to ``data`` and recenter all positions.

    The reference frame is chosen from, in order of priority: an explicit
    ``tracer`` identity, the tracer closest to the potential minimum (if
    ``find_tracer``), the potential minimum itself, or an explicit ``center``.
    Returns the tracer identity used (if any), otherwise the chosen center.
    """
    mesh = data["mesh"]
    mesh["grav_potential"] = mesh["grav_potential"].to("pc^2 / Myr^2")
    mesh["grav_force"] = mesh["density"] * mesh["grav_acceleration"] * mesh["dx"]**3

    mesh["mass"] = (mesh["density"] * (mesh["dx"] ** 3)).to("M_sun")

    # Cell at the minimum of the gravitational potential
    ind = np.argmin(mesh["grav_potential"])
    pos_minimum = mesh["position"][ind]
    vel_minimum = mesh["velocity"][ind]

    # Choose the reference position and velocity used to recenter the data
    if isinstance(tracer, int):
        tracer_index = find_tracer_index(data, tracer)
        center = data["part"]["position"][tracer_index]
        vel_ref = data["part"]["velocity"][tracer_index]
    elif center is None and find_tracer:
        tracer = find_closest_tracer(data, pos_minimum)
        close_tracer_index = find_tracer_index(data, tracer)
        center = data["part"]["position"][close_tracer_index]
        vel_ref = data["part"]["velocity"][close_tracer_index]
    elif center is None:
        center = pos_minimum
        vel_ref = vel_minimum
    else:
        tracer = find_closest_tracer(data, center)
        close_tracer_index = find_tracer_index(data, tracer)
        vel_ref = data["part"]["velocity"][close_tracer_index]

    mesh["rel_velocity"] = mesh["velocity"] - vel_ref

    sink = data["sink"]
    sink["age"] = -(sink["tform"] - data.meta["time"])

    part = data["part"]

    mesh["radial_vector"] = recenter(mesh["position"], center, periodicity=periodicity)
    sink["radial_vector"] = recenter(sink["position"], center, periodicity=periodicity)
    part["radial_vector"] = recenter(part["position"], center, periodicity=periodicity)

    # Save the old positions before overwriting them with the recentered ones
    if "old_position" not in mesh.keys():
        mesh["old_position"] = mesh["position"]
        sink["old_position"] = sink["position"]
        part["old_position"] = part["position"]

    mesh["position"].x._array = mesh["radial_vector"].x.to("cm").values
    mesh["position"].y._array = mesh["radial_vector"].y.to("cm").values
    mesh["position"].z._array = mesh["radial_vector"].z.to("cm").values

    sink["position"].x._array = sink["radial_vector"].x.to("cm").values
    sink["position"].y._array = sink["radial_vector"].y.to("cm").values
    sink["position"].z._array = sink["radial_vector"].z.to("cm").values

    part["position"].x._array = part["radial_vector"].x.to("cm").values
    part["position"].y._array = part["radial_vector"].y.to("cm").values
    part["position"].z._array = part["radial_vector"].z.to("cm").values

    # Velocities reconstructed from the integrated accelerations
    data["part"]["velocity_grav"] = data["part"]["vp_grav"] * data.units["velocity"]
    data["part"]["velocity_init"] = data["part"]["vp_init"] * data.units["velocity"]
    data["part"]["velocity_diff"] = data["part"]["velocity"] - data["part"]["velocity_init"]

    # Shift the potential gauge so that its minimum is zero
    min_pot = np.min(data["mesh"]["grav_potential"].values)
    data["mesh"]["grav_potential"].values = data["mesh"]["grav_potential"].values - min_pot
    data["mesh"]["grav_potential"] = data["mesh"]["grav_potential"].to("km*m/s^2")

    # Compute the temperature, assuming a mean molecular weight of 1.4
    mesh["temperature"] = (
        (mesh["pressure"] / mesh["density"])
        * (1.4 * osyris.units("proton_mass") / osyris.units("boltzmann_constant"))
    ).to("K")

    # Distance from each particle to the nearest sink
    pos_sinks = data["sink"]["position"].to("pc").values
    tree = KDTree(pos_sinks)
    dist_sinks, _ = tree.query(data["part"]["position"].to("pc").values)
    data["part"]["dist_sinks"] = osyris.Array(dist_sinks, unit=pc)

    if isinstance(tracer, int):
        return tracer
    else:
        return center

def load_snapshot(dataset, ind_cell_d, path, snap):
    """Load snapshot ``snap`` into ``dataset`` and link its tracers to cells."""
    print(f"Load dataset {snap}")
    dataset[snap] = osyris.RamsesDataset(snap, path=os.path.expanduser(path)).load(
        select=["mesh", "hydro", "part", "sink"]
    )

    print(f"Compute derived quantities {snap}")
    compute_derived_quantities(dataset[snap], periodicity=[0, 1])

    print(f"Build tracer index {snap}")
    ind_cell_d[snap] = link_tracer(dataset, snap)

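# Usage sketch for `load_snapshot` (hypothetical example, not part of the
# original workflow): the output path and the snapshot numbers below are
# placeholders.
def example_load_snapshots(path="~/ramses/output", snaps=(10, 20)):
    """Load a few snapshots into a dict and link their tracers to cells."""
    dataset = {}     # snapshot number -> osyris.RamsesDataset
    ind_cell_d = {}  # snapshot number -> index of the cell hosting each tracer
    for snap in snaps:
        load_snapshot(dataset, ind_cell_d, path, snap)
    return dataset, ind_cell_d
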
## Link tracers to cells

def link_tracer(dataset, snap):
    """Return, for each tracer in ``snap``, the index of the nearest cell.

    The KD-tree query uses the Chebyshev metric (p=inf), i.e. the nearest
    cell centre in the max-norm sense.
    """
    mask_tracer = dataset[snap]["part"]["family"].values == 0

    pos_cells = dataset[snap]["mesh"]["position"].to("pc").values
    pos_tracers = dataset[snap]["part"]["position"].to("pc").values[mask_tracer]
    kdtree = KDTree(pos_cells)
    dist, ind_cell = kdtree.query(pos_tracers, k=1, p=np.inf)
    return ind_cell

def build_tracer_index(dataset, snaps):
    ind_cell_d = {}
    for i in snaps:
        print(f"Linking tracers to cells for snapshot {i}...")
        ind_cell_d[i] = link_tracer(dataset, i)
    return ind_cell_d

def get_cell_data(dataset, ind_cell_d, snap, key, unit):
    ind_cell = ind_cell_d[snap]
    cell_data = dataset[snap]["mesh"][key].to(unit).values[ind_cell]
    return cell_data

@jit(nopython=True, parallel=True)
def compute_fraction_gravdom_helper(mass, dist, aligned, bins, cumulative: bool, masks: np.ndarray, stacked=False):
    # One row per mask (at least one row when no masks are given),
    # one column per distance bin.
    fraction = np.zeros((max(1, masks.shape[0]), bins.shape[0]), dtype=np.float64)

    for i in prange(len(bins)):
        if i > 0 and not cumulative:
            mask_distance = (bins[i - 1] < dist) & (dist <= bins[i])
        else:
            mask_distance = dist <= bins[i]

        if np.sum(mask_distance) == 0:
            fraction[:, i] = np.nan
        else:
            for m in range(masks.shape[0]):
                if stacked:
                    mask_total = mask_distance
                else:
                    mask_total = mask_distance & masks[m]
                fraction[m, i] = np.nansum(mass[mask_distance & aligned & masks[m]]) / np.nansum(mass[mask_total])
            if masks.shape[0] == 0:
                fraction[0, i] = np.nansum(mass[mask_distance & aligned]) / np.nansum(mass[mask_distance])

    return fraction

def compute_fraction_tracer(dataset, i, bins, id_grav_dom, cumulative=True, masks={}, mask_id=False, stacked=False):
    mask_tracer = dataset[i]["part"]["family"].values == 0
    mask_tracer_gravdom = np.isin(dataset[i]["part"]["identity"].values[mask_tracer], id_grav_dom, assume_unique=True)
    mass = dataset[i]["part"]["mass"].values[mask_tracer]
    dist = dataset[i]["part"]["radial_vector"].norm.to(pc).values[mask_tracer]
    if mask_id:
        # Convert masks given as arrays of tracer identities into boolean masks
        masks = {key: np.isin(dataset[i]["part"]["identity"].values[mask_tracer], value, assume_unique=True)
                 for key, value in masks.items()}

    fraction = compute_fraction_gravdom_helper(
        mass, dist, mask_tracer_gravdom, bins, cumulative,
        np.array(list(masks.values()), dtype=np.bool_), stacked=stacked,
    )
    if len(masks) > 0:
        fraction = dict(zip(masks.keys(), fraction))
    else:
        fraction = fraction[0]
    return bins, fraction

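# Usage sketch for `compute_fraction_tracer` (hypothetical example): `dataset`
# comes from `load_snapshot` and `id_grav_dom` from `get_aligment` defined
# below; the radial bin range is an arbitrary placeholder.
def example_fraction_profile(dataset, snap, id_grav_dom):
    """Fraction of gravity-dominated tracer mass inside each radius."""
    bins = np.logspace(-2, 2, 40)  # radial bins in pc (placeholder range)
    bins, fraction = compute_fraction_tracer(dataset, snap, bins, id_grav_dom, cumulative=True)
    return bins, fraction
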
def get_aligment(dataset, ind_cell_d, start, end):
    """Compare tracers between snapshots ``start`` and ``end``.

    Tracers are matched by identity, and the differences in distance,
    velocity, gravity-integrated velocity and local density are computed.
    A tracer is flagged as gravity dominated when the acceleration
    reconstructed from the gravity-integrated velocity exceeds the residual
    (non-gravitational) acceleration.
    """
    mask_tracer_end = dataset[end]["part"]["family"].values == 0
    distance_end = dataset[end]["part"]["position"].norm.to(pc).values[mask_tracer_end]
    id_end = dataset[end]["part"]["identity"].values[mask_tracer_end]

    sort_end = np.argsort(id_end)
    unsort_end = np.argsort(sort_end)
    velocity_end = dataset[end]["part"]["velocity"].to("km/s").values[mask_tracer_end]
    velocity_grav_end = dataset[end]["part"]["velocity_grav"].to("km/s").values[mask_tracer_end]
    density_end = get_cell_data(dataset, ind_cell_d, end, "density", "g/cm**3")

    mask_tracer_start = dataset[start]["part"]["family"].values == 0
    distance_start = dataset[start]["part"]["position"].norm.to(pc).values[mask_tracer_start]
    id_start = dataset[start]["part"]["identity"].values[mask_tracer_start]
    sort_start = np.argsort(id_start)
    velocity_start = dataset[start]["part"]["velocity"].to("km/s").values[mask_tracer_start]
    velocity_grav_start = dataset[start]["part"]["velocity_grav"].to("km/s").values[mask_tracer_start]
    density_start = get_cell_data(dataset, ind_cell_d, start, "density", "g/cm**3")

    # Differences computed in identity order, then mapped back to the
    # particle order of the ``end`` snapshot
    distance_diff_sorted = distance_end[sort_end] - distance_start[sort_start]
    velocity_diff_sorted = velocity_end[sort_end] - velocity_start[sort_start]
    velocity_grav_diff_sorted = velocity_grav_end[sort_end] - velocity_grav_start[sort_start]
    density_diff_sorted = density_end[sort_end] - density_start[sort_start]

    distance_diff_end = distance_diff_sorted[unsort_end]
    velocity_diff_end = velocity_diff_sorted[unsort_end]
    velocity_grav_diff_end = velocity_grav_diff_sorted[unsort_end]
    density_diff_end = density_diff_sorted[unsort_end]

    sorters = dict(
        sort_end=sort_end,
        unsort_end=unsort_end,
        sort_start=sort_start,
    )

    alignement = dict(
        start=start,
        end=end,
        velocity_diff_end=velocity_diff_end,  # km/s
        velocity_grav_diff_end=velocity_grav_diff_end,  # km/s
        distance_diff_end=distance_diff_end,  # pc
        density_diff_end=density_diff_end,  # g/cm3
        sorters=sorters,
    )

    delta_t = dataset[end].meta["time"].to("Myr").magnitude - dataset[start].meta["time"].to("Myr").magnitude
    alignement['delta_t'] = delta_t  # Myr

    # Mean accelerations over the interval: gravity only, total, and the rest
    acc_grav = alignement['velocity_grav_diff_end'] / delta_t
    alignement['acc_grav'] = osyris.Vector(x=acc_grav[:, 0], y=acc_grav[:, 1], z=acc_grav[:, 2], unit="km/s/Myr")
    acc_tot = alignement['velocity_diff_end'] / delta_t
    alignement['acc_tot'] = osyris.Vector(x=acc_tot[:, 0], y=acc_tot[:, 1], z=acc_tot[:, 2], unit="km/s/Myr")
    acc_other = acc_tot - acc_grav
    alignement['acc_other'] = osyris.Vector(x=acc_other[:, 0], y=acc_other[:, 1], z=acc_other[:, 2], unit="km/s/Myr")

    norm_acc_grav = np.linalg.norm(acc_grav, axis=1)
    norm_acc_other = np.linalg.norm(acc_other, axis=1)

    mask_grav_dom = norm_acc_grav > norm_acc_other

    mask_denser = alignement['density_diff_end'] > 0
    mask_grav_dom_denser = mask_grav_dom & mask_denser

    alignement["mask_gravdom"] = mask_grav_dom
    alignement['id_grav_dom_denser'] = id_end[mask_grav_dom_denser]
    alignement["id_grav_dom"] = id_end[mask_grav_dom]

    return alignement

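# Usage sketch for `get_aligment` (hypothetical example): `start` and `end`
# are two snapshot numbers already loaded with `load_snapshot`; the result is
# keyed by `end` so it can be passed to the mass-inflow functions below.
def example_alignment(dataset, ind_cell_d, start, end):
    """Flag gravity-dominated tracers between two snapshots."""
    alignement_d = {end: get_aligment(dataset, ind_cell_d, start, end)}
    id_grav_dom = alignement_d[end]["id_grav_dom"]
    return alignement_d, id_grav_dom
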
def compute_mass_inflow_distance(dataset, alignement_d, end, masks=None, key_mask_gravdom="id_grav_dom"):
    """Mass flux of tracers across spheres of increasing radius between two snapshots."""
    if masks is None:
        masks = {}

    mask_tracer_end = dataset[end]["part"]["family"].values == 0

    distance_end = dataset[end]["part"]["position"].norm.to(pc).values[mask_tracer_end]
    distance_diff_end = alignement_d[end]['distance_diff_end']
    distance_start = distance_end - distance_diff_end

    mass_total = dataset[end]["part"]["mass"].to("M_sun").values[mask_tracer_end]

    delta_t = alignement_d[end]['delta_t']
    dist = np.logspace(np.log10(np.min(distance_end)), np.log10(np.max(distance_end)), 55)
    flow_total = np.zeros_like(dist)
    flow_gravdom = np.zeros_like(dist)
    enclosed_mass_start = np.zeros_like(dist)
    enclosed_mass_end = np.zeros_like(dist)

    id_tracer_end = dataset[end]["part"]["identity"].values[mask_tracer_end]

    # ``key_mask_gravdom`` can be a key into the alignment dict, a boolean
    # mask, or an array of tracer identities
    if isinstance(key_mask_gravdom, str):
        mask_tracer_gravdom = np.isin(id_tracer_end, alignement_d[end][key_mask_gravdom], assume_unique=True)
    elif isinstance(key_mask_gravdom, np.ndarray) and key_mask_gravdom.dtype == np.bool_:
        mask_tracer_gravdom = key_mask_gravdom
    else:
        mask_tracer_gravdom = np.isin(id_tracer_end, key_mask_gravdom, assume_unique=True)

    print(f"delta_t = {delta_t} Myr")
    for i, d in enumerate(dist):
        # Tracers inside radius d at ``start`` that are outside it at ``end``
        in_flow = (distance_end > d) & (distance_start < d)
        flow_total[i] = np.sum(mass_total[in_flow]) / delta_t
        flow_gravdom[i] = np.sum(mass_total[in_flow & mask_tracer_gravdom]) / delta_t
        enclosed_mass_start[i] = np.sum(mass_total[distance_start > d])
        enclosed_mass_end[i] = np.sum(mass_total[distance_end > d])

    flow_mask = {}
    enclosed_mass_end_mask = {}
    enclosed_mass_start_mask = {}
    for key in masks:
        flow_mask[key] = np.zeros_like(dist)
        flow_mask[key + "_gravdom"] = np.zeros_like(dist)
        enclosed_mass_end_mask[key] = np.zeros_like(dist)
        enclosed_mass_start_mask[key] = np.zeros_like(dist)
        for i, d in enumerate(dist):
            in_flow = (distance_end > d) & (distance_start < d)
            flow_mask[key][i] = np.sum(mass_total[in_flow & masks[key]]) / delta_t
            flow_mask[key + "_gravdom"][i] = np.sum(mass_total[in_flow & masks[key] & mask_tracer_gravdom]) / delta_t
            enclosed_mass_end_mask[key][i] = np.sum(mass_total[(distance_end > d) & masks[key]])
            enclosed_mass_start_mask[key][i] = np.sum(mass_total[(distance_start > d) & masks[key]])

    result = dict(
        distance=dist,
        flow_total=flow_total,
        flow_gravdom=flow_gravdom,
        enclosed_mass_start=enclosed_mass_start,
        enclosed_mass_end=enclosed_mass_end,
        flow_mask=flow_mask,
        enclosed_mass_start_mask=enclosed_mass_start_mask,
        enclosed_mass_end_mask=enclosed_mass_end_mask,
    )

    return result

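# Usage sketch for `compute_mass_inflow_distance` (hypothetical example): it
# reuses the `alignement_d` dict built in the alignment sketch above; the plot
# styling is arbitrary.
def example_mass_flow(dataset, alignement_d, end):
    """Plot the tracer mass flow rate as a function of radius."""
    result = compute_mass_inflow_distance(dataset, alignement_d, end)
    fig, ax = plt.subplots()
    ax.loglog(result["distance"], result["flow_total"], label="total")
    ax.loglog(result["distance"], result["flow_gravdom"], label="gravity dominated")
    ax.set_xlabel("distance [pc]")
    ax.set_ylabel(r"mass flow [M$_\odot$ / Myr]")
    ax.legend()
    return fig, ax
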
def compute_mass_inflow_density(dataset, ind_cell_d, alignement_gravdom, end, masks=None, key_mask_gravdom="id_grav_dom", alignement_inflow=None):
    """Mass flux of tracers across density thresholds between two snapshots."""
    if masks is None:
        masks = {}

    mask_tracer_end = dataset[end]["part"]["family"].values == 0

    if alignement_inflow is None:
        alignement_inflow = alignement_gravdom

    density_end = get_cell_data(dataset, ind_cell_d, end, "density", "g/cm**3")
    density_diff_end = alignement_inflow[end]['density_diff_end']
    density_start = density_end - density_diff_end

    mass_total = dataset[end]["part"]["mass"].to("M_sun").values[mask_tracer_end]

    delta_t = alignement_gravdom[end]['delta_t']
    dens = np.logspace(np.log10(np.min(density_end)), np.log10(np.max(density_end)), 55)
    flow_total = np.zeros_like(dens)
    flow_gravdom = np.zeros_like(dens)
    enclosed_mass_start = np.zeros_like(dens)
    enclosed_mass_end = np.zeros_like(dens)

    id_tracer_end = dataset[end]["part"]["identity"].values[mask_tracer_end]

    # ``key_mask_gravdom`` can be a key into the alignment dict, a boolean
    # mask, or an array of tracer identities
    if isinstance(key_mask_gravdom, str):
        mask_tracer_gravdom = np.isin(id_tracer_end, alignement_gravdom[end][key_mask_gravdom], assume_unique=True)
    elif isinstance(key_mask_gravdom, np.ndarray) and key_mask_gravdom.dtype == np.bool_:
        mask_tracer_gravdom = key_mask_gravdom
    else:
        mask_tracer_gravdom = np.isin(id_tracer_end, key_mask_gravdom, assume_unique=True)

    print(f"delta_t = {delta_t} Myr")
    for i, d in enumerate(dens):
        # Tracers below density d at ``start`` that are above it at ``end``
        in_flow = (density_end > d) & (density_start < d)
        flow_total[i] = np.sum(mass_total[in_flow]) / delta_t
        flow_gravdom[i] = np.sum(mass_total[in_flow & mask_tracer_gravdom]) / delta_t
        enclosed_mass_start[i] = np.sum(mass_total[density_start > d])
        enclosed_mass_end[i] = np.sum(mass_total[density_end > d])

    flow_mask = {}
    enclosed_mass_end_mask = {}
    enclosed_mass_start_mask = {}
    for key in masks:
        flow_mask[key] = np.zeros_like(dens)
        flow_mask[key + "_gravdom"] = np.zeros_like(dens)
        enclosed_mass_end_mask[key] = np.zeros_like(dens)
        enclosed_mass_start_mask[key] = np.zeros_like(dens)
        for i, d in enumerate(dens):
            in_flow = (density_end > d) & (density_start < d)
            flow_mask[key][i] = np.sum(mass_total[in_flow & masks[key]]) / delta_t
            flow_mask[key + "_gravdom"][i] = np.sum(mass_total[in_flow & masks[key] & mask_tracer_gravdom]) / delta_t
            enclosed_mass_end_mask[key][i] = np.sum(mass_total[(density_end > d) & masks[key]])
            enclosed_mass_start_mask[key][i] = np.sum(mass_total[(density_start > d) & masks[key]])

    result = dict(
        density=dens,
        flow_total=flow_total,
        flow_gravdom=flow_gravdom,
        enclosed_mass_start=enclosed_mass_start,
        enclosed_mass_end=enclosed_mass_end,
        flow_mask=flow_mask,
        enclosed_mass_start_mask=enclosed_mass_start_mask,
        enclosed_mass_end_mask=enclosed_mass_end_mask,
    )

    return result

## Plot helpers

def linehist(x, weights, bins=100, density=False, ax=None, **kwargs):
    """
    Draw a histogram as a line plot.
    """
    if ax is None:
        ax = plt.gca()
    hist, bin_edges = np.histogram(x, bins=bins, weights=weights, density=density)
    bin_centers = (bin_edges[:-1] + bin_edges[1:]) / 2
    ax.plot(bin_centers, hist, **kwargs)
    return bin_centers, hist

def scatter_hist(x, y, weights, ax, ax_histx, ax_histy):
    """
    Scatter plot with marginal histograms.
    """
    # No tick labels on the marginal axes
    ax_histx.tick_params(axis="x", labelbottom=False)
    ax_histy.tick_params(axis="y", labelleft=False)

    # 2D histogram in the main axes
    im = ax.hist2d(x, y, bins=100, weights=weights, norm=mpl.colors.LogNorm())

    # Marginal histograms with logarithmic count axes
    ax_histx.hist(x, bins=100, weights=weights, density=True, histtype="step", lw=2)
    ax_histy.hist(y, bins=100, orientation='horizontal', weights=weights, histtype="step", density=True, lw=2)
    ax_histx.set_yscale("log")
    ax_histy.set_xscale("log")
    return im

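# Usage sketch for `scatter_hist` (hypothetical example): it builds the main
# axes plus the two marginal axes with a gridspec; figure size and spacing are
# arbitrary choices.
def example_scatter_hist(x, y, weights):
    """2D histogram with marginal histograms on top and on the right."""
    fig = plt.figure(figsize=(6, 6))
    gs = fig.add_gridspec(2, 2, width_ratios=(4, 1), height_ratios=(1, 4),
                          wspace=0.05, hspace=0.05)
    ax = fig.add_subplot(gs[1, 0])
    ax_histx = fig.add_subplot(gs[0, 0], sharex=ax)
    ax_histy = fig.add_subplot(gs[1, 1], sharey=ax)
    scatter_hist(x, y, weights, ax, ax_histx, ax_histy)
    return fig, ax
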
# Data helpers

def groupby(array, key):
    """
    Group an array by a key.
    """
    sort = np.argsort(key)
    key = key[sort]
    array = array[sort]
    unique, indices = np.unique(key, return_index=True)
    return unique, np.split(array, indices[1:])

def aggregate(array, key, function=np.sum):
    """
    Aggregate an array by a key using a function.
    """
    unique, groups = groupby(array, key)
    return unique, np.array([function(x) for x in groups])
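

# Usage sketch for `groupby`/`aggregate` (hypothetical example with made-up
# numbers): total mass per sink identifier.
def example_aggregate():
    sink_id = np.array([2, 1, 2, 1, 3])
    mass = np.array([0.5, 1.0, 0.25, 2.0, 4.0])
    ids, total_mass = aggregate(mass, sink_id, function=np.sum)
    # ids -> array([1, 2, 3]); total_mass -> array([3.0, 0.75, 4.0])
    return ids, total_mass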