Module pf_to_dem
@author: Alexandre Sac-Morane alexandre.sac-morane@uclouvain.be
This module transmits data between the phase-field simulation (MOOSE) and the DEM simulation.
# -*- encoding=utf-8 -*-
import pickle, math, os, shutil
from pathlib import Path
import numpy as np
import matplotlib.pyplot as plt
import vtk, skfmm
from vtk.util.numpy_support import vtk_to_numpy
# own
import tools
# -----------------------------------------------------------------------------#
def index_to_str(j):
    '''
    Convert an integer into a string with the format XXX (zero-padded to three digits).
    '''
    if j < 10:
        return '00'+str(j)
    elif 10 <= j < 100:
        return '0'+str(j)
    else:
        return str(j)
# -----------------------------------------------------------------------------#
def read_vtk(dict_user, dict_sample, j_str):
    '''
    Read the latest vtk files to obtain data from MOOSE.
    Does not work when called from YADE.
    '''
if not dict_sample['Map_known']:
L_XYZ = []
L_L_i_XYZ = []
    # iterate on the processors used
for i_proc in range(dict_user['n_proc']):
print('processor',i_proc+1,'/',dict_user['n_proc'])
# name of the file to load
namefile = 'vtk/pf_other_'+j_str+'_'+str(i_proc)+'.vtu'
# load a vtk file as input
reader = vtk.vtkXMLUnstructuredGridReader()
reader.SetFileName(namefile)
reader.Update()
        # grab the node coordinates and the field arrays from the vtk file
nodes_vtk_array = reader.GetOutput().GetPoints().GetData()
L_etai_vtk_array = []
for i_grain in range(len(dict_sample['L_etai_map'])):
L_etai_vtk_array.append(reader.GetOutput().GetPointData().GetArray("eta"+str(i_grain+1)))
c_vtk_array = reader.GetOutput().GetPointData().GetArray("c")
        # convert the node coordinates and the field values into numpy arrays
nodes_array = vtk_to_numpy(nodes_vtk_array)
L_etai_array = []
for i_grain in range(len(dict_sample['L_etai_map'])):
L_etai_array.append(vtk_to_numpy(L_etai_vtk_array[i_grain]))
c_array = vtk_to_numpy(c_vtk_array)
        # the map is not known yet
if not dict_sample['Map_known']:
# save the map
L_i_XYZ = []
# Must detect common zones between processors
for i_XYZ in range(len(nodes_array)) :
XYZ = nodes_array[i_XYZ]
                # do not consider a point twice (zones shared between processors)
if list(XYZ) not in L_XYZ :
                    # search the nearest node in the mesh
                    i_x = np.argmin(abs(np.array(dict_sample['x_L']) - XYZ[0]))
                    i_y = np.argmin(abs(np.array(dict_sample['y_L']) - XYZ[1]))
                    i_z = np.argmin(abs(np.array(dict_sample['z_L']) - XYZ[2]))
# save map
L_XYZ.append(list(XYZ))
L_i_XYZ.append([i_x, i_y, i_z])
# rewrite map
for i_grain in range(len(dict_sample['L_etai_map'])):
dict_sample['L_etai_map'][i_grain][i_x, i_y, i_z] = L_etai_array[i_grain][i_XYZ]
dict_sample['c_map'][i_x, i_y, i_z] = c_array[i_XYZ]
                else:
                    L_i_XYZ.append(None)
            # on subsequent calls the algorithm is faster, as the mapping is then known
            L_L_i_XYZ.append(L_i_XYZ)
# map is known
else :
# iterate on data
for i_XYZ in range(len(nodes_array)) :
                # read the saved mapping
                if dict_sample['L_L_i_XYZ_used'][i_proc][i_XYZ] is not None:
                    i_x, i_y, i_z = dict_sample['L_L_i_XYZ_used'][i_proc][i_XYZ]
# rewrite map
for i_grain in range(len(dict_sample['L_etai_map'])):
dict_sample['L_etai_map'][i_grain][i_x, i_y, i_z] = L_etai_array[i_grain][i_XYZ]
dict_sample['c_map'][i_x, i_y, i_z] = c_array[i_XYZ]
if not dict_sample['Map_known']:
        # the mapping is now known; save it for subsequent calls
dict_sample['Map_known'] = True
dict_sample['L_L_i_XYZ_used'] = L_L_i_XYZ
# -----------------------------------------------------------------------------#
def compute_levelset(dict_user, dict_sample):
    '''
    From a phase-field map, compute a level-set (signed distance) function for each grain.
    '''
L_sdf_i_map = []
L_rbm_to_apply = []
L_x_L_i = []
L_y_L_i = []
L_z_L_i = []
    # iterate on the phase variables
    for i_grain in range(len(dict_sample['L_etai_map'])):
        # compute the binary map: +1 inside the grain, -1 outside
        bin_i_map = -np.ones((dict_user['n_mesh_x'], dict_user['n_mesh_y'], dict_user['n_mesh_z']))
        # iteration on x
        for i_x in range(len(dict_sample['x_L'])):
            # iteration on y
            for i_y in range(len(dict_sample['y_L'])):
                # iteration on z
                for i_z in range(len(dict_sample['z_L'])):
                    # a node is inside the grain if its phase variable exceeds 0.5
                    if dict_sample['L_etai_map'][i_grain][i_x, i_y, i_z] > 0.5:
                        bin_i_map[i_x, i_y, i_z] = 1
        # look for the bounding box of the grain, keeping one empty node as margin
        # (limits default to the domain boundary if the grain touches it)
        # -x limit
        i_x = 0
        i_x_min_lim = 0
        found = False
while (not found) and (i_x < bin_i_map.shape[0]):
if np.max(bin_i_map[i_x, :, :]) == -1:
i_x_min_lim = i_x
else :
found = True
i_x = i_x + 1
        # +x limit
        i_x = bin_i_map.shape[0]-1
        i_x_max_lim = bin_i_map.shape[0]-1
        found = False
while not found and 0 <= i_x:
if np.max(bin_i_map[i_x, :, :]) == -1:
i_x_max_lim = i_x
else :
found = True
i_x = i_x - 1
# number of nodes on x
n_nodes_x = i_x_max_lim-i_x_min_lim+1
        # -y limit
        i_y = 0
        i_y_min_lim = 0
        found = False
while (not found) and (i_y < bin_i_map.shape[1]):
if np.max(bin_i_map[:, i_y, :]) == -1:
i_y_min_lim = i_y
else :
found = True
i_y = i_y + 1
        # +y limit
        i_y = bin_i_map.shape[1]-1
        i_y_max_lim = bin_i_map.shape[1]-1
        found = False
while not found and 0 <= i_y:
if np.max(bin_i_map[:, i_y, :]) == -1:
i_y_max_lim = i_y
else :
found = True
i_y = i_y - 1
# number of nodes on y
n_nodes_y = i_y_max_lim-i_y_min_lim+1
        # -z limit
        i_z = 0
        i_z_min_lim = 0
        found = False
while (not found) and (i_z < bin_i_map.shape[2]):
if np.max(bin_i_map[:, :, i_z]) == -1:
i_z_min_lim = i_z
else :
found = True
i_z = i_z + 1
        # +z limit
        i_z = bin_i_map.shape[2]-1
        i_z_max_lim = bin_i_map.shape[2]-1
        found = False
while not found and 0 <= i_z:
if np.max(bin_i_map[:, :, i_z]) == -1:
i_z_max_lim = i_z
else :
found = True
i_z = i_z - 1
# number of nodes on z
n_nodes_z = i_z_max_lim-i_z_min_lim+1
        # extract the sub-map containing the grain
bin_i_map = bin_i_map[i_x_min_lim:i_x_max_lim+1,
i_y_min_lim:i_y_max_lim+1,
i_z_min_lim:i_z_max_lim+1]
        # create the sub-mesh, centered on the origin
        m_size = dict_sample['x_L'][1] - dict_sample['x_L'][0] # mesh size (assumed uniform in x, y, z)
x_L_i = np.arange(-m_size*(n_nodes_x-1)/2,
m_size*(n_nodes_x-1)/2+0.1*m_size,
m_size)
y_L_i = np.arange(-m_size*(n_nodes_y-1)/2,
m_size*(n_nodes_y-1)/2+0.1*m_size,
m_size)
z_L_i = np.arange(-m_size*(n_nodes_z-1)/2,
m_size*(n_nodes_z-1)/2+0.1*m_size,
m_size)
        # compute the rigid body motion (rbm) to apply to place the grain back in the global frame
rbm_to_apply = [dict_sample['x_L'][i_x_min_lim]-x_L_i[0],
dict_sample['y_L'][i_y_min_lim]-y_L_i[0],
dict_sample['z_L'][i_z_min_lim]-z_L_i[0]]
# compute signed distance function
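        # skfmm.distance returns positive values inside the +1 region; the sign
        # is flipped so the grain interior is negative and the exterior positive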
sdf_i_map = -skfmm.distance(bin_i_map, dx=np.array([m_size, m_size, m_size]))
# save
L_sdf_i_map.append(sdf_i_map)
L_rbm_to_apply.append(rbm_to_apply)
L_x_L_i.append(x_L_i)
L_y_L_i.append(y_L_i)
L_z_L_i.append(z_L_i)
# save data
dict_save = {
'L_sdf_i_map': L_sdf_i_map,
'L_rbm_to_apply': L_rbm_to_apply,
'L_x_L_i': L_x_L_i,
'L_y_L_i': L_y_L_i,
'L_z_L_i': L_z_L_i
}
with open('data/level_set.data', 'wb') as handle:
pickle.dump(dict_save, handle, protocol=pickle.HIGHEST_PROTOCOL)
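Within the coupling loop the two entry points are chained: read_vtk() refreshes the phase-field maps from the MOOSE output of iteration j, and compute_levelset() converts them into signed distance functions for the DEM side. A minimal sketch of one iteration, assuming the module is importable as pf_to_dem and that dict_user and dict_sample are prepared elsewhere in the project:

# minimal sketch of one coupling iteration; dict_user and dict_sample are
# assumed to be prepared by the rest of the project
import pf_to_dem

j = 12                                              # hypothetical iteration counter
j_str = pf_to_dem.index_to_str(j)                   # '012', suffix of the vtk files
pf_to_dem.read_vtk(dict_user, dict_sample, j_str)   # refresh the eta and c maps from MOOSE
pf_to_dem.compute_levelset(dict_user, dict_sample)  # write data/level_set.data for the DEM side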
Functions
def index_to_str(j)
Convert an integer into a string with the format XXX (zero-padded to three digits).
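For instance (str.zfill gives the same result for non-negative integers):

index_to_str(7)    # '007'
index_to_str(42)   # '042'
index_to_str(123)  # '123'
str(42).zfill(3)   # '042', equivalent built-in for non-negative integers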
def read_vtk(dict_user, dict_sample, j_str)
Read the latest vtk files to obtain data from MOOSE. Does not work when called from YADE.
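The function relies on the standard vtkXMLUnstructuredGridReader / vtk_to_numpy pattern. A standalone sketch of the per-file reading step (the file name corresponds to iteration '000' on processor 0; the field names eta1 and c follow the naming used above):

# standalone sketch of the per-file reading pattern used by read_vtk()
import vtk
from vtk.util.numpy_support import vtk_to_numpy

reader = vtk.vtkXMLUnstructuredGridReader()
reader.SetFileName('vtk/pf_other_000_0.vtu')
reader.Update()
output = reader.GetOutput()

nodes = vtk_to_numpy(output.GetPoints().GetData())           # (n_nodes, 3) coordinates
eta1 = vtk_to_numpy(output.GetPointData().GetArray('eta1'))  # nodal phase variable
c = vtk_to_numpy(output.GetPointData().GetArray('c'))        # nodal c field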
def compute_levelset(dict_user, dict_sample)
From a phase-field map, compute a level-set (signed distance) function for each grain.
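A runnable sketch with a synthetic spherical grain; everything except compute_levelset itself is constructed here for illustration (a uniform mesh on [-1, 1]^3 and a single phase variable):

# runnable sketch: compute the level set of a synthetic spherical grain
import os
import numpy as np
import pf_to_dem

n = 30
x_L = np.linspace(-1, 1, n)                          # uniform mesh, shared by x, y, z
X, Y, Z = np.meshgrid(x_L, x_L, x_L, indexing='ij')
eta = np.where(X**2 + Y**2 + Z**2 < 0.5**2, 1., 0.)  # sphere of radius 0.5
dict_user = {'n_mesh_x': n, 'n_mesh_y': n, 'n_mesh_z': n}
dict_sample = {'x_L': x_L, 'y_L': x_L, 'z_L': x_L, 'L_etai_map': [eta]}

os.makedirs('data', exist_ok=True)                   # compute_levelset pickles into data/
pf_to_dem.compute_levelset(dict_user, dict_sample)   # writes data/level_set.data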