172 lines
5.2 KiB
Python
172 lines
5.2 KiB
Python
import os
|
|
import numpy as np
|
|
|
|
# Index of the sub-scan line exported from each measurement (0-based).
SUBLINE_TO_EXPORT = 2

# Input/output directories for logs, position files, stress data and results.
LOG_DIR = './MCL_KW_logs_etc'
POS_DIR = './position_data'
STRES_DIR = './stress_data'
OUTPUT_DIR = './truncated_stress'

# Disk centre coordinates keyed by disk identifier (units: same as the
# position/log coordinates; presumably mm -- TODO confirm).
CENTERS = {
    'S1': np.array([-19.7, 102.6]),
    'S2': np.array([-8.6, 97.1]),
}

# Disk radius; depth below the surface is RADIUS - |point - centre|.
RADIUS = 25.7

# Record layout of one row of a 2D-scan log file.
# BUGFIX: ``np.float`` was only a deprecated alias of the builtin ``float``
# and was removed in NumPy 1.24; using ``float`` directly is equivalent on
# every NumPy version.
log_dtype_2d = np.dtype([
    ('time', 'U8'),
    ('name', 'U24'),
    ('mot1', float),
    ('mot2', float),
    ('exposure', float),
    ('diode', float),
    ('petra', float),
])
|
|
|
|
def get_slice(file_name):
    """Return the slice selecting the exported subline's rows.

    The per-subline log files give the row count of each subline; the
    exclusive prefix sums (``cumsum``) locate where each subline starts
    inside the concatenated data file.
    """
    log_files = get_log_paths(file_name)

    lengths = [get_num_subs(path) for path in log_files]
    offsets = cumsum(lengths)  # offsets[k] == rows before subline k

    first = offsets[SUBLINE_TO_EXPORT]
    return slice(first, first + lengths[SUBLINE_TO_EXPORT])
|
|
|
|
def get_log_paths(file_name):
    """Return sorted LOG_DIR paths of log files matching *file_name*'s stem.

    NOTE(review): relies on the module-level ``logs`` list that is only
    built inside the ``__main__`` guard.
    """
    identifier = os.path.splitext(file_name)[0]
    matching = (f for f in logs if identifier in f)
    return sorted(os.path.join(LOG_DIR, f) for f in matching)
|
|
|
|
def get_num_subs(file_path):
    """Count the data lines of a log file.

    Lines starting with '!' (comments) and blank/whitespace-only lines are
    ignored.
    """
    count = 0
    with open(file_path) as handle:
        for line in handle:
            if line and not line.isspace() and not line.startswith('!'):
                count += 1
    return count
|
|
|
|
def get_depth_from_logs(file_name):
    """Depth below the disk surface for each point of the exported subline.

    Coordinates are read from columns 2 and 3 of the subline's log file;
    the disk is identified by the second '_'-token of the log file name.
    """
    path = get_log_paths(file_name)[SUBLINE_TO_EXPORT]
    points = np.loadtxt(path, comments='!', usecols=(2, 3))
    offsets = points - CENTERS[get_disk_id(path, 1)]
    return RADIUS - np.linalg.norm(offsets, axis=1)
|
|
|
|
def get_depth_from_posfile(file_name, slice_=slice(None)):
    """Depth below the disk surface computed from the position file.

    slice_ optionally restricts which rows are used (default: all rows).
    """
    path = get_pos_path(file_name)
    points = np.loadtxt(path, comments='#', usecols=(0, 1))[slice_]
    offsets = points - CENTERS[get_disk_id(path)]
    return RADIUS - np.linalg.norm(offsets, axis=1)
|
|
|
|
def get_pos_path(file_name):
    """Map a data-file name to its '<stem>_positions.txt' path in POS_DIR."""
    stem = os.path.splitext(file_name)[0]
    return os.path.join(POS_DIR, stem + '_positions.txt')
|
|
|
|
def get_disk_id(path, i=0):
    """Return the i-th '_'-separated token of the file's base name."""
    return os.path.basename(path).split('_')[i]
|
|
|
|
def slice_stressfile(name, s=slice(None)):
    """Read a stress data file from STRES_DIR; return (header, selected rows).

    The first two lines form the header (spaces replaced by underscores).
    Files whose header starts with 'Depth' are returned whole -- *s* is
    overridden with the full slice for them.
    """
    file_path = os.path.join(STRES_DIR, name)
    with open(file_path) as file_in:
        first_two = file_in.read().split('\n')[:2]
    head = '\n'.join(first_two).replace(' ', '_')

    if head.startswith('Depth'):
        s = slice(None)

    data = np.loadtxt(file_path, skiprows=2)

    return head, data[s]
|
|
|
|
def cumsum(l):
    """Exclusive prefix sums: result[k] == sum(l[:k]), so result[0] == 0.

    Rewritten as a single running-total pass -- the original recomputed
    ``sum(l[0:x])`` for every index, which is O(n^2). Output is identical.
    """
    sums = []
    total = 0
    for value in l:
        sums.append(total)
        total += value
    return sums
|
|
|
|
def determine_hardenin_depth(data):
    """Depth (first column) where the stress (last column) first changes sign.

    Rows with NaN stress or negative depth are discarded, and a sign change
    at the very first remaining sample is ignored. Returns np.nan when no
    sign change is found; otherwise the zero crossing is located by linear
    interpolation between the two bracketing samples.
    """
    # Drop rows with NaN in the stress column, then rows with negative depth.
    a = np.delete(data, np.where(np.isnan(data[:, -1])), axis=0)
    a = np.delete(a, np.where(a[:, 0] < 0), axis=0)
    x, y = a[:, [0, -1]].T

    # Indices i where sign(y[i]) != sign(y[i+1]); a crossing at i == 0 is
    # discarded (treated as a boundary artefact).
    zero_crossings = np.where(np.diff(np.sign(y)))[0]
    zero_crossings = np.delete(zero_crossings, np.where(zero_crossings == 0))
    if len(zero_crossings) == 0:
        return np.nan

    i, j = zero_crossings[0], zero_crossings[0] + 1

    delta_x = x[j] - x[i]
    delta_y = y[j] - y[i]
    # Linear interpolation for y == 0 between (x[i], y[i]) and (x[j], y[j]):
    # the root is at x[i] - y[i] * dx/dy.
    # BUGFIX: the original used '+', which mirrors the root to the wrong
    # side of x[i], outside the bracketing interval.
    depth = x[i] - delta_x * y[i] / delta_y

    return depth
|
|
|
|
def get_angle(file_name, numgetter = None):
    """Extract the scan angle from *file_name* and mirror it about the axis.

    numgetter: optional callable mapping a file name to the raw angle; the
    default parses the trailing '-NNN' token of the stem and divides by 10.
    """
    if numgetter is None:
        def numgetter(f):
            return int(os.path.splitext(f)[0].split('-')[-1]) / 10.
    raw_angle = numgetter(file_name)
    return flip_angle_notation(raw_angle)
|
|
|
|
def flip_angle_notation(value, axis = 45):
    """Mirror an angle (degrees) about *axis*; result normalized to [0, 360).

    Equivalent to rotating to the axis, negating, and rotating back:
    (axis - (value - axis)) mod 360.
    """
    relative = value - axis
    return (axis - relative) % 360
|
|
|
|
def save_new(name, lines, head=''):
    """Write *lines* to OUTPUT_DIR/name as tab-separated floats (5 decimals).

    *head* is written verbatim as the first line(s), without comment prefix.
    """
    target = os.path.join(OUTPUT_DIR, name)
    np.savetxt(target, lines, fmt='%.5f', delimiter='\t', header=head, comments='')
|
|
|
|
def collect_depth(depths, file_name, data):
    """Append this file's (angle, hardening depth) pair to depths[disk_id].

    Mutates *depths* in place; creates the list on first sight of a disk id.

    BUGFIX: the membership test previously consulted the module-level
    ``hardenings_depth`` dictionary instead of the ``depths`` parameter,
    which made the function fail (NameError/KeyError) for any other dict.
    ``setdefault`` uses the parameter and removes the branch entirely.
    """
    ident = get_disk_id(file_name)
    pair = (get_angle(file_name), determine_hardenin_depth(data))
    depths.setdefault(ident, []).append(pair)
|
|
|
|
def save_depth(depths):
    """Write one '<id>_inversion_depth.dat' file per disk id in *depths*.

    Rows are sorted by angle, outlier depths (|d| >= 7) are blanked to NaN,
    and the first row is repeated at angle + 360 to close the contour.
    """
    head = "angle\tdepth\ndeg\tmm"

    for ident, pairs in depths.items():
        table = np.array(sorted(pairs))
        table[:, 1] = cull_data(table[:, 1], 7)
        # Duplicate the first row, shifted by a full turn, to close the curve.
        table = np.append(table, table[0:1], axis=0)
        table[-1, 0] += 360
        save_new("{}_inversion_depth.dat".format(ident), table, head)
|
|
|
|
def cull_data(data, threshold):
    """Return a copy of *data* with entries of |value| >= threshold set to NaN."""
    keep = np.abs(data) < threshold
    return np.where(keep, data, np.nan)
|
|
|
|
if __name__ == "__main__":
    if not os.path.exists(OUTPUT_DIR):
        os.makedirs(OUTPUT_DIR)

    # File inventories. NOTE: ``logs`` is also read by get_log_paths().
    logs = [f for f in os.listdir(LOG_DIR) if f.endswith('.log')]
    datafiles = [f for f in os.listdir(STRES_DIR) if f.endswith('.dat')]
    # posfiles = [f for f in os.listdir(POS_DIR) if f.endswith('.txt')]

    # disk id -> list of (angle, hardening depth) pairs
    hardenings_depth = {}

    for file_name in datafiles:
        the_slice = get_slice(file_name)
        head, numbers = slice_stressfile(file_name, the_slice)

        # Replace the first column by the depth below the disk surface.
        if not head.startswith('Depth'):
            depth = get_depth_from_posfile(file_name, the_slice)
            numbers[:, 0] = depth
        else:
            # Already depth-indexed files keep all rows; recompute the depth
            # when a matching position file exists, else keep as-is.
            try:
                depth = get_depth_from_posfile(file_name)
                numbers[:, 0] = depth
            except ValueError as e:
                # BUGFIX: use the function-call print form -- the bare
                # Python 2 print statement is a SyntaxError on Python 3,
                # while this form behaves identically on both.
                print('ValueError in {}: {}'.format(file_name, e))

        # Blank out absurd values before export.
        numbers = cull_data(numbers, 1000)

        collect_depth(hardenings_depth, file_name, numbers)

        save_new(file_name, numbers, head)

    save_depth(hardenings_depth)
|