27-04-2021 GC
from pathlib import Path
import numpy as np
import pandas as pd
from math import log10
import matplotlib
import matplotlib.pyplot as plt
import seaborn as sns
def read_head():
# function to read the two identification columns (Study_ID and Device)
f_in = '/media/guido/LACIE/Cingle_Guido/Master/AC_thr_third.xlsx'
p_in = Path(f_in)
df = pd.read_excel(p_in, sheet_name=0, header=0, usecols=[1, 2], nrows=85)
return df
def read_AC_thresholds():
# function to read AC thresholds in dB SPL at the eardrum
f_in = '/media/guido/LACIE/Cingle_Guido/Master/AC_thr_third.xlsx'
p_in = Path(f_in)
df = pd.read_excel(p_in, sheet_name='AC_thr_eardrum', header=0, nrows=85)
df = df.drop(['Unnamed: 0', 'Study_ID', 'Device', '125_Hz'], axis=1)
df = df.fillna(pd.NA)
return df
def read_BC_thresholds():
# function to read BC thresholds in dB FL
f_in = '/media/guido/LACIE/Cingle_Guido/Master/BC_dir_thr_third.xlsx'
p_in = Path(f_in)
df = pd.read_excel(p_in, sheet_name='BC_dir_thr_third', header=0, nrows=85)
df = df.drop(['Unnamed: 0'], axis=1)
df = df.fillna(pd.NA)
return df
def read_BCD_output_65():
# function to read the BCD output in dB FL, measured with an ISTS input of 65 dB
f_in = '/media/guido/LACIE/Cingle_Guido/Master/BCD_band_output.xlsx'
p_in = Path(f_in)
df = pd.read_excel(p_in, sheet_name='BCD_output_65', header=0, nrows=85)
df = df.drop(['Unnamed: 0'], axis=1)
df = df.fillna(pd.NA)
return df
def read_BCD_output_55():
# function to read the BCD output in dB FL, measured with an ISTS input of 55 dB
f_in = '/media/guido/LACIE/Cingle_Guido/Master/BCD_band_output.xlsx'
p_in = Path(f_in)
df = pd.read_excel(p_in, sheet_name='BCD_output_55', header=0, nrows=85)
df = df.drop(['Unnamed: 0'], axis=1)
df = df.fillna(pd.NA)
return df
def read_ISTS_SPL():
# function to read ISTS levels in dB SPL per 1/3-octave band, for 65 dB and 55 dB
f_in = '/media/guido/LACIE/Cingle_Guido/Master/constants.xlsx'
p_in = Path(f_in)
col_to_use = list(range(20))
df = pd.read_excel(p_in, sheet_name='ISTS_sound_pressure',
header=0, nrows=2, usecols=col_to_use)
df = df.fillna(pd.NA)
df = df.rename(columns={'Unnamed: 0' : 'Signal'})
df = df.set_index(['Signal'])
df = df.drop('125_Hz', axis = 1)
s65 = pd.Series(df.iloc[0])
s55 = pd.Series(df.iloc[1])
return (s65, s55)
def read_HRTF():
# function to read HRTF data for the AC path, collected by Stenfelt
f_in = '/media/guido/LACIE/Cingle_Guido/Master/constants.xlsx'
p_in = Path(f_in)
col_to_use = list(range(20))
df = pd.read_excel(p_in, sheet_name='HRTF_KEMAR_Stenfelt',
header=0, nrows=3, usecols=col_to_use)
df = df.fillna(pd.NA)
df = df.rename(columns={'Unnamed: 0' : 'Angle'})
df = df.set_index(['Angle'])
df = df.drop('125_Hz', axis = 1)
df = df.round(1)
s0 = pd.Series(df.iloc[0])
s90 = pd.Series(df.iloc[1])
s270 = pd.Series(df.iloc[2])
return (s0, s90, s270)
def read_HRTF_BCD():
# function to read HRTF data for the BC path, collected by Stenfelt
f_in = '/media/guido/LACIE/Cingle_Guido/Master/constants.xlsx'
p_in = Path(f_in)
col_to_use = list(range(20))
df = pd.read_excel(p_in, sheet_name='HRTF_BAHA_Stenfelt', header=0, nrows=3,
usecols=col_to_use)
#df = df.drop(['Unnamed: 0'], axis=1)
df = df.fillna(pd.NA)
df = df.rename(columns={'Unnamed: 0' : 'Angle'})
df = df.set_index(['Angle'])
df = df.drop('125_Hz', axis = 1)
df = df.round(1)
s0 = pd.Series(df.iloc[0])
s90 = pd.Series(df.iloc[1])
s270 = pd.Series(df.iloc[2])
return (s0, s90, s270)
def ISTS_HRTF_65():
# ISTS 65 dB corrected with HRTF for 0, 90 and 270 degrees, return list of 3 pd.Series
ists65 = read_ISTS_SPL()[0]
hrtf = read_HRTF()
res = list()
for s in hrtf:
corr = s + ists65
res.append(corr)
return res
def ISTS_HRTF_55():
# ISTS 55 dB corrected with HRTF for 0, 90 and 270 degrees, return list of 3 pd.Series
ists55 = read_ISTS_SPL()[1]
hrtf = read_HRTF()
res = list()
for s in hrtf:
corr = s + ists55
res.append(corr)
return res
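# The two helpers above apply the directional HRTF correction per 1/3-octave band:
# E_i(angle) = L_ISTS,i + HRTF_i(angle); the pandas addition aligns both Series on the
# shared frequency labels (160_Hz .. 8000_Hz).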
def read_ANSI():
# function to read data from Table 3 of the ANSI S3.5 standard
f_in = '/media/guido/LACIE/Cingle_Guido/Master/constants.xlsx'
p_in = Path(f_in)
df = pd.read_excel(p_in, sheet_name='Tabel3_SII', header=0, index_col=0)
return df
def read_importance():
# function to read importance values from xlsx file
f_in = '/media/guido/LACIE/Cingle_Guido/Master/constants.xlsx'
p_in = Path(f_in)
df = pd.read_excel(p_in, sheet_name='SII_importance', header=0, nrows=1)
df = df.drop(['Unnamed: 0', '125_Hz'], axis=1)
df = df.fillna(pd.NA)
s = pd.Series(df.iloc[0])
return s
def order_columns(df):
# set the low-frequency BC bands (160 and 200 Hz) equal to the data at 250 Hz
for f in ['160_Hz', '200_Hz']: df[f] = df['250_Hz']
ordered_columns = ['160_Hz', '200_Hz', '250_Hz', '315_Hz', '400_Hz', '500_Hz',
'630_Hz', '800_Hz', '1000_Hz', '1250_Hz', '1600_Hz', '2000_Hz',
'2500_Hz', '3150_Hz', '4000_Hz', '5000_Hz', '6300_Hz', '8000_Hz']
df = df[ordered_columns]
return df
def band_audibility(sl):
ba = (sl + 15) / 30
ba = ba.fillna(-999) # temporarily mark missing bands
ba = ba.where(ba<=1, 1) # cap the audibility at 1
ba = ba.replace(-999, 999) # move the marker above 0 so it survives the lower clip
ba = ba.where(ba>=0, 0) # floor the audibility at 0
ba = ba.replace(999, pd.NA) # restore missing bands as NA
return ba
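# A minimal check with hypothetical toy sensation levels (not study data): the band
# audibility is (SL + 15) / 30, limited to the range 0 .. 1 (cf. ANSI S3.5 1997, 4.7).
_ba_demo = band_audibility(pd.DataFrame({'1000_Hz': [-20.0, 0.0, 30.0]}))
# expected per row: 0.0 (inaudible), 0.5 (partly audible), 1.0 (fully audible)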
def band_intelligibility(sl):
ba = band_audibility(sl)
importance = read_importance()
bi = ba * importance
return bi
def speech_intelligibility_index(sl):
bi = band_intelligibility(sl)
SII = bi.sum(axis=1, min_count=18)
SII = SII.round(4)
return SII
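# In formula form (ANSI S3.5 1997): SII = sum over the 18 bands of I_i * A_i, with I_i the
# band importance and A_i the band audibility; min_count=18 returns NA when any band is missing.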
def calculate_B(V, N):
''' calculation of B, Ansi S3.5 1997 4.3.2.2
B is the larger value between equivalent noise spectrum level N
and self-speech masking spectrum level V'''
b = pd.concat([V, N], axis='columns')
l = list()
for col in b.columns:
if col not in l: l.append(col)
bb = pd.DataFrame()
for col in l:
bb[col] = b[col].max(axis=1)
bb = bb.astype('float')
bb = bb.round(1)
return bb
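# In short: B_i = max(V_i, N_i) per band, where V_i is the self-speech masking spectrum level
# (computed as E_i - 24 dB further on in this script) and N_i the equivalent noise spectrum level.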
def calculate_C(V, N):
# calculation of C, slope of spread of masking, Ansi S3.5 1997 4.3.2.2
B = calculate_B(V, N)
c = pd.DataFrame()
for col in B.columns:
f = (int(col.strip('_Hz')))
c[col] = -80 + 0.6 * (B[col] + 10 * log10(f) - 6.353)
return c
def calculate_Z(V, N):
# calculation of Z, equivalent masking spectrum level, Ansi S3.5 1997 4.3.2.5
b = calculate_B(V, N)
c = calculate_C(V, N)
# make list with values of CF of 1/3 octave bands (160-8000 Hz)
clmns = b.columns
l = list()
for col in clmns:
f = (int(col.strip('_Hz')))
l.append(f)
d ={0: (b.iloc[:, 0]).to_numpy()} # make dict, value Z=B for 160 Hz, ANSI 4.3.2.4
for i in range (1, 18): # equation 9, i index f band > 160 Hz
n2 = np.zeros(len(b)) # null array for update with summation
for k in range (0, i): # k, summation index over the bands below band i
bb = (b.iloc[:, k]).to_numpy()
cc = c.iloc[:, k].to_numpy()
fi = l[i]
fk = l[k]
r = log10(0.89 * fi / fk)
n3 = 10 ** (0.1 * (bb + 3.32 * cc * r))
n2 += n3 # summation term in eq.9
n1 = 10 ** (0.1 * N.iloc[:, i])
z = 10 * np.log10(n1 + n2) # equation 9
d1 = {i : z}
d.update(d1) # dict {f : array z}
z = pd.DataFrame(d) # convert dict in DataFrame
lc = clmns.to_list() # set back names 1/3 octave bands (eg 160_Hz)
dc = dict()
for j in range(len(lc)):
up = {j: lc[j]}
dc.update(up)
z = z.rename(columns=dc)
z = z.astype('float')
z = z.round(1)
return z
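# The loop above follows ANSI S3.5 1997 equation 9:
# Z_i = 10*log10( 10**(0.1*N_i) + sum_k 10**(0.1*(B_k + 3.32*C_k*log10(0.89*F_i/F_k))) )
# with the sum over the bands k below band i (upward spread of masking) and Z = B for the
# lowest band (160 Hz).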
def calculate_X(thresholds):
# calculation of X, equivalent internal noise for AC or BC path, Ansi S3.5 1997 4.4
ansi = read_ANSI()
internal_noise = ansi.iloc[:, 4]
x = thresholds.add(internal_noise, axis='columns')
x = x.astype('float')
x = x.round(1)
return x
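# In short: the equivalent internal noise is the hearing threshold per band plus the
# reference internal noise spectrum level taken from Table 3 (ANSI S3.5 1997 4.4).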
def calculate_D(V, N, thresholds):
# calculation of disturbance D, Ansi S3.5 1997 4.5
Z = calculate_Z(V, N)
X = calculate_X(thresholds)
filter_x = (Z < X)
take_x = X[filter_x]
D = take_x.fillna(value=Z)
D = D.astype('float')
D = D.round(1)
return D
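# In short: the disturbance spectrum level D is, per band, the larger of the equivalent masking
# spectrum level Z and the equivalent internal noise spectrum level X (ANSI S3.5 1997 4.5).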
def combination_path(acpath, bcpath, bcindex):
# per frequency band, keep the larger value of the two paths (the second argument is first restricted to the rows in bcindex)
mask_filter = acpath >= bcpath.iloc[bcindex]
mask_filter_inv = ~(mask_filter)
filtered1 = acpath[mask_filter]
filtered1.fillna(value=0, inplace=True)
filtered2 = (bcpath.iloc[bcindex])[mask_filter_inv]
filtered2.fillna(value=0, inplace=True)
combi = filtered1 + filtered2
return combi
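# A minimal sketch with hypothetical toy values (not study data), showing that the function
# returns, per band, the larger of the two spectra:
_ac_demo = pd.DataFrame({'1000_Hz': [60.0, 40.0]})
_bc_demo = pd.DataFrame({'1000_Hz': [50.0, 55.0]})
_combi_demo = combination_path(_ac_demo, _bc_demo, _ac_demo.index)
# expected: 60.0 in the first row and 55.0 in the second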
def CVC_transfer_function(sii):
# CVC score calculation as a function of SII
n = 0.9
q = 0.6
p = 1
cvc_score = round((1-10**(-(sii*p)/q))**n, 2)
return cvc_score
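# A worked example (illustrative values only): with p = 1, q = 0.6 and n = 0.9, an SII of 0.5
# gives round((1 - 10**(-0.5/0.6))**0.9, 2) = 0.87, and an SII of 0 gives a CVC score of 0.0.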
delta = 0 # S/N in dB; the noise spectrum is lowered by delta (0 dB: speech and noise equally loud)
Calculation of SII for AC path
# read AC thresholds
act = read_AC_thresholds()
# speech spectrum level
e_65_270 = ISTS_HRTF_65()[2] # speech source on the BCD side (270 degrees, opposite the best ear)
E_65_270 = pd.concat([e_65_270] * len(act), axis=1).T
# calculation of self-speech masking spectrum level V, Ansi S3.5 1997 4.3.2.1
V_65_best_ear = E_65_270 - 24 # speech source on the BCD side (270 degrees)
# calculation of equivalent noise spectrum level N, Ansi S3.5 1997 4.3.2.2
n_best_ear = ISTS_HRTF_65()[0] - delta # noise source in front (0 degrees)
N_best_ear = pd.concat([n_best_ear] * len(act), axis=1).T
# calculation of disturbance D, Ansi S3.5 1997 4.5
disturbance_ac = calculate_D(V_65_best_ear, N_best_ear, act)
# calculation of SII for AC path
unmasked_ac = E_65_270.subtract(disturbance_ac, axis='columns') # ANSI step 7 (4.7) E - D
sii_ac_s270_n0 = speech_intelligibility_index(unmasked_ac)
# calculation of CVC-score for AC path
cvc_ac_s270_n0 = CVC_transfer_function(sii_ac_s270_n0)
Calculation of SII for BC path
# read BC thresholds
bct = read_BC_thresholds()
bct = bct.dropna() # delete rows with missing data
idx = bct.index # index of complete rows
bct = order_columns(bct)
# read the output of the BCD, measured on the skull simulator with input ISTS 65 dB
BCD_out_65 = read_BCD_output_65()
BCD_out_65 = order_columns(BCD_out_65)
BCD_out_65 = BCD_out_65.iloc[idx]
# read the output of the BCD, measured on the skull simulator with input ISTS 55 dB
#BCD_out_55 = read_BCD_output_55()
#BCD_out_55 = order_columns(BCD_out_55)
#BCD_out_55 = BCD_out_55.iloc[idx]
# read the head-related transfer function for the BCD, speech signal from the side at 90 degrees (BCD side)
HRTF_BCD_90 = read_HRTF_BCD()[1]
# calculation of equivalent speech spectrum level Ansi S3.5 1997 4.2
E_65_90 = BCD_out_65 + HRTF_BCD_90
# calculation of self-speech masking spectrum level V, Ansi S3.5 1997 4.3.2.1
V_65_BCD = E_65_90 - 24 # speech source on the BCD side (90 degrees)
# read the head-related transfer function for the BCD, noise source in front
HRTF_BCD_0 = read_HRTF_BCD()[0]
# calculation of equivalent noise spectrum level N, Ansi S3.5 1997 4.3.2.2
# noise source in front (0 degrees)
N_BCD = BCD_out_65 + HRTF_BCD_0 - (delta)
# calculation of disturbance D, Ansi S3.5 1997 4.5
disturbance_bc = calculate_D(V_65_BCD, N_BCD, bct)
# calculation of SII for BC path
unmasked_bc = E_65_90.subtract(disturbance_bc, axis='columns') # ANSI step 7 (4.7) E - D
sii_bc_s90_n0 = speech_intelligibility_index(unmasked_bc)
# calculation of CVC-score for BC path
cvc_bc_s90_n0 = CVC_transfer_function(sii_bc_s90_n0)
Adaptation of BC path to AC path to compare both and determine combination path
# difference between air- vs. bone thresholds
kk = act.iloc[idx] - bct
# speech signal BC path
E_65_90 = E_65_90 + kk
# noise BC path
N_BCD = N_BCD + kk
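# In formula form: E_BC,adjusted = E_BC + (T_AC - T_BC) and N_BC,adjusted = N_BC + (T_AC - T_BC),
# so the BC spectra are put on the same scale as the AC path before the two are compared.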
Filtering higher values of speech and noise to determine combination path
# per band, keep the larger of the BC and AC values
speech_combi = combination_path(E_65_90, E_65_270, idx)
noise_combi = combination_path(N_BCD, N_best_ear, idx)
Calculation of SII for combination path
# calculation of self-speech masking spectrum level V, Ansi S3.5 1997 4.3.2.1
V_combi = speech_combi - 24
# calculation of disturbance D, Ansi S3.5 1997 4.5
disturbance_combi = calculate_D(V_combi, noise_combi, act.iloc[idx])
# calculation of SII for combination path
unmasked_combi = speech_combi.subtract(disturbance_combi, axis='columns') # ANSI step 7 (4.7) E - D
sii_combi = speech_intelligibility_index(unmasked_combi)
# calculation of CVC-score for combination path
cvc_combi = CVC_transfer_function(sii_combi)
Presentation of results
sii_ac = sii_ac_s270_n0.iloc[idx]
sii_bc = sii_bc_s90_n0
sii_diff = sii_combi - sii_ac
cvc_ac = cvc_ac_s270_n0.iloc[idx]
cvc_bc = cvc_bc_s90_n0
cvc_diff = cvc_combi - cvc_ac
# make Pandas DataFrame with SII results
complete = read_head().iloc[idx]
SII_Sbcd65_Nfront65 = pd.concat([complete, sii_ac, sii_bc, sii_combi, sii_diff],
axis='columns')
SII_Sbcd65_Nfront65.rename(columns={0:'AC_path', 1:'BC_path', 2:'AC&BC_path', 3: 'diff_combi_AC'},
inplace=True)
SII_Sbcd65_Nfront65.reset_index(drop=True, inplace=True)
SII_Sbcd65_Nfront65
| | Study_ID | Device | AC_path | BC_path | AC&BC_path | diff_combi_AC |
---|---|---|---|---|---|---|
0 | 1 | BP110 | 0.3248 | 0.3323 | 0.3248 | 0.0000 |
1 | 2 | BP110 | 0.3193 | 0.3461 | 0.3517 | 0.0324 |
2 | 6 | BP110 | 0.3232 | 0.3377 | 0.3677 | 0.0445 |
3 | 10 | BP110 | 0.3232 | 0.3110 | 0.3334 | 0.0102 |
4 | 12 | BP110 | 0.3248 | 0.3079 | 0.3248 | 0.0000 |
... | ... | ... | ... | ... | ... | ... |
65 | 81 | BAHA5P | 0.3248 | 0.3773 | 0.3467 | 0.0219 |
66 | 82 | BAHA5P | 0.3248 | 0.4541 | 0.3248 | 0.0000 |
67 | 83 | BAHA5P | 0.3248 | 0.4506 | 0.3565 | 0.0317 |
68 | 84 | BAHA5P | 0.3193 | 0.4047 | 0.3608 | 0.0415 |
69 | 85 | BAHA5P | 0.3248 | 0.4130 | 0.3332 | 0.0084 |
70 rows × 6 columns
# make Pandas DataFrame with CVC results
#complete = read_head().iloc[idx]
CVC_Sbcd65_Nfront65 = pd.concat([complete, cvc_ac, cvc_bc, cvc_combi, cvc_diff],
axis='columns')
CVC_Sbcd65_Nfront65.rename(columns={0:'AC_path', 1:'BC_path', 2:'AC&BC_path', 3: 'diff_combi_AC'}, inplace=True)
CVC_Sbcd65_Nfront65.reset_index(drop=True, inplace=True)
CVC_Sbcd65_Nfront65
| | Study_ID | Device | AC_path | BC_path | AC&BC_path | diff_combi_AC |
---|---|---|---|---|---|---|
0 | 1 | BP110 | 0.74 | 0.74 | 0.74 | 0.00 |
1 | 2 | BP110 | 0.73 | 0.76 | 0.76 | 0.03 |
2 | 6 | BP110 | 0.74 | 0.75 | 0.78 | 0.04 |
3 | 10 | BP110 | 0.74 | 0.72 | 0.75 | 0.01 |
4 | 12 | BP110 | 0.74 | 0.72 | 0.74 | 0.00 |
... | ... | ... | ... | ... | ... | ... |
65 | 81 | BAHA5P | 0.74 | 0.79 | 0.76 | 0.02 |
66 | 82 | BAHA5P | 0.74 | 0.84 | 0.74 | 0.00 |
67 | 83 | BAHA5P | 0.74 | 0.84 | 0.77 | 0.03 |
68 | 84 | BAHA5P | 0.73 | 0.81 | 0.77 | 0.04 |
69 | 85 | BAHA5P | 0.74 | 0.81 | 0.75 | 0.01 |
70 rows × 6 columns
# write SII results to xlsx file in Master directory
fout = '/media/guido/LACIE/Cingle_Guido/Master/SII_Sbcd65_Nfront65.xlsx'
pout = Path(fout)
with pd.ExcelWriter(pout) as writer:
SII_Sbcd65_Nfront65.to_excel(writer, sheet_name='SII_Sbcd65_Nfront65')
# write CVC results to xlsx file in Master directory
fout = '/media/guido/LACIE/Cingle_Guido/Master/CVC_Sbcd65_Nfront65.xlsx'
pout = Path(fout)
with pd.ExcelWriter(pout) as writer:
CVC_Sbcd65_Nfront65.to_excel(writer, sheet_name='CVC_Sbcd65_Nfront65')
# make a figure to plot SII for the 3 paths
ttl = 'SII for the combination path, air conduction path, bone conduction path, S at BCD side 65 dB, N in front 65 dB'
tp = pd.melt(pd.DataFrame( {'AC & BC' : sii_combi, 'AC' : sii_ac, 'BC' : sii_bc }),
var_name = 'Transmission path', value_name = 'Speech Intelligibility Index')
fig, ax = plt.subplots(constrained_layout=True)
fig.set_figheight(6)
fig.set_figwidth(14)
ax = sns.swarmplot(data = tp, x = 'Transmission path', y = 'Speech Intelligibility Index',
hue = 'Transmission path', size=3, palette=['silver', 'grey', 'black'])
ax.set_title(ttl)
plt.legend(bbox_to_anchor=(0.15, 0.25), fontsize='large')
plt.show()
# make a figure to plot CVC for the 3 paths
ttl = 'CVC for the combination path, air conduction path, bone conduction path, S at BCD side 65 dB, N in front 65 dB'
tp = pd.melt(pd.DataFrame( {'AC & BC' : cvc_combi, 'AC' : cvc_ac, 'BC' : cvc_bc }),
var_name = 'Transmission path', value_name = 'CVC - score')
fig, ax = plt.subplots(constrained_layout=True)
fig.set_figheight(6)
fig.set_figwidth(14)
ax = sns.swarmplot(data = tp, x = 'Transmission path', y = 'CVC - score',
hue = 'Transmission path', size=3, palette=['silver', 'grey', 'black'])
ax.set_title(ttl)
plt.legend(bbox_to_anchor=(0.15, 0.25), fontsize='large')
plt.show()