echo-cancel: Use anonymous unions for echo canceller params

Makes this part of the code just a little less verbose.
This commit is contained in:
Arun Raghavan 2016-02-17 19:47:06 +05:30
parent aa02e1654b
commit 8de7dfec14
5 changed files with 71 additions and 70 deletions

View file

@@ -78,16 +78,16 @@ bool pa_adrian_ec_init(pa_core *c, pa_echo_canceller *ec,
rate = out_ss->rate; rate = out_ss->rate;
*nframes = (rate * frame_size_ms) / 1000; *nframes = (rate * frame_size_ms) / 1000;
ec->params.priv.adrian.blocksize = (*nframes) * pa_frame_size(out_ss); ec->params.adrian.blocksize = (*nframes) * pa_frame_size(out_ss);
pa_log_debug ("Using nframes %d, blocksize %u, channels %d, rate %d", *nframes, ec->params.priv.adrian.blocksize, out_ss->channels, out_ss->rate); pa_log_debug ("Using nframes %d, blocksize %u, channels %d, rate %d", *nframes, ec->params.adrian.blocksize, out_ss->channels, out_ss->rate);
/* For now we only support SSE */ /* For now we only support SSE */
if (c->cpu_info.cpu_type == PA_CPU_X86 && (c->cpu_info.flags.x86 & PA_CPU_X86_SSE)) if (c->cpu_info.cpu_type == PA_CPU_X86 && (c->cpu_info.flags.x86 & PA_CPU_X86_SSE))
have_vector = 1; have_vector = 1;
ec->params.priv.adrian.aec = AEC_init(rate, have_vector); ec->params.adrian.aec = AEC_init(rate, have_vector);
if (!ec->params.priv.adrian.aec) if (!ec->params.adrian.aec)
goto fail; goto fail;
pa_modargs_free(ma); pa_modargs_free(ma);
@@ -102,17 +102,17 @@ fail:
void pa_adrian_ec_run(pa_echo_canceller *ec, const uint8_t *rec, const uint8_t *play, uint8_t *out) { void pa_adrian_ec_run(pa_echo_canceller *ec, const uint8_t *rec, const uint8_t *play, uint8_t *out) {
unsigned int i; unsigned int i;
for (i = 0; i < ec->params.priv.adrian.blocksize; i += 2) { for (i = 0; i < ec->params.adrian.blocksize; i += 2) {
/* We know it's S16NE mono data */ /* We know it's S16NE mono data */
int r = *(int16_t *)(rec + i); int r = *(int16_t *)(rec + i);
int p = *(int16_t *)(play + i); int p = *(int16_t *)(play + i);
*(int16_t *)(out + i) = (int16_t) AEC_doAEC(ec->params.priv.adrian.aec, r, p); *(int16_t *)(out + i) = (int16_t) AEC_doAEC(ec->params.adrian.aec, r, p);
} }
} }
void pa_adrian_ec_done(pa_echo_canceller *ec) { void pa_adrian_ec_done(pa_echo_canceller *ec) {
if (ec->params.priv.adrian.aec) { if (ec->params.adrian.aec) {
AEC_done(ec->params.priv.adrian.aec); AEC_done(ec->params.adrian.aec);
ec->params.priv.adrian.aec = NULL; ec->params.adrian.aec = NULL;
} }
} }

View file

@@ -69,10 +69,11 @@ struct pa_echo_canceller_params {
void *trace_callback; void *trace_callback;
bool agc; bool agc;
bool first; bool first;
unsigned int agc_start_volume;
} webrtc; } webrtc;
#endif #endif
/* each canceller-specific structure goes here */ /* each canceller-specific structure goes here */
} priv; };
/* Set this if canceller can do drift compensation. Also see set_drift() /* Set this if canceller can do drift compensation. Also see set_drift()
* below */ * below */

View file

@@ -34,7 +34,7 @@ bool pa_null_ec_init(pa_core *c, pa_echo_canceller *ec,
char strss_sink[PA_SAMPLE_SPEC_SNPRINT_MAX]; char strss_sink[PA_SAMPLE_SPEC_SNPRINT_MAX];
*nframes = 256; *nframes = 256;
ec->params.priv.null.out_ss = *out_ss; ec->params.null.out_ss = *out_ss;
*rec_ss = *out_ss; *rec_ss = *out_ss;
*rec_map = *out_map; *rec_map = *out_map;
@@ -49,7 +49,7 @@ bool pa_null_ec_init(pa_core *c, pa_echo_canceller *ec,
void pa_null_ec_run(pa_echo_canceller *ec, const uint8_t *rec, const uint8_t *play, uint8_t *out) { void pa_null_ec_run(pa_echo_canceller *ec, const uint8_t *rec, const uint8_t *play, uint8_t *out) {
/* The null implementation simply copies the recorded buffer to the output /* The null implementation simply copies the recorded buffer to the output
buffer and ignores the play buffer. */ buffer and ignores the play buffer. */
memcpy(out, rec, 256 * pa_frame_size(&ec->params.priv.null.out_ss)); memcpy(out, rec, 256 * pa_frame_size(&ec->params.null.out_ss));
} }
void pa_null_ec_done(pa_echo_canceller *ec) { void pa_null_ec_done(pa_echo_canceller *ec) {

View file

@@ -111,26 +111,26 @@ static bool pa_speex_ec_preprocessor_init(pa_echo_canceller *ec, pa_sample_spec
goto fail; goto fail;
} }
ec->params.priv.speex.pp_state = speex_preprocess_state_init(nframes, out_ss->rate); ec->params.speex.pp_state = speex_preprocess_state_init(nframes, out_ss->rate);
tmp = agc; tmp = agc;
speex_preprocess_ctl(ec->params.priv.speex.pp_state, SPEEX_PREPROCESS_SET_AGC, &tmp); speex_preprocess_ctl(ec->params.speex.pp_state, SPEEX_PREPROCESS_SET_AGC, &tmp);
tmp = denoise; tmp = denoise;
speex_preprocess_ctl(ec->params.priv.speex.pp_state, SPEEX_PREPROCESS_SET_DENOISE, &tmp); speex_preprocess_ctl(ec->params.speex.pp_state, SPEEX_PREPROCESS_SET_DENOISE, &tmp);
if (echo_suppress) { if (echo_suppress) {
if (echo_suppress_attenuation) if (echo_suppress_attenuation)
speex_preprocess_ctl(ec->params.priv.speex.pp_state, SPEEX_PREPROCESS_SET_ECHO_SUPPRESS, speex_preprocess_ctl(ec->params.speex.pp_state, SPEEX_PREPROCESS_SET_ECHO_SUPPRESS,
&echo_suppress_attenuation); &echo_suppress_attenuation);
if (echo_suppress_attenuation_active) { if (echo_suppress_attenuation_active) {
speex_preprocess_ctl(ec->params.priv.speex.pp_state, SPEEX_PREPROCESS_SET_ECHO_SUPPRESS_ACTIVE, speex_preprocess_ctl(ec->params.speex.pp_state, SPEEX_PREPROCESS_SET_ECHO_SUPPRESS_ACTIVE,
&echo_suppress_attenuation_active); &echo_suppress_attenuation_active);
} }
speex_preprocess_ctl(ec->params.priv.speex.pp_state, SPEEX_PREPROCESS_SET_ECHO_STATE, speex_preprocess_ctl(ec->params.speex.pp_state, SPEEX_PREPROCESS_SET_ECHO_STATE,
ec->params.priv.speex.state); ec->params.speex.state);
} }
pa_log_info("Loaded speex preprocessor with params: agc=%s, denoise=%s, echo_suppress=%s", pa_yes_no(agc), pa_log_info("Loaded speex preprocessor with params: agc=%s, denoise=%s, echo_suppress=%s", pa_yes_no(agc),
@@ -176,12 +176,12 @@ bool pa_speex_ec_init(pa_core *c, pa_echo_canceller *ec,
*nframes = pa_echo_canceller_blocksize_power2(rate, frame_size_ms); *nframes = pa_echo_canceller_blocksize_power2(rate, frame_size_ms);
pa_log_debug ("Using nframes %d, channels %d, rate %d", *nframes, out_ss->channels, out_ss->rate); pa_log_debug ("Using nframes %d, channels %d, rate %d", *nframes, out_ss->channels, out_ss->rate);
ec->params.priv.speex.state = speex_echo_state_init_mc(*nframes, (rate * filter_size_ms) / 1000, out_ss->channels, out_ss->channels); ec->params.speex.state = speex_echo_state_init_mc(*nframes, (rate * filter_size_ms) / 1000, out_ss->channels, out_ss->channels);
if (!ec->params.priv.speex.state) if (!ec->params.speex.state)
goto fail; goto fail;
speex_echo_ctl(ec->params.priv.speex.state, SPEEX_ECHO_SET_SAMPLING_RATE, &rate); speex_echo_ctl(ec->params.speex.state, SPEEX_ECHO_SET_SAMPLING_RATE, &rate);
if (!pa_speex_ec_preprocessor_init(ec, out_ss, *nframes, ma)) if (!pa_speex_ec_preprocessor_init(ec, out_ss, *nframes, ma))
goto fail; goto fail;
@@ -192,34 +192,34 @@ bool pa_speex_ec_init(pa_core *c, pa_echo_canceller *ec,
fail: fail:
if (ma) if (ma)
pa_modargs_free(ma); pa_modargs_free(ma);
if (ec->params.priv.speex.pp_state) { if (ec->params.speex.pp_state) {
speex_preprocess_state_destroy(ec->params.priv.speex.pp_state); speex_preprocess_state_destroy(ec->params.speex.pp_state);
ec->params.priv.speex.pp_state = NULL; ec->params.speex.pp_state = NULL;
} }
if (ec->params.priv.speex.state) { if (ec->params.speex.state) {
speex_echo_state_destroy(ec->params.priv.speex.state); speex_echo_state_destroy(ec->params.speex.state);
ec->params.priv.speex.state = NULL; ec->params.speex.state = NULL;
} }
return false; return false;
} }
void pa_speex_ec_run(pa_echo_canceller *ec, const uint8_t *rec, const uint8_t *play, uint8_t *out) { void pa_speex_ec_run(pa_echo_canceller *ec, const uint8_t *rec, const uint8_t *play, uint8_t *out) {
speex_echo_cancellation(ec->params.priv.speex.state, (const spx_int16_t *) rec, (const spx_int16_t *) play, speex_echo_cancellation(ec->params.speex.state, (const spx_int16_t *) rec, (const spx_int16_t *) play,
(spx_int16_t *) out); (spx_int16_t *) out);
/* preprecessor is run after AEC. This is not a mistake! */ /* preprecessor is run after AEC. This is not a mistake! */
if (ec->params.priv.speex.pp_state) if (ec->params.speex.pp_state)
speex_preprocess_run(ec->params.priv.speex.pp_state, (spx_int16_t *) out); speex_preprocess_run(ec->params.speex.pp_state, (spx_int16_t *) out);
} }
void pa_speex_ec_done(pa_echo_canceller *ec) { void pa_speex_ec_done(pa_echo_canceller *ec) {
if (ec->params.priv.speex.pp_state) { if (ec->params.speex.pp_state) {
speex_preprocess_state_destroy(ec->params.priv.speex.pp_state); speex_preprocess_state_destroy(ec->params.speex.pp_state);
ec->params.priv.speex.pp_state = NULL; ec->params.speex.pp_state = NULL;
} }
if (ec->params.priv.speex.state) { if (ec->params.speex.state) {
speex_echo_state_destroy(ec->params.priv.speex.state); speex_echo_state_destroy(ec->params.speex.state);
ec->params.priv.speex.state = NULL; ec->params.speex.state = NULL;
} }
} }

View file

@@ -259,8 +259,8 @@ bool pa_webrtc_ec_init(pa_core *c, pa_echo_canceller *ec,
if (trace) { if (trace) {
webrtc::Trace::CreateTrace(); webrtc::Trace::CreateTrace();
webrtc::Trace::set_level_filter(webrtc::kTraceAll); webrtc::Trace::set_level_filter(webrtc::kTraceAll);
ec->params.priv.webrtc.trace_callback = new PaWebrtcTraceCallback(); ec->params.webrtc.trace_callback = new PaWebrtcTraceCallback();
webrtc::Trace::SetTraceCallback((PaWebrtcTraceCallback *) ec->params.priv.webrtc.trace_callback); webrtc::Trace::SetTraceCallback((PaWebrtcTraceCallback *) ec->params.webrtc.trace_callback);
} }
pa_webrtc_ec_fixate_spec(rec_ss, rec_map, play_ss, play_map, out_ss, out_map); pa_webrtc_ec_fixate_spec(rec_ss, rec_map, play_ss, play_map, out_ss, out_map);
@@ -296,17 +296,17 @@ bool pa_webrtc_ec_init(pa_core *c, pa_echo_canceller *ec,
if (mobile && rm <= webrtc::EchoControlMobile::kEarpiece) { if (mobile && rm <= webrtc::EchoControlMobile::kEarpiece) {
/* Maybe this should be a knob, but we've got a lot of knobs already */ /* Maybe this should be a knob, but we've got a lot of knobs already */
apm->gain_control()->set_mode(webrtc::GainControl::kFixedDigital); apm->gain_control()->set_mode(webrtc::GainControl::kFixedDigital);
ec->params.priv.webrtc.agc = false; ec->params.webrtc.agc = false;
} else if (dgc) { } else if (dgc) {
apm->gain_control()->set_mode(webrtc::GainControl::kAdaptiveDigital); apm->gain_control()->set_mode(webrtc::GainControl::kAdaptiveDigital);
ec->params.priv.webrtc.agc = false; ec->params.webrtc.agc = false;
} else { } else {
apm->gain_control()->set_mode(webrtc::GainControl::kAdaptiveAnalog); apm->gain_control()->set_mode(webrtc::GainControl::kAdaptiveAnalog);
if (apm->gain_control()->set_analog_level_limits(0, WEBRTC_AGC_MAX_VOLUME) != apm->kNoError) { if (apm->gain_control()->set_analog_level_limits(0, WEBRTC_AGC_MAX_VOLUME) != apm->kNoError) {
pa_log("Failed to initialise AGC"); pa_log("Failed to initialise AGC");
goto fail; goto fail;
} }
ec->params.priv.webrtc.agc = true; ec->params.webrtc.agc = true;
} }
apm->gain_control()->Enable(true); apm->gain_control()->Enable(true);
@@ -315,11 +315,11 @@ bool pa_webrtc_ec_init(pa_core *c, pa_echo_canceller *ec,
if (vad) if (vad)
apm->voice_detection()->Enable(true); apm->voice_detection()->Enable(true);
ec->params.priv.webrtc.apm = apm; ec->params.webrtc.apm = apm;
ec->params.priv.webrtc.sample_spec = *out_ss; ec->params.webrtc.sample_spec = *out_ss;
ec->params.priv.webrtc.blocksize = (uint64_t)pa_bytes_per_second(out_ss) * BLOCK_SIZE_US / PA_USEC_PER_SEC; ec->params.webrtc.blocksize = (uint64_t)pa_bytes_per_second(out_ss) * BLOCK_SIZE_US / PA_USEC_PER_SEC;
*nframes = ec->params.priv.webrtc.blocksize / pa_frame_size(out_ss); *nframes = ec->params.webrtc.blocksize / pa_frame_size(out_ss);
ec->params.priv.webrtc.first = true; ec->params.webrtc.first = true;
pa_modargs_free(ma); pa_modargs_free(ma);
return true; return true;
@@ -327,9 +327,9 @@ bool pa_webrtc_ec_init(pa_core *c, pa_echo_canceller *ec,
fail: fail:
if (ma) if (ma)
pa_modargs_free(ma); pa_modargs_free(ma);
if (ec->params.priv.webrtc.trace_callback) { if (ec->params.webrtc.trace_callback) {
webrtc::Trace::ReturnTrace(); webrtc::Trace::ReturnTrace();
delete ((PaWebrtcTraceCallback *) ec->params.priv.webrtc.trace_callback); delete ((PaWebrtcTraceCallback *) ec->params.webrtc.trace_callback);
} if (apm) } if (apm)
delete apm; delete apm;
@@ -337,17 +337,17 @@ fail:
} }
void pa_webrtc_ec_play(pa_echo_canceller *ec, const uint8_t *play) { void pa_webrtc_ec_play(pa_echo_canceller *ec, const uint8_t *play) {
webrtc::AudioProcessing *apm = (webrtc::AudioProcessing*)ec->params.priv.webrtc.apm; webrtc::AudioProcessing *apm = (webrtc::AudioProcessing*)ec->params.webrtc.apm;
webrtc::AudioFrame play_frame; webrtc::AudioFrame play_frame;
const pa_sample_spec *ss = &ec->params.priv.webrtc.sample_spec; const pa_sample_spec *ss = &ec->params.webrtc.sample_spec;
play_frame.num_channels_ = ss->channels; play_frame.num_channels_ = ss->channels;
play_frame.sample_rate_hz_ = ss->rate; play_frame.sample_rate_hz_ = ss->rate;
play_frame.interleaved_ = true; play_frame.interleaved_ = true;
play_frame.samples_per_channel_ = ec->params.priv.webrtc.blocksize / pa_frame_size(ss); play_frame.samples_per_channel_ = ec->params.webrtc.blocksize / pa_frame_size(ss);
pa_assert(play_frame.samples_per_channel_ <= webrtc::AudioFrame::kMaxDataSizeSamples); pa_assert(play_frame.samples_per_channel_ <= webrtc::AudioFrame::kMaxDataSizeSamples);
memcpy(play_frame.data_, play, ec->params.priv.webrtc.blocksize); memcpy(play_frame.data_, play, ec->params.webrtc.blocksize);
apm->ProcessReverseStream(&play_frame); apm->ProcessReverseStream(&play_frame);
@@ -359,21 +359,21 @@ void pa_webrtc_ec_play(pa_echo_canceller *ec, const uint8_t *play) {
} }
void pa_webrtc_ec_record(pa_echo_canceller *ec, const uint8_t *rec, uint8_t *out) { void pa_webrtc_ec_record(pa_echo_canceller *ec, const uint8_t *rec, uint8_t *out) {
webrtc::AudioProcessing *apm = (webrtc::AudioProcessing*)ec->params.priv.webrtc.apm; webrtc::AudioProcessing *apm = (webrtc::AudioProcessing*)ec->params.webrtc.apm;
webrtc::AudioFrame out_frame; webrtc::AudioFrame out_frame;
const pa_sample_spec *ss = &ec->params.priv.webrtc.sample_spec; const pa_sample_spec *ss = &ec->params.webrtc.sample_spec;
pa_cvolume v; pa_cvolume v;
int old_volume, new_volume; int old_volume, new_volume;
out_frame.num_channels_ = ss->channels; out_frame.num_channels_ = ss->channels;
out_frame.sample_rate_hz_ = ss->rate; out_frame.sample_rate_hz_ = ss->rate;
out_frame.interleaved_ = true; out_frame.interleaved_ = true;
out_frame.samples_per_channel_ = ec->params.priv.webrtc.blocksize / pa_frame_size(ss); out_frame.samples_per_channel_ = ec->params.webrtc.blocksize / pa_frame_size(ss);
pa_assert(out_frame.samples_per_channel_ <= webrtc::AudioFrame::kMaxDataSizeSamples); pa_assert(out_frame.samples_per_channel_ <= webrtc::AudioFrame::kMaxDataSizeSamples);
memcpy(out_frame.data_, rec, ec->params.priv.webrtc.blocksize); memcpy(out_frame.data_, rec, ec->params.webrtc.blocksize);
if (ec->params.priv.webrtc.agc) { if (ec->params.webrtc.agc) {
pa_cvolume_init(&v); pa_cvolume_init(&v);
pa_echo_canceller_get_capture_volume(ec, &v); pa_echo_canceller_get_capture_volume(ec, &v);
old_volume = webrtc_volume_from_pa(pa_cvolume_avg(&v)); old_volume = webrtc_volume_from_pa(pa_cvolume_avg(&v));
@@ -383,13 +383,13 @@ void pa_webrtc_ec_record(pa_echo_canceller *ec, const uint8_t *rec, uint8_t *out
apm->set_stream_delay_ms(0); apm->set_stream_delay_ms(0);
apm->ProcessStream(&out_frame); apm->ProcessStream(&out_frame);
if (ec->params.priv.webrtc.agc) { if (ec->params.webrtc.agc) {
if (PA_UNLIKELY(ec->params.priv.webrtc.first)) { if (PA_UNLIKELY(ec->params.webrtc.first)) {
/* We start at a sane default volume (taken from the Chromium /* We start at a sane default volume (taken from the Chromium
* condition on the experimental AGC in audio_processing.h). This is * condition on the experimental AGC in audio_processing.h). This is
* needed to make sure that there's enough energy in the capture * needed to make sure that there's enough energy in the capture
* signal for the AGC to work */ * signal for the AGC to work */
ec->params.priv.webrtc.first = false; ec->params.webrtc.first = false;
new_volume = WEBRTC_AGC_START_VOLUME; new_volume = WEBRTC_AGC_START_VOLUME;
} else { } else {
new_volume = apm->gain_control()->stream_analog_level(); new_volume = apm->gain_control()->stream_analog_level();
@@ -401,14 +401,14 @@ void pa_webrtc_ec_record(pa_echo_canceller *ec, const uint8_t *rec, uint8_t *out
} }
} }
memcpy(out, out_frame.data_, ec->params.priv.webrtc.blocksize); memcpy(out, out_frame.data_, ec->params.webrtc.blocksize);
} }
void pa_webrtc_ec_set_drift(pa_echo_canceller *ec, float drift) { void pa_webrtc_ec_set_drift(pa_echo_canceller *ec, float drift) {
webrtc::AudioProcessing *apm = (webrtc::AudioProcessing*)ec->params.priv.webrtc.apm; webrtc::AudioProcessing *apm = (webrtc::AudioProcessing*)ec->params.webrtc.apm;
const pa_sample_spec *ss = &ec->params.priv.webrtc.sample_spec; const pa_sample_spec *ss = &ec->params.webrtc.sample_spec;
apm->echo_cancellation()->set_stream_drift_samples(drift * ec->params.priv.webrtc.blocksize / pa_frame_size(ss)); apm->echo_cancellation()->set_stream_drift_samples(drift * ec->params.webrtc.blocksize / pa_frame_size(ss));
} }
void pa_webrtc_ec_run(pa_echo_canceller *ec, const uint8_t *rec, const uint8_t *play, uint8_t *out) { void pa_webrtc_ec_run(pa_echo_canceller *ec, const uint8_t *rec, const uint8_t *play, uint8_t *out) {
@@ -417,13 +417,13 @@ void pa_webrtc_ec_run(pa_echo_canceller *ec, const uint8_t *rec, const uint8_t *
} }
void pa_webrtc_ec_done(pa_echo_canceller *ec) { void pa_webrtc_ec_done(pa_echo_canceller *ec) {
if (ec->params.priv.webrtc.trace_callback) { if (ec->params.webrtc.trace_callback) {
webrtc::Trace::ReturnTrace(); webrtc::Trace::ReturnTrace();
delete ((PaWebrtcTraceCallback *) ec->params.priv.webrtc.trace_callback); delete ((PaWebrtcTraceCallback *) ec->params.webrtc.trace_callback);
} }
if (ec->params.priv.webrtc.apm) { if (ec->params.webrtc.apm) {
delete (webrtc::AudioProcessing*)ec->params.priv.webrtc.apm; delete (webrtc::AudioProcessing*)ec->params.webrtc.apm;
ec->params.priv.webrtc.apm = NULL; ec->params.webrtc.apm = NULL;
} }
} }