Get rid of 'default' min/max latencies; simplify things by just having absolute boundaries

This commit is contained in:
Lennart Poettering 2009-03-25 00:25:17 +01:00
parent a7246bd635
commit 9151107cb9
3 changed files with 9 additions and 23 deletions

View file

@ -49,7 +49,6 @@
#define MAX_MIX_CHANNELS 32
#define MIX_BUFFER_LENGTH (PA_PAGE_SIZE)
#define ABSOLUTE_MIN_LATENCY (500)
#define DEFAULT_MIN_LATENCY (4*PA_USEC_PER_MSEC)
#define ABSOLUTE_MAX_LATENCY (10*PA_USEC_PER_SEC)
static PA_DEFINE_CHECK_TYPE(pa_sink, pa_msgobject);
@ -232,8 +231,8 @@ pa_sink* pa_sink_new(
s->thread_info.max_request = 0;
s->thread_info.requested_latency_valid = FALSE;
s->thread_info.requested_latency = 0;
s->thread_info.min_latency = DEFAULT_MIN_LATENCY;
s->thread_info.max_latency = DEFAULT_MIN_LATENCY;
s->thread_info.min_latency = ABSOLUTE_MIN_LATENCY;
s->thread_info.max_latency = ABSOLUTE_MAX_LATENCY;
pa_assert_se(pa_idxset_put(core->sinks, s, &s->index) >= 0);
@ -1857,21 +1856,15 @@ void pa_sink_set_latency_range(pa_sink *s, pa_usec_t min_latency, pa_usec_t max_
pa_sink_assert_ref(s);
/* min_latency == 0: no limit
* min_latency == (size_t) -1: default limit
* min_latency anything else: specified limit
*
* Similar for max_latency */
if (min_latency == (pa_usec_t) -1)
min_latency = DEFAULT_MIN_LATENCY;
if (min_latency < ABSOLUTE_MIN_LATENCY)
min_latency = ABSOLUTE_MIN_LATENCY;
if (max_latency == (pa_usec_t) -1)
max_latency = min_latency;
if (max_latency > ABSOLUTE_MAX_LATENCY || max_latency <= 0)
if (max_latency <= 0 ||
max_latency > ABSOLUTE_MAX_LATENCY)
max_latency = ABSOLUTE_MAX_LATENCY;
pa_assert(min_latency <= max_latency);