]> code.delx.au - pulseaudio/blob - src/pulsecore/time-smoother.c
make sure the smoother code can deal with incoming data that is out-of-order; start...
[pulseaudio] / src / pulsecore / time-smoother.c
1 /* $Id$ */
2
3 /***
4 This file is part of PulseAudio.
5
6 Copyright 2007 Lennart Poettering
7
8 PulseAudio is free software; you can redistribute it and/or modify
9 it under the terms of the GNU Lesser General Public License as
10 published by the Free Software Foundation; either version 2.1 of the
11 License, or (at your option) any later version.
12
13 PulseAudio is distributed in the hope that it will be useful, but
14 WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 Lesser General Public License for more details.
17
18 You should have received a copy of the GNU Lesser General Public
19 License along with PulseAudio; if not, write to the Free Software
20 Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
21 USA.
22 ***/
23
24 #ifdef HAVE_CONFIG_H
25 #include <config.h>
26 #endif
27
28 #include <stdio.h>
29
30 #include <pulse/sample.h>
31 #include <pulse/xmalloc.h>
32
33 #include <pulsecore/macro.h>
34
35 #include "time-smoother.h"
36
37 #define HISTORY_MAX 64
38
39 /*
40 * Implementation of a time smoothing algorithm to synchronize remote
41 * clocks to a local one. Evens out noise, adjusts to clock skew and
42 * allows cheap estimations of the remote time while clock updates may
43  * be seldom and received in non-equidistant intervals.
44 *
45 * Basically, we estimate the gradient of received clock samples in a
46 * certain history window (of size 'history_time') with linear
47 * regression. With that info we estimate the remote time in
48 * 'adjust_time' ahead and smoothen our current estimation function
49 * towards that point with a 3rd order polynomial interpolation with
50 * fitting derivatives. (more or less a b-spline)
51 *
52  * The larger 'history_time' is chosen the better we will suppress
53 * noise -- but we'll adjust to clock skew slower..
54 *
55 * The larger 'adjust_time' is chosen the smoother our estimation
56 * function will be -- but we'll adjust to clock skew slower, too.
57 *
58 * If 'monotonic' is TRUE the resulting estimation function is
59 * guaranteed to be monotonic.
60 */
61
struct pa_smoother {
    pa_usec_t adjust_time, history_time;

    /* Offset subtracted from every incoming x value; grows while paused */
    pa_usec_t time_offset;

    pa_usec_t px, py; /* Point p, where we want to reach stability */
    double dp; /* Gradient we want at point p */

    pa_usec_t ex, ey; /* Point e, which we estimated before and need to smooth to */
    double de; /* Gradient we estimated for point e */
    pa_usec_t ry; /* The original y value for ex */

    /* History of last measurements */
    pa_usec_t history_x[HISTORY_MAX], history_y[HISTORY_MAX];
    unsigned history_idx, n_history;

    /* To even out for monotonicity */
    pa_usec_t last_y, last_x;

    /* Cached parameters for our interpolation polynomial y=ax^3+bx^2+cx */
    double a, b, c;
    pa_bool_t abc_valid;

    pa_bool_t monotonic:1;
    pa_bool_t paused:1;

    pa_usec_t pause_time;

    /* Minimum number of history entries required before we trust the regression */
    unsigned min_history;
};
92
93 pa_smoother* pa_smoother_new(pa_usec_t adjust_time, pa_usec_t history_time, pa_bool_t monotonic, unsigned min_history) {
94 pa_smoother *s;
95
96 pa_assert(adjust_time > 0);
97 pa_assert(history_time > 0);
98 pa_assert(min_history >= 2);
99 pa_assert(min_history <= HISTORY_MAX);
100
101 s = pa_xnew(pa_smoother, 1);
102 s->adjust_time = adjust_time;
103 s->history_time = history_time;
104 s->time_offset = 0;
105 s->monotonic = monotonic;
106
107 s->px = s->py = 0;
108 s->dp = 1;
109
110 s->ex = s->ey = s->ry = 0;
111 s->de = 1;
112
113 s->history_idx = 0;
114 s->n_history = 0;
115
116 s->last_y = s->last_x = 0;
117
118 s->abc_valid = FALSE;
119
120 s->paused = FALSE;
121
122 s->min_history = min_history;
123
124 return s;
125 }
126
/* Free a smoother. All state is stored inline in the struct, so
 * releasing the struct itself is sufficient. */
void pa_smoother_free(pa_smoother* s) {
    pa_assert(s);

    pa_xfree(s);
}
132
/* Wrap a ring buffer index back into [0, HISTORY_MAX) */
#define REDUCE(x) \
    do { \
        x = (x) % HISTORY_MAX; \
    } while(FALSE)

/* Advance a ring buffer index by one, wrapping around */
#define REDUCE_INC(x) \
    do { \
        x = ((x)+1) % HISTORY_MAX; \
    } while(FALSE)
142
143
144 static void drop_old(pa_smoother *s, pa_usec_t x) {
145
146 /* Drop items from history which are too old, but make sure to
147 * always keep min_history in the history */
148
149 while (s->n_history > s->min_history) {
150
151 if (s->history_x[s->history_idx] + s->history_time >= x)
152 /* This item is still valid, and thus all following ones
153 * are too, so let's quit this loop */
154 break;
155
156 /* Item is too old, let's drop it */
157 REDUCE_INC(s->history_idx);
158
159 s->n_history --;
160 }
161 }
162
/* Store measurement (x, y) in the ring buffer history. A sample whose
 * x matches an existing entry overwrites that entry's y in place. */
static void add_to_history(pa_smoother *s, pa_usec_t x, pa_usec_t y) {
    unsigned j, i;
    pa_assert(s);

    /* First try to update an existing history entry */
    i = s->history_idx;
    for (j = s->n_history; j > 0; j--) {

        if (s->history_x[i] == x) {
            s->history_y[i] = y;
            return;
        }

        REDUCE_INC(i);
    }

    /* Drop old entries */
    drop_old(s, x);

    /* Calculate position for new entry */
    /* NOTE(review): a brand-new x older than the newest stored entry is
     * appended at the tail, which breaks the oldest-first ordering that
     * drop_old() relies on — presumably callers only submit non-matching
     * x values in increasing order; verify against call sites */
    j = s->history_idx + s->n_history;
    REDUCE(j);

    /* Fill in entry */
    s->history_x[j] = x;
    s->history_y[j] = y;

    /* Adjust counter */
    s->n_history ++;

    /* And make sure we don't store more entries than fit in */
    /* (when full, the write above already overwrote the oldest slot;
     * advance history_idx so it points at the new oldest entry) */
    if (s->n_history > HISTORY_MAX) {
        s->history_idx += s->n_history - HISTORY_MAX;
        REDUCE(s->history_idx);
        s->n_history = HISTORY_MAX;
    }
}
200
201 static double avg_gradient(pa_smoother *s, pa_usec_t x) {
202 unsigned i, j, c = 0;
203 int64_t ax = 0, ay = 0, k, t;
204 double r;
205
206 /* Too few measurements, assume gradient of 1 */
207 if (s->n_history < s->min_history)
208 return 1;
209
210 /* First, calculate average of all measurements */
211 i = s->history_idx;
212 for (j = s->n_history; j > 0; j--) {
213
214 ax += s->history_x[i];
215 ay += s->history_y[i];
216 c++;
217
218 REDUCE_INC(i);
219 }
220
221 pa_assert(c >= s->min_history);
222 ax /= c;
223 ay /= c;
224
225 /* Now, do linear regression */
226 k = t = 0;
227
228 i = s->history_idx;
229 for (j = s->n_history; j > 0; j--) {
230 int64_t dx, dy;
231
232 dx = (int64_t) s->history_x[i] - ax;
233 dy = (int64_t) s->history_y[i] - ay;
234
235 k += dx*dy;
236 t += dx*dx;
237
238 REDUCE_INC(i);
239 }
240
241 r = (double) k / t;
242
243 return (s->monotonic && r < 0) ? 0 : r;
244 }
245
/* Compute (and cache) the coefficients of the cubic interpolation
 * polynomial between point e and point p. */
static void calc_abc(pa_smoother *s) {
    pa_usec_t ex, ey, px, py;
    int64_t kx, ky;
    double de, dp;

    pa_assert(s);

    /* Coefficients stay valid until the next pa_smoother_put() */
    if (s->abc_valid)
        return;

    /* We have two points: (ex|ey) and (px|py) with two gradients at
     * these points de and dp. We do a polynomial
     * interpolation of degree 3 with these 6 values */

    ex = s->ex; ey = s->ey;
    px = s->px; py = s->py;
    de = s->de; dp = s->dp;

    pa_assert(ex < px);

    /* To increase the dynamic range and simplify calculation, we
     * move these values to the origin */
    kx = (int64_t) px - (int64_t) ex;
    ky = (int64_t) py - (int64_t) ey;

    /* Calculate a, b, c for y=ax^3+bx^2+cx so that
     *   y(0) = 0,  y(kx) = ky,  y'(0) = de,  y'(kx) = dp */
    s->c = de;
    s->b = (((double) (3*ky)/kx - dp - 2*de)) / kx;
    s->a = (dp/kx - 2*s->b - de/kx) / (3*kx);

    s->abc_valid = TRUE;
}
278
/* Evaluate the current estimation function at x, storing the value in
 * *y and, if deriv is non-NULL, the first derivative in *deriv. Left
 * of px the cubic interpolation is used, right of px we extrapolate
 * linearly with gradient dp. */
static void estimate(pa_smoother *s, pa_usec_t x, pa_usec_t *y, double *deriv) {
    pa_assert(s);
    pa_assert(y);

    if (x >= s->px) {
        int64_t t;

        /* The requested point is right of the point where we wanted
         * to be on track again, thus just linearly estimate */

        t = (int64_t) s->py + (int64_t) (s->dp * (x - s->px));

        /* Clamp: the extrapolation may go negative but the result
         * type is unsigned */
        if (t < 0)
            t = 0;

        *y = (pa_usec_t) t;

        if (deriv)
            *deriv = s->dp;

    } else {

        /* Ok, we're not yet on track, thus let's interpolate, and
         * make sure that the first derivative is smooth */

        calc_abc(s);

        /* Move to origin */
        x -= s->ex;

        /* Horner scheme */
        *y = (pa_usec_t) ((double) x * (s->c + (double) x * (s->b + (double) x * s->a)));

        /* Move back from origin */
        *y += s->ey;

        /* Horner scheme */
        if (deriv)
            *deriv = s->c + ((double) x * (s->b*2 + (double) x * s->a*3));
    }

    /* Guarantee monotonicity */
    /* NOTE(review): only the derivative is clamped here; a negative
     * polynomial value would wrap when cast to pa_usec_t above —
     * presumably the interpolation never dips below zero in practice;
     * verify */
    if (s->monotonic) {

        if (deriv && *deriv < 0)
            *deriv = 0;
    }
}
327
/* Feed a new measurement (x, y) into the smoother: local time x maps
 * to remote time y. Updates the history, re-runs the regression and
 * schedules a new target point p at x + adjust_time. */
void pa_smoother_put(pa_smoother *s, pa_usec_t x, pa_usec_t y) {
    pa_usec_t ney;
    double nde;
    pa_bool_t is_new;

    pa_assert(s);

    /* Fix up x value */
    if (s->paused)
        x = s->pause_time;

    x = PA_LIKELY(x >= s->time_offset) ? x - s->time_offset : 0;

    /* Only samples at or after the previous update point move point e;
     * older (out-of-order) samples merely amend the history below */
    is_new = x >= s->ex;

    if (is_new) {
        /* First, we calculate the position we'd estimate for x, so that
         * we can adjust our position smoothly from this one */
        estimate(s, x, &ney, &nde);
        s->ex = x; s->ey = ney; s->de = nde;

        /* Remember the raw measurement for x; py is derived from it below */
        s->ry = y;
    }

    /* Then, we add the new measurement to our history */
    add_to_history(s, x, y);

    /* And determine the average gradient of the history */
    s->dp = avg_gradient(s, x);

    /* And calculate when we want to be on track again */
    s->px = s->ex + s->adjust_time;
    s->py = s->ry + s->dp *s->adjust_time;

    /* Force recalculation of the interpolation polynomial */
    s->abc_valid = FALSE;

/*     pa_log_debug("put(%llu | %llu) = %llu", (unsigned long long) (x + s->time_offset), (unsigned long long) x, (unsigned long long) y); */
}
366
367 pa_usec_t pa_smoother_get(pa_smoother *s, pa_usec_t x) {
368 pa_usec_t y;
369
370 pa_assert(s);
371
372 /* Fix up x value */
373 if (s->paused)
374 x = s->pause_time;
375
376 x = PA_LIKELY(x >= s->time_offset) ? x - s->time_offset : 0;
377
378 estimate(s, x, &y, NULL);
379
380 if (s->monotonic) {
381
382 /* Make sure the querier doesn't jump forth and back. */
383 pa_assert(x >= s->last_x);
384 s->last_x = x;
385
386 if (y < s->last_y)
387 y = s->last_y;
388 else
389 s->last_y = y;
390 }
391
392 /* pa_log_debug("get(%llu | %llu) = %llu", (unsigned long long) (x + s->time_offset), (unsigned long long) x, (unsigned long long) y); */
393
394 return y;
395 }
396
/* Replace the offset that is subtracted from every incoming x value */
void pa_smoother_set_time_offset(pa_smoother *s, pa_usec_t offset) {
    pa_assert(s);

    s->time_offset = offset;

/*     pa_log_debug("offset(%llu)", (unsigned long long) offset); */
}
404
405 void pa_smoother_pause(pa_smoother *s, pa_usec_t x) {
406 pa_assert(s);
407
408 if (s->paused)
409 return;
410
411 /* pa_log_debug("pause(%llu)", (unsigned long long) x); */
412
413 s->paused = TRUE;
414 s->pause_time = x;
415 }
416
417 void pa_smoother_resume(pa_smoother *s, pa_usec_t x) {
418 pa_assert(s);
419
420 if (!s->paused)
421 return;
422
423 pa_assert(x >= s->pause_time);
424
425 /* pa_log_debug("resume(%llu)", (unsigned long long) x); */
426
427 s->paused = FALSE;
428 s->time_offset += x - s->pause_time;
429 }
430
431 pa_usec_t pa_smoother_translate(pa_smoother *s, pa_usec_t x, pa_usec_t y_delay) {
432 pa_usec_t ney;
433 double nde;
434
435 pa_assert(s);
436
437 /* Fix up x value */
438 if (s->paused)
439 x = s->pause_time;
440
441 x = PA_LIKELY(x >= s->time_offset) ? x - s->time_offset : 0;
442
443 estimate(s, x, &ney, &nde);
444
445 /* Play safe and take the larger gradient, so that we wakeup
446 * earlier when this is used for sleeping */
447 if (s->dp > nde)
448 nde = s->dp;
449
450 /* pa_log_debug("translate(%llu) = %llu (%0.2f)", (unsigned long long) y_delay, (unsigned long long) ((double) y_delay / nde), nde); */
451
452 return (pa_usec_t) ((double) y_delay / nde);
453 }