/*
 * Pulseaudio input
 * Copyright (c) 2011 Luca Barbato <lu_zero@gentoo.org>
 * Copyright 2004-2006 Lennart Poettering
 * Copyright (c) 2014 Michael Niedermayer <michaelni@gmx.at>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <pulse/rtclock.h>
#include <pulse/error.h>

#include "libavutil/internal.h"
#include "libavutil/opt.h"
#include "libavutil/time.h"

#include "libavformat/avformat.h"
#include "libavformat/internal.h"

#include "pulse_audio_common.h"
#include "timefilter.h"

#define DEFAULT_CODEC_ID AV_NE(AV_CODEC_ID_PCM_S16BE, AV_CODEC_ID_PCM_S16LE)

typedef struct PulseData {
    AVClass *class;
    char *server;
    char *name;
    char *stream_name;
    int  sample_rate;
    int  channels;
    int  frame_size;
    int  fragment_size;

    pa_threaded_mainloop *mainloop;
    pa_context *context;
    pa_stream *stream;

    size_t pa_frame_size;

    TimeFilter *timefilter;
    int last_period;
    int wallclock;
} PulseData;

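/* Error-checking helpers: jump to the given label with AVERROR_EXTERNAL when
 * an expression fails, or when the context/stream has entered a bad state. */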
#define CHECK_SUCCESS_GOTO(rerror, expression, label)   \
    do {                                                \
        if (!(expression)) {                            \
            rerror = AVERROR_EXTERNAL;                  \
            goto label;                                 \
        }                                               \
    } while (0)

#define CHECK_DEAD_GOTO(p, rerror, label)                                               \
    do {                                                                                \
        if (!(p)->context || !PA_CONTEXT_IS_GOOD(pa_context_get_state((p)->context)) || \
            !(p)->stream  || !PA_STREAM_IS_GOOD(pa_stream_get_state((p)->stream))) {    \
            rerror = AVERROR_EXTERNAL;                                                  \
            goto label;                                                                 \
        }                                                                               \
    } while (0)

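/* PulseAudio callbacks: each one just wakes up the thread blocked in
 * pa_threaded_mainloop_wait() so it can re-examine the context/stream state. */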
static void context_state_cb(pa_context *c, void *userdata) {
    PulseData *p = userdata;

    switch (pa_context_get_state(c)) {
    case PA_CONTEXT_READY:
    case PA_CONTEXT_TERMINATED:
    case PA_CONTEXT_FAILED:
        pa_threaded_mainloop_signal(p->mainloop, 0);
        break;
    }
}

static void stream_state_cb(pa_stream *s, void *userdata) {
    PulseData *p = userdata;

    switch (pa_stream_get_state(s)) {
    case PA_STREAM_READY:
    case PA_STREAM_FAILED:
    case PA_STREAM_TERMINATED:
        pa_threaded_mainloop_signal(p->mainloop, 0);
        break;
    }
}

static void stream_request_cb(pa_stream *s, size_t length, void *userdata) {
    PulseData *p = userdata;

    pa_threaded_mainloop_signal(p->mainloop, 0);
}

static void stream_latency_update_cb(pa_stream *s, void *userdata) {
    PulseData *p = userdata;

    pa_threaded_mainloop_signal(p->mainloop, 0);
}

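/* Tear down the stream, context and threaded mainloop and free the timefilter.
 * Safe to call on a partially initialized PulseData. */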
static av_cold int pulse_close(AVFormatContext *s)
{
    PulseData *pd = s->priv_data;

    if (pd->mainloop)
        pa_threaded_mainloop_stop(pd->mainloop);

    if (pd->stream)
        pa_stream_unref(pd->stream);
    pd->stream = NULL;

    if (pd->context) {
        pa_context_disconnect(pd->context);
        pa_context_unref(pd->context);
    }
    pd->context = NULL;

    if (pd->mainloop)
        pa_threaded_mainloop_free(pd->mainloop);
    pd->mainloop = NULL;

    ff_timefilter_destroy(pd->timefilter);
    pd->timefilter = NULL;

    return 0;
}

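/* Connect to the PulseAudio server, create a recording stream with the
 * requested sample spec, wait until context and stream are ready, then export
 * the negotiated parameters to the AVStream and set up the timefilter. */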
static av_cold int pulse_read_header(AVFormatContext *s)
{
    PulseData *pd = s->priv_data;
    AVStream *st;
    char *device = NULL;
    int ret;
    enum AVCodecID codec_id =
        s->audio_codec_id == AV_CODEC_ID_NONE ? DEFAULT_CODEC_ID : s->audio_codec_id;
    const pa_sample_spec ss = { ff_codec_id_to_pulse_format(codec_id),
                                pd->sample_rate,
                                pd->channels };

    pa_buffer_attr attr = { -1 };
    pa_channel_map cmap;
    const pa_buffer_attr *queried_attr;

    pa_channel_map_init_extend(&cmap, pd->channels, PA_CHANNEL_MAP_WAVEEX);

    st = avformat_new_stream(s, NULL);
    if (!st) {
        av_log(s, AV_LOG_ERROR, "Cannot add stream\n");
        return AVERROR(ENOMEM);
    }

    attr.fragsize = pd->fragment_size;

    if (s->url[0] != '\0' && strcmp(s->url, "default"))
        device = s->url;

    if (!(pd->mainloop = pa_threaded_mainloop_new())) {
        pulse_close(s);
        return AVERROR_EXTERNAL;
    }

    if (!(pd->context = pa_context_new(pa_threaded_mainloop_get_api(pd->mainloop), pd->name))) {
        pulse_close(s);
        return AVERROR_EXTERNAL;
    }

    pa_context_set_state_callback(pd->context, context_state_cb, pd);

    if (pa_context_connect(pd->context, pd->server, 0, NULL) < 0) {
        pulse_close(s);
        return AVERROR(pa_context_errno(pd->context));
    }

    pa_threaded_mainloop_lock(pd->mainloop);

    if (pa_threaded_mainloop_start(pd->mainloop) < 0) {
        ret = -1;
        goto unlock_and_fail;
    }

    for (;;) {
        pa_context_state_t state;

        state = pa_context_get_state(pd->context);

        if (state == PA_CONTEXT_READY)
            break;

        if (!PA_CONTEXT_IS_GOOD(state)) {
            ret = AVERROR(pa_context_errno(pd->context));
            goto unlock_and_fail;
        }

        /* Wait until the context is ready */
        pa_threaded_mainloop_wait(pd->mainloop);
    }

    if (!(pd->stream = pa_stream_new(pd->context, pd->stream_name, &ss, &cmap))) {
        ret = AVERROR(pa_context_errno(pd->context));
        goto unlock_and_fail;
    }

    pa_stream_set_state_callback(pd->stream, stream_state_cb, pd);
    pa_stream_set_read_callback(pd->stream, stream_request_cb, pd);
    pa_stream_set_write_callback(pd->stream, stream_request_cb, pd);
    pa_stream_set_latency_update_callback(pd->stream, stream_latency_update_cb, pd);

    ret = pa_stream_connect_record(pd->stream, device, &attr,
                                   PA_STREAM_INTERPOLATE_TIMING
                                   | (pd->fragment_size == -1 ? PA_STREAM_ADJUST_LATENCY : 0)
                                   | PA_STREAM_AUTO_TIMING_UPDATE);

    if (ret < 0) {
        ret = AVERROR(pa_context_errno(pd->context));
        goto unlock_and_fail;
    }

    for (;;) {
        pa_stream_state_t state;

        state = pa_stream_get_state(pd->stream);

        if (state == PA_STREAM_READY)
            break;

        if (!PA_STREAM_IS_GOOD(state)) {
            ret = AVERROR(pa_context_errno(pd->context));
            goto unlock_and_fail;
        }

        /* Wait until the stream is ready */
        pa_threaded_mainloop_wait(pd->mainloop);
    }

    /* Query actual fragment size */
    queried_attr = pa_stream_get_buffer_attr(pd->stream);
    if (!queried_attr || queried_attr->fragsize > INT_MAX / 100) {
        ret = AVERROR_EXTERNAL;
        goto unlock_and_fail;
    }
    pd->fragment_size = queried_attr->fragsize;
    pd->pa_frame_size = pa_frame_size(&ss);

    pa_threaded_mainloop_unlock(pd->mainloop);

    /* take real parameters */
    st->codecpar->codec_type  = AVMEDIA_TYPE_AUDIO;
    st->codecpar->codec_id    = codec_id;
    st->codecpar->sample_rate = pd->sample_rate;
    st->codecpar->channels    = pd->channels;
    avpriv_set_pts_info(st, 64, 1, 1000000);  /* 64 bits pts in us */

    pd->timefilter = ff_timefilter_new(1000000.0 / pd->sample_rate,
                                       pd->fragment_size / pd->pa_frame_size, 1.5E-6);

    if (!pd->timefilter) {
        pulse_close(s);
        return AVERROR(ENOMEM);
    }

    return 0;

unlock_and_fail:
    pa_threaded_mainloop_unlock(pd->mainloop);

    pulse_close(s);
    return ret;
}

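/* Pull data from the capture stream with pa_stream_peek()/pa_stream_drop()
 * until roughly one fragment has been copied into the packet, then timestamp
 * it with the latency-corrected wallclock, smoothed by the timefilter. */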
static int pulse_read_packet(AVFormatContext *s, AVPacket *pkt)
{
    PulseData *pd = s->priv_data;
    int ret;
    size_t read_length;
    const void *read_data = NULL;
    int64_t dts;
    pa_usec_t latency;
    int negative;
    ptrdiff_t pos = 0;

    pa_threaded_mainloop_lock(pd->mainloop);

    CHECK_DEAD_GOTO(pd, ret, unlock_and_fail);

    while (pos < pd->fragment_size) {
        int r;

        r = pa_stream_peek(pd->stream, &read_data, &read_length);
        CHECK_SUCCESS_GOTO(ret, r == 0, unlock_and_fail);

        if (read_length <= 0) {
            pa_threaded_mainloop_wait(pd->mainloop);
            CHECK_DEAD_GOTO(pd, ret, unlock_and_fail);
        } else if (!read_data) {
            /* There's a hole in the stream, skip it. We could generate
             * silence, but that wouldn't work for compressed streams. */
            r = pa_stream_drop(pd->stream);
            CHECK_SUCCESS_GOTO(ret, r == 0, unlock_and_fail);
        } else {
            if (!pos) {
                if (av_new_packet(pkt, pd->fragment_size) < 0) {
                    ret = AVERROR(ENOMEM);
                    goto unlock_and_fail;
                }

                dts = av_gettime();
                pa_operation_unref(pa_stream_update_timing_info(pd->stream, NULL, NULL));

                if (pa_stream_get_latency(pd->stream, &latency, &negative) >= 0) {
                    if (negative) {
                        dts += latency;
                    } else
                        dts -= latency;
                } else {
                    av_log(s, AV_LOG_WARNING, "pa_stream_get_latency() failed\n");
                }
            }
            if (pkt->size - pos < read_length) {
                if (pos)
                    break;
                pa_stream_drop(pd->stream);
                /* Oversized fragment??? */
                ret = AVERROR_EXTERNAL;
                goto unlock_and_fail;
            }
            memcpy(pkt->data + pos, read_data, read_length);
            pos += read_length;
            pa_stream_drop(pd->stream);
        }
    }

    pa_threaded_mainloop_unlock(pd->mainloop);

    av_shrink_packet(pkt, pos);

    if (pd->wallclock)
        pkt->pts = ff_timefilter_update(pd->timefilter, dts, pd->last_period);
    pd->last_period = pkt->size / pd->pa_frame_size;

    return 0;

unlock_and_fail:
    av_packet_unref(pkt);

    pa_threaded_mainloop_unlock(pd->mainloop);
    return ret;
}

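/* List available PulseAudio devices for the avdevice device enumeration API. */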
static int pulse_get_device_list(AVFormatContext *h, AVDeviceInfoList *device_list)
{
    PulseData *s = h->priv_data;
    return ff_pulse_audio_get_devices(device_list, s->server, 0);
}

#define OFFSET(a) offsetof(PulseData, a)
#define D AV_OPT_FLAG_DECODING_PARAM

static const AVOption options[] = {
    { "server",        "set PulseAudio server",                             OFFSET(server),        AV_OPT_TYPE_STRING, {.str = NULL},               0, 0,       D },
    { "name",          "set application name",                              OFFSET(name),          AV_OPT_TYPE_STRING, {.str = LIBAVFORMAT_IDENT},  0, 0,       D },
    { "stream_name",   "set stream description",                            OFFSET(stream_name),   AV_OPT_TYPE_STRING, {.str = "record"},           0, 0,       D },
    { "sample_rate",   "set sample rate in Hz",                             OFFSET(sample_rate),   AV_OPT_TYPE_INT,    {.i64 = 48000},              1, INT_MAX, D },
    { "channels",      "set number of audio channels",                      OFFSET(channels),      AV_OPT_TYPE_INT,    {.i64 = 2},                  1, INT_MAX, D },
    { "frame_size",    "set number of bytes per frame",                     OFFSET(frame_size),    AV_OPT_TYPE_INT,    {.i64 = 1024},               1, INT_MAX, D },
    { "fragment_size", "set buffering size, affects latency and cpu usage", OFFSET(fragment_size), AV_OPT_TYPE_INT,    {.i64 = -1},                -1, INT_MAX, D },
    { "wallclock",     "set the initial pts using the current time",        OFFSET(wallclock),     AV_OPT_TYPE_INT,    {.i64 = 1},                 -1, 1,       D },
    { NULL },
};

static const AVClass pulse_demuxer_class = {
    .class_name = "Pulse indev",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
    .category   = AV_CLASS_CATEGORY_DEVICE_AUDIO_INPUT,
};

AVInputFormat ff_pulse_demuxer = {
    .name            = "pulse",
    .long_name       = NULL_IF_CONFIG_SMALL("Pulse audio input"),
    .priv_data_size  = sizeof(PulseData),
    .read_header     = pulse_read_header,
    .read_packet     = pulse_read_packet,
    .read_close      = pulse_close,
    .get_device_list = pulse_get_device_list,
    .flags           = AVFMT_NOFILE,
    .priv_class      = &pulse_demuxer_class,
};