Merge remote-tracking branch 'qatar/master'

* qatar/master:
  lavc: remove "legacy" mpegvideo decoder.
  iv8: assemble packets to return complete frames
  pulse: documentation
  pulse: introduce pulseaudio input
  remove the zork pcm seek test

Conflicts:
	configure
	libavdevice/Makefile
	libavdevice/alldevices.c
	libavdevice/avdevice.h
	libavdevice/pulse.c

Merged-by: Michael Niedermayer <michaelni@gmx.at>
commit 8d4e44993a
 doc/indevs.texi     | 83
 libavdevice/pulse.c | 37
 libavformat/iv8.c   | 71

@@ -390,6 +390,89 @@ ffmpeg -f oss -i /dev/dsp /tmp/oss.wav
For more information about OSS see:
@url{http://manuals.opensound.com/usersguide/dsp.html}
@section pulse
PulseAudio input device.
To enable this input device during configuration you need libpulse-simple
installed on your system.
The filename to provide to the input device is a source device or the
string "default".
To list the pulse source devices and their properties you can invoke
the command @file{pactl list sources}.
@example
avconv -f pulse -i default /tmp/pulse.wav
@end example
@subsection @var{server} AVOption
The syntax is:
@example
-server @var{server name}
@end example
Connects to a specific server.
@subsection @var{name} AVOption
The syntax is:
@example
-name @var{application name}
@end example
Specify the application name pulse will use when showing active clients;
by default it is "libav".
@subsection @var{stream_name} AVOption
The syntax is:
@example
-stream_name @var{stream name}
@end example
Specify the stream name pulse will use when showing active streams;
by default it is "record".
@subsection @var{sample_rate} AVOption
The syntax is:
@example
-sample_rate @var{samplerate}
@end example
Specify the sample rate in Hz; by default 48kHz is used.
@subsection @var{channels} AVOption
The syntax is:
@example
-channels @var{N}
@end example
Specify the number of channels in use; by default 2 (stereo) is set.
@subsection @var{frame_size} AVOption
The syntax is:
@example
-frame_size @var{bytes}
@end example
Specify the number of bytes per frame; by default it is set to 1024.
@subsection @var{fragment_size} AVOption
The syntax is:
@example
-fragment_size @var{bytes}
@end example
Specify the minimal buffering fragment in PulseAudio; it affects the
audio latency. By default it is unset.
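As a combined, purely illustrative sketch (the option values and the
@code{myapp} application name are arbitrary choices, not defaults), the
options documented above can be set together on the command line:
@example
avconv -f pulse -name myapp -sample_rate 44100 -channels 1 -i default /tmp/pulse.wav
@end example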
@section sndio
sndio input device.

@@ -21,11 +21,9 @@
/**
 * @file
 * PulseAudio input using the simple API.
 * @author Luca Barbato <lu_zero@gentoo.org>
 */

#include <pulse/simple.h>
@@ -95,9 +93,9 @@ static av_cold int pulse_read_header(AVFormatContext *s,
    device = s->filename;

    pd->s = pa_simple_new(pd->server, pd->name,
                          PA_STREAM_RECORD,
                          device, pd->stream_name, &ss,
                          NULL, &attr, &ret);

    if (!pd->s) {
        av_log(s, AV_LOG_ERROR, "pa_simple_new failed: %s\n",
@@ -122,7 +120,7 @@ static int pulse_read_packet(AVFormatContext *s, AVPacket *pkt)
    int res;
    pa_usec_t latency;
    uint64_t frame_duration =
        (pd->frame_size*1000000LL) / (pd->sample_rate * pd->channels);

    if (av_new_packet(pkt, pd->frame_size) < 0) {
        return AVERROR(ENOMEM);
@@ -145,10 +143,10 @@ static int pulse_read_packet(AVFormatContext *s, AVPacket *pkt)
        pd->pts = -latency;
    }

    pkt->pts = pd->pts;

    pd->pts += frame_duration;

    return 0;
}
@@ -163,20 +161,13 @@ static av_cold int pulse_close(AVFormatContext *s)
#define D AV_OPT_FLAG_DECODING_PARAM

static const AVOption options[] = {
    { "server", "pulse server name", OFFSET(server), AV_OPT_TYPE_STRING, {.str = NULL}, 0, 0, D },
    { "name", "application name", OFFSET(name), AV_OPT_TYPE_STRING, {.str = LIBAVFORMAT_IDENT}, 0, 0, D },
    { "stream_name", "stream description", OFFSET(stream_name), AV_OPT_TYPE_STRING, {.str = "record"}, 0, 0, D },
    { "sample_rate", "sample rate in Hz", OFFSET(sample_rate), AV_OPT_TYPE_INT, {.dbl = 48000}, 1, INT_MAX, D },
    { "channels", "number of audio channels", OFFSET(channels), AV_OPT_TYPE_INT, {.dbl = 2}, 1, INT_MAX, D },
    { "frame_size", "number of bytes per frame", OFFSET(frame_size), AV_OPT_TYPE_INT, {.dbl = 1024}, 1, INT_MAX, D },
    { "fragment_size", "buffering size, affects latency and cpu usage", OFFSET(fragment_size), AV_OPT_TYPE_INT, {.dbl = -1}, -1, INT_MAX, D },
    { NULL },
};

@@ -55,33 +55,56 @@ static int read_header(AVFormatContext *s, AVFormatParameters *ap)
static int read_packet(AVFormatContext *s, AVPacket *pkt)
{
    int ret, size, pts, type, flags;
    int first_pkt      = 0;
    int frame_complete = 0;

    while (!frame_complete) {

        type  = avio_rb16(s->pb); // 257 or 258
        size  = avio_rb16(s->pb);
        flags = avio_rb16(s->pb); //some flags, 0x80 indicates end of frame
                avio_rb16(s->pb); //packet number
        pts   = avio_rb32(s->pb);
                avio_rb32(s->pb); //6A 13 E3 88

        frame_complete = flags & 0x80;

        size -= 12;
        if (size < 1)
            return -1;

        if (type == 258) {
            avio_skip(s->pb, size);
            frame_complete = 0;
            continue;
        }

        if (!first_pkt) {
            ret = av_get_packet(s->pb, pkt, size);
            if (ret < 0)
                return ret;
            first_pkt = 1;
            pkt->pts  = pts;
            pkt->pos -= 16;
        } else {
            ret = av_append_packet(s->pb, pkt, size);
            if (ret < 0) {
                av_log(s, AV_LOG_ERROR, "failed to grow packet\n");
                av_free_packet(pkt);
                return ret;
            }
        }

        if (ret < size) {
            av_log(s, AV_LOG_ERROR, "Truncated packet! Read %d of %d bytes\n",
                   ret, size);
            pkt->flags |= AV_PKT_FLAG_CORRUPT;
            break;
        }
    }

    pkt->stream_index = 0;

    return 0;
}

AVInputFormat ff_iv8_demuxer = {
