class RTPSource
{
uint8_t* fFrameBuf; // frame buffer - filled until one complete frame is assembled
int fFrameBufPos; // frame buffer index - current number of bytes in the frame buffer
FrameHandlerFunc fFrameHandlerFunc; // callback - invoked when one complete frame is ready
void* fFrameHandlerFuncData; // opaque user data passed to the frame callback
uint8_t* fExtraData; // out-of-band SPS/PPS data (from SDP), prepended before the first frame
unsigned fExtraDataSize;
...
}
// Skip a leading Annex-B start code (0x00 0x00 ... 0x01) at the head of `buf`.
// Returns the number of bytes consumed: 0 when no start code is present,
// when the buffer is too short, or when the stream looks invalid.
int trimStartCode(uint8_t *buf, int len)
{
if (len < 4)
return 0;

uint8_t *p = buf;
uint8_t *last = buf + len - 1;

// A start code must begin with at least two zero bytes.
if (p[0] != 0x00 || p[1] != 0x00)
return 0;

// Consume the run of zero bytes, stopping before the final byte.
while (p < last && *p == 0x00)
p++;

if (*p != 0x01) {
// No terminating 0x01 after the zero run - invalid stream, consume nothing.
DPRINTF("invalid stream, 0x%02x\n", *p);
return 0;
}

// Consume the 0x01 terminator as well.
return (int)(p + 1 - buf);
}
// Append `len` bytes of payload to the frame buffer at the current write
// position. On overflow the partially-assembled frame is discarded (write
// position reset) before appending; a payload that could never fit in the
// buffer at all is dropped outright instead of overflowing it.
void RTPSource::copyToFrameBuffer(uint8_t *buf, int len)
{
if (buf == NULL || len <= 0)
return; // nothing to append

if (len >= FRAME_BUFFER_SIZE) {
// BUGFIX: the payload alone exceeds the buffer, so copying would
// overflow even after resetting fFrameBufPos - drop it entirely.
DPRINTF("RTP payload too large (%d), dropped %s\n", len, fCodecName);
return;
}

if (fFrameBufPos+len >= FRAME_BUFFER_SIZE) {
DPRINTF("RTP Frame Buffer overflow %s\n", fCodecName);
fFrameBufPos = 0; // discard the partial frame and start over
}
memmove(&fFrameBuf[fFrameBufPos], buf, len);
fFrameBufPos += len;
}
// Discard any partially-assembled frame by rewinding the frame buffer
// write position to the start. The buffer memory itself is left untouched.
void RTPSource::resetFrameBuf()
{
fFrameBufPos = 0;
}
// Convert an RTP timestamp (in units of fTimestampFrequency ticks per
// second) to milliseconds.
// Returns 0 when the clock frequency has not been set, instead of
// dividing by zero (undefined behavior in the original).
uint64_t RTPSource::getMediaTimestamp(uint32_t timestamp)
{
if (fTimestampFrequency == 0)
return 0; // unconfigured source - avoid division by zero

// Promote to 64-bit before scaling so timestamp * 1000 cannot overflow.
return (uint64_t)timestamp * 1000 / fTimestampFrequency;
}
void H264RTPSource::putStartCode()
{
fFrameBuf[fFrameBufPos++] = 0x00;
fFrameBuf[fFrameBufPos++] = 0x00;
fFrameBuf[fFrameBufPos++] = 0x00;
fFrameBuf[fFrameBufPos++] = 0x01;
}
// Reassemble H.264 NAL units from one RTP payload (RFC 3984/6184).
// Handles FU-A fragmentation (type 28), STAP-A aggregation (type 24),
// SPS/PPS (7/8, accumulated), IDR (5) and single NAL unit packets;
// delivers each completed frame - prefixed with Annex-B start codes -
// through fFrameHandlerFunc.
void H264RTPSource::processFrame(RTPPacketBuffer *packet)
{
uint8_t *buf = (uint8_t *)packet->payload();
int len = packet->payloadLen();

// Some streams carry an Annex-B start code in the payload; strip it.
int offset = trimStartCode(buf, len);
buf = &buf[offset];
len -= offset;
if (len <= 0)
return; // BUGFIX: empty payload - reading buf[0] below would be UB

uint8_t *buf_ptr = buf;
bool isCompleteFrame = false;
// BUGFIX: was uint32_t, silently truncating getMediaTimestamp()'s
// 64-bit millisecond result.
uint64_t media_timestamp = getMediaTimestamp(packet->timestamp());
uint8_t nalUnitType = (buf[0]&0x1F);

if (RTSPCommonEnv::nDebugFlag&DEBUG_FLAG_RTP_PAYLOAD)
DPRINTF("nal_type: %d, size: %d\n", nalUnitType, len);

// Before the first frame, emit out-of-band SPS/PPS (if any) so decoders
// can initialize without waiting for in-band parameter sets.
if (!fIsStartFrame) {
if (fExtraData) {
putStartCode();
copyToFrameBuffer(fExtraData, fExtraDataSize);
}
fIsStartFrame = true;
}

switch (nalUnitType)
{
case 28: { // FU-A: one NAL unit fragmented across several packets
uint8_t startBit = buf[1]&0x80;
uint8_t endBit = buf[1]&0x40;
if (startBit) {
// Rebuild the original NAL header from the FU indicator (F/NRI
// bits) and the FU header (type bits), then drop the indicator.
buf_ptr++; len--;
buf[1] = (buf[0]&0xE0) + (buf[1]&0x1F);
putStartCode();
} else {
// Continuation fragment: skip FU indicator and FU header.
buf_ptr += 2; len -= 2;
}
copyToFrameBuffer(buf_ptr, len);
isCompleteFrame = (endBit != 0); // end bit marks the last fragment
break;
}
case 5: { // IDR picture - a complete frame on its own
putStartCode();
copyToFrameBuffer(buf_ptr, len);
isCompleteFrame = true;
break;
}
case 7: { // SPS - accumulate, deliver with the following slice
putStartCode();
copyToFrameBuffer(buf_ptr, len);
isCompleteFrame = false;
break;
}
case 8: { // PPS - accumulate, deliver with the following slice
putStartCode();
copyToFrameBuffer(buf_ptr, len);
isCompleteFrame = false;
break;
}
case 24: { // STAP-A: several NAL units, each prefixed by a 16-bit size
buf_ptr++; len--; // skip the STAP-A NAL header byte
while (len > 3)
{
uint16_t staplen = (uint16_t)((buf_ptr[0]<<8) | (buf_ptr[1]));
// BUGFIX: the 2-byte size field is still counted in `len`, so the
// unit fits only when staplen + 2 <= len ("staplen > len" allowed
// a read past the end of the payload).
if (staplen + 2 > len) {
// BUGFIX: format string was missing the %d for `len`.
DPRINTF("STAP-A process error, staplen: %d, len: %d\n", staplen, len);
break;
}
buf_ptr += 2; len -= 2;
nalUnitType = buf_ptr[0]&0x1F;
putStartCode();
copyToFrameBuffer(buf_ptr, staplen);
buf_ptr += staplen; len -= staplen;
// Each aggregated NAL unit is delivered as its own frame.
if (fFrameHandlerFunc)
fFrameHandlerFunc(fFrameHandlerFuncData, fFrameType, media_timestamp, fFrameBuf, fFrameBufPos);
resetFrameBuf();
}
break;
}
default: // single NAL unit packet (non-IDR slice, SEI, ...)
putStartCode();
copyToFrameBuffer(buf_ptr, len);
isCompleteFrame = true;
break;
}

if (isCompleteFrame) {
if (fFrameHandlerFunc)
fFrameHandlerFunc(fFrameHandlerFuncData, fFrameType, media_timestamp, fFrameBuf, fFrameBufPos);
resetFrameBuf();
}
}
2012년 6월 8일 금요일
H.264 RFC3984 NAL 패킷처리 - H.264 RFC3984 NAL Packet Handling
ffmpeg을 이용하여 멀티플랫폼 기반 미디어 스트리밍 라이브러리를 개발하고 있습니다.
피드 구독하기:
댓글 (Atom)
댓글 없음:
댓글 쓰기