Clean up Monitor::Capture and the return values from Capture and CaptureAndRecord: < 0 is an error, 0 means things are OK but there is no video frame, and > 0 means we have a frame. Also apply Google code style.
parent 54d77519c0
commit cab8c4cd5e
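For context, the return-value convention described above can be read from the caller's side roughly as follows. This is only an illustrative sketch: the FakeCamera type and the printed messages are invented for the example and are not part of this commit; it simply mirrors the < 0 / 0 / > 0 contract, in the brace style the diff moves toward.

#include <cstdio>

// Hypothetical stand-in for a Camera subclass, used only to illustrate the
// return-value contract this commit settles on:
//   < 0 -> error, 0 -> OK but no video frame yet, > 0 -> we have a frame.
struct FakeCamera {
  int Capture() { return 1; }  // pretend a frame was captured
};

int main() {
  FakeCamera camera;
  int captureResult = camera.Capture();
  if ( captureResult < 0 ) {
    std::printf("capture error, e.g. loss of signal\n");
  } else if ( captureResult == 0 ) {
    std::printf("no error, but no video frame; try again\n");
  } else {
    std::printf("got a frame; process and/or record it\n");
  }
  return 0;
}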
@@ -128,10 +128,10 @@ int cURLCamera::Capture( Image &image ) {
   /* Grab the mutex to ensure exclusive access to the shared data */
   lock();

-  while (!frameComplete) {
+  while ( !frameComplete ) {

     /* If the work thread did a reset, reset our local variables */
-    if(bReset) {
+    if ( bReset ) {
       SubHeadersParsingComplete = false;
       frame_content_length = 0;
       frame_content_type.clear();
@@ -139,25 +139,25 @@ int cURLCamera::Capture( Image &image ) {
       bReset = false;
     }

-    if(mode == MODE_UNSET) {
+    if ( mode == MODE_UNSET ) {
       /* Don't have a mode yet. Sleep while waiting for data */
       nRet = pthread_cond_wait(&data_available_cond,&shareddata_mutex);
-      if(nRet != 0) {
+      if ( nRet != 0 ) {
         Error("Failed waiting for available data condition variable: %s",strerror(nRet));
         return -20;
       }
     }

-    if(mode == MODE_STREAM) {
+    if ( mode == MODE_STREAM ) {

       /* Subheader parsing */
-      while(!SubHeadersParsingComplete && !need_more_data) {
+      while( !SubHeadersParsingComplete && !need_more_data ) {

         size_t crlf_start, crlf_end, crlf_size;
         std::string subheader;

         /* Check if the buffer contains something */
-        if(databuffer.empty()) {
+        if ( databuffer.empty() ) {
           /* Empty buffer, wait for data */
           need_more_data = true;
           break;
@@ -165,14 +165,14 @@ int cURLCamera::Capture( Image &image ) {

         /* Find crlf start */
         crlf_start = memcspn(databuffer,"\r\n",databuffer.size());
-        if(crlf_start == databuffer.size()) {
+        if ( crlf_start == databuffer.size() ) {
           /* Not found, wait for more data */
           need_more_data = true;
           break;
         }

         /* See if we have enough data for determining crlf length */
-        if(databuffer.size() < crlf_start+5) {
+        if ( databuffer.size() < crlf_start+5 ) {
           /* Need more data */
           need_more_data = true;
           break;
@@ -183,13 +183,13 @@ int cURLCamera::Capture( Image &image ) {
         crlf_size = (crlf_start + crlf_end) - crlf_start;

         /* Is this the end of a previous stream? (This is just before the boundary) */
-        if(crlf_start == 0) {
+        if ( crlf_start == 0 ) {
           databuffer.consume(crlf_size);
           continue;
         }

         /* Check for invalid CRLF size */
-        if(crlf_size > 4) {
+        if ( crlf_size > 4 ) {
           Error("Invalid CRLF length");
         }

@@ -209,7 +209,7 @@ int cURLCamera::Capture( Image &image ) {

         /* Find where the data in this header starts */
         size_t subheader_data_start = subheader.rfind(' ');
-        if(subheader_data_start == std::string::npos) {
+        if ( subheader_data_start == std::string::npos ) {
           subheader_data_start = subheader.find(':');
         }

@@ -317,7 +317,7 @@ int FfmpegCamera::Capture( Image &image ) {
     } // end if packet.stream_index == mVideoStreamId
     zm_av_packet_unref( &packet );
   } // end while ! frameComplete
-  return (0);
+  return 1;
 } // FfmpegCamera::Capture

 int FfmpegCamera::PostCapture() {
@@ -1010,7 +1010,7 @@ else if ( packet.pts && video_last_pts > packet.pts ) {
         Debug( 3, "Not framecomplete after av_read_frame" );
       } // end if frameComplete
     } else if ( packet.stream_index == mAudioStreamId ) { //FIXME best way to copy all other streams
-        frameComplete = 1;
+      frameComplete = 1;
       if ( videoStore ) {
         if ( record_audio ) {
           if ( have_video_keyframe ) {
@@ -1032,6 +1032,8 @@ else if ( packet.pts && video_last_pts > packet.pts ) {
       } else {
         Debug(4, "Have audio packet, but not recording atm" );
       }
+      zm_av_packet_unref( &packet );
+      return 0;
     } else {
 #if LIBAVUTIL_VERSION_CHECK(56, 23, 0, 23, 0)
       Debug( 3, "Some other stream index %d, %s", packet.stream_index, av_get_media_type_string( mFormatContext->streams[packet.stream_index]->codecpar->codec_type) );
@@ -1043,7 +1045,7 @@ else if ( packet.pts && video_last_pts > packet.pts ) {
     // the packet contents are ref counted... when queuing, we allocate another packet and reference it with that one, so we should always need to unref here, which should not affect the queued version.
     zm_av_packet_unref( &packet );
   } // end while ! frameComplete
-  return (frameCount);
+  return frameCount;
 } // end FfmpegCamera::CaptureAndRecord


@@ -23,8 +23,7 @@
 #if HAVE_LIBVLC

 // Do all the buffer checking work here to avoid unnecessary locking
-void* LibvlcLockBuffer(void* opaque, void** planes)
-{
+void* LibvlcLockBuffer(void* opaque, void** planes) {
   LibvlcPrivateData* data = (LibvlcPrivateData*)opaque;
   data->mutex.lock();

@@ -36,15 +35,12 @@ void* LibvlcLockBuffer(void* opaque, void** planes)
   return NULL;
 }

-void LibvlcUnlockBuffer(void* opaque, void* picture, void *const *planes)
-{
+void LibvlcUnlockBuffer(void* opaque, void* picture, void *const *planes) {
   LibvlcPrivateData* data = (LibvlcPrivateData*)opaque;

   bool newFrame = false;
-  for(uint32_t i = 0; i < data->bufferSize; i++)
-  {
-    if(data->buffer[i] != data->prevBuffer[i])
-    {
+  for( uint32_t i = 0; i < data->bufferSize; i++ ) {
+    if ( data->buffer[i] != data->prevBuffer[i] ) {
       newFrame = true;
       break;
     }
@@ -54,8 +50,7 @@ void LibvlcUnlockBuffer(void* opaque, void* picture, void *const *planes)
   time_t now;
   time(&now);
   // Return frames slightly faster than 1fps (if time() supports greater than one second resolution)
-  if(newFrame || difftime(now, data->prevTime) >= 0.8)
-  {
+  if ( newFrame || difftime(now, data->prevTime) >= 0.8 ) {
     data->prevTime = now;
     data->newImage.updateValueSignal(true);
   }
@@ -90,58 +85,46 @@ LibvlcCamera::LibvlcCamera( int p_id, const std::string &p_path, const std::stri
     Panic("Unexpected colours: %d",colours);
   }

-  if ( capture )
-  {
+  if ( capture ) {
     Initialise();
   }
 }

-LibvlcCamera::~LibvlcCamera()
-{
-  if ( capture )
-  {
+LibvlcCamera::~LibvlcCamera() {
+  if ( capture ) {
     Terminate();
   }
-  if(mLibvlcMediaPlayer != NULL)
-  {
+  if ( mLibvlcMediaPlayer != NULL ) {
     libvlc_media_player_release(mLibvlcMediaPlayer);
     mLibvlcMediaPlayer = NULL;
   }
-  if(mLibvlcMedia != NULL)
-  {
+  if ( mLibvlcMedia != NULL ) {
     libvlc_media_release(mLibvlcMedia);
     mLibvlcMedia = NULL;
   }
-  if(mLibvlcInstance != NULL)
-  {
+  if ( mLibvlcInstance != NULL ) {
     libvlc_release(mLibvlcInstance);
     mLibvlcInstance = NULL;
   }
-  if (mOptArgV != NULL)
-  {
+  if ( mOptArgV != NULL ) {
     delete[] mOptArgV;
   }
 }

-void LibvlcCamera::Initialise()
-{
+void LibvlcCamera::Initialise() {
 }

-void LibvlcCamera::Terminate()
-{
+void LibvlcCamera::Terminate() {
   libvlc_media_player_stop(mLibvlcMediaPlayer);
-  if(mLibvlcData.buffer != NULL)
-  {
+  if(mLibvlcData.buffer != NULL) {
     zm_freealigned(mLibvlcData.buffer);
   }
-  if(mLibvlcData.prevBuffer != NULL)
-  {
+  if(mLibvlcData.prevBuffer != NULL) {
     zm_freealigned(mLibvlcData.prevBuffer);
   }
 }

-int LibvlcCamera::PrimeCapture()
-{
+int LibvlcCamera::PrimeCapture() {
   Info("Priming capture from %s", mPath.c_str());

   StringVector opVect = split(Options(), ",");
@@ -154,8 +137,7 @@ int LibvlcCamera::PrimeCapture()
   else if ( Method() == "rtpRtspHttp" )
     opVect.push_back("--rtsp-http");

-  if (opVect.size() > 0)
-  {
+  if ( opVect.size() > 0 ) {
     mOptArgV = new char*[opVect.size()];
     Debug(2, "Number of Options: %d",opVect.size());
     for (size_t i=0; i< opVect.size(); i++) {
@@ -166,7 +148,7 @@ int LibvlcCamera::PrimeCapture()
   }

   mLibvlcInstance = libvlc_new (opVect.size(), (const char* const*)mOptArgV);
-  if(mLibvlcInstance == NULL)
+  if ( mLibvlcInstance == NULL )
     Fatal("Unable to create libvlc instance due to: %s", libvlc_errmsg());

   mLibvlcMedia = libvlc_media_new_location(mLibvlcInstance, mPath.c_str());
@@ -189,17 +171,15 @@ int LibvlcCamera::PrimeCapture()

   libvlc_media_player_play(mLibvlcMediaPlayer);

-  return(0);
+  return 0;
 }

-int LibvlcCamera::PreCapture()
-{
+int LibvlcCamera::PreCapture() {
   return(0);
 }

 // Should not return -1 as cancels capture. Always wait for image if available.
-int LibvlcCamera::Capture( Image &image )
-{
+int LibvlcCamera::Capture( Image &image ) {
   while(!mLibvlcData.newImage.getValueImmediate())
     mLibvlcData.newImage.getUpdatedValue(1);

@@ -208,25 +188,15 @@ int LibvlcCamera::Capture( Image &image )
   mLibvlcData.newImage.setValueImmediate(false);
   mLibvlcData.mutex.unlock();

-  return (0);
+  return 1;
 }

 // Should not return -1 as cancels capture. Always wait for image if available.
-int LibvlcCamera::CaptureAndRecord(Image &image, timeval recording, char* event_directory)
-{
-  while(!mLibvlcData.newImage.getValueImmediate())
-    mLibvlcData.newImage.getUpdatedValue(1);
-
-  mLibvlcData.mutex.lock();
-  image.Assign(width, height, colours, subpixelorder, mLibvlcData.buffer, width * height * mBpp);
-  mLibvlcData.newImage.setValueImmediate(false);
-  mLibvlcData.mutex.unlock();
-
+int LibvlcCamera::CaptureAndRecord(Image &image, timeval recording, char* event_directory) {
   return (0);
 }

-int LibvlcCamera::PostCapture()
-{
+int LibvlcCamera::PostCapture() {
   return(0);
 }

@@ -2878,6 +2878,12 @@ int Monitor::Capture() {
       captureResult = camera->CaptureAndRecord(*(next_buffer.image),
           video_store_data->recording,
           video_store_data->event_file );
+      // CaptureAndRecord returns # of frames captured I think
+      //if ( ( videowriter == H264PASSTHROUGH ) && ( captureResult > 0 ) ) {
+      if ( captureResult > 0 ) {
+        //video_store_data->frameNumber = captureResult;
+        captureResult = 0;
+      }
     } else {
       captureResult = camera->Capture(*(next_buffer.image));
     }
@@ -2889,33 +2895,28 @@ int Monitor::Capture() {

   } else {
     //Check if FFMPEG camera
-    if ( (videowriter == H264PASSTHROUGH ) && camera->SupportsNativeVideo() ) {
+    if ( (videowriter == H264PASSTHROUGH) && camera->SupportsNativeVideo() ) {
       //Warning("ZMC: Recording: %d", video_store_data->recording);
-      captureResult = camera->CaptureAndRecord(*capture_image, video_store_data->recording, video_store_data->event_file);
-    }else{
+      // Should return -1 on error, like loss of signal. Should return 0 if ok but no video frame. > 0 for received a frame.
+      captureResult = camera->CaptureAndRecord(
+          *capture_image,
+          video_store_data->recording,
+          video_store_data->event_file
+          );
+    } else {
       /* Capture directly into image buffer, avoiding the need to memcpy() */
       captureResult = camera->Capture(*capture_image);
     }
   }

-  // CaptureAndRecord returns # of frames captured I think
-  if ( ( videowriter == H264PASSTHROUGH ) && ( captureResult > 0 ) ) {
-    //video_store_data->frameNumber = captureResult;
-    captureResult = 0;
-  }
-
-  if ( captureResult != 0 ) {
+  if ( captureResult < 0 ) {
     // Unable to capture image for temporary reason
     // Fake a signal loss image
     Rgb signalcolor;
     signalcolor = rgb_convert(signal_check_colour, ZM_SUBPIX_ORDER_BGR); /* HTML colour code is actually BGR in memory, we want RGB */
     capture_image->Fill(signalcolor);
-    captureResult = 0;
-  } else {
-    captureResult = 1;
-  }
-
-  if ( captureResult == 1 ) {
+  } else if ( captureResult > 0 ) {

     /* Deinterlacing */
     if ( deinterlacing_value == 1 ) {
@@ -2978,40 +2979,38 @@ int Monitor::Capture() {
     shared_data->last_write_time = image_buffer[index].timestamp->tv_sec;

     image_count++;
   }

-    if ( image_count && fps_report_interval && !(image_count%fps_report_interval) ) {
-      time_t now = image_buffer[index].timestamp->tv_sec;
-      fps = double(fps_report_interval)/(now-last_fps_time);
-      //Info( "%d -> %d -> %d", fps_report_interval, now, last_fps_time );
-      //Info( "%d -> %d -> %lf -> %lf", now-last_fps_time, fps_report_interval/(now-last_fps_time), double(fps_report_interval)/(now-last_fps_time), fps );
-      Info( "%s: %d - Capturing at %.2lf fps", name, image_count, fps );
-      last_fps_time = now;
-      static char sql[ZM_SQL_SML_BUFSIZ];
-      snprintf( sql, sizeof(sql), "UPDATE Monitors SET CaptureFPS = '%.2lf' WHERE Id = '%d'", fps, id );
-      if ( mysql_query( &dbconn, sql ) ) {
-        Error( "Can't run query: %s", mysql_error( &dbconn ) );
-      }
+  if ( image_count && fps_report_interval && !(image_count%fps_report_interval) ) {
+    time_t now = image_buffer[index].timestamp->tv_sec;
+    fps = double(fps_report_interval)/(now-last_fps_time);
+    //Info( "%d -> %d -> %d", fps_report_interval, now, last_fps_time );
+    //Info( "%d -> %d -> %lf -> %lf", now-last_fps_time, fps_report_interval/(now-last_fps_time), double(fps_report_interval)/(now-last_fps_time), fps );
+    Info( "%s: %d - Capturing at %.2lf fps", name, image_count, fps );
+    last_fps_time = now;
+    static char sql[ZM_SQL_SML_BUFSIZ];
+    snprintf( sql, sizeof(sql), "UPDATE Monitors SET CaptureFPS = '%.2lf' WHERE Id = '%d'", fps, id );
+    if ( mysql_query( &dbconn, sql ) ) {
+      Error( "Can't run query: %s", mysql_error( &dbconn ) );
+    }
   }

-    // Icon: I'm not sure these should be here. They have nothing to do with capturing
-    if ( shared_data->action & GET_SETTINGS ) {
-      shared_data->brightness = camera->Brightness();
-      shared_data->hue = camera->Hue();
-      shared_data->colour = camera->Colour();
-      shared_data->contrast = camera->Contrast();
-      shared_data->action &= ~GET_SETTINGS;
-    }
-    if ( shared_data->action & SET_SETTINGS ) {
-      camera->Brightness( shared_data->brightness );
-      camera->Hue( shared_data->hue );
-      camera->Colour( shared_data->colour );
-      camera->Contrast( shared_data->contrast );
-      shared_data->action &= ~SET_SETTINGS;
-    }
-    return( 0 );
-  } // end if captureResults == 1 which is success I think
-  shared_data->signal = false;
-  return( -1 );
+  // Icon: I'm not sure these should be here. They have nothing to do with capturing
+  if ( shared_data->action & GET_SETTINGS ) {
+    shared_data->brightness = camera->Brightness();
+    shared_data->hue = camera->Hue();
+    shared_data->colour = camera->Colour();
+    shared_data->contrast = camera->Contrast();
+    shared_data->action &= ~GET_SETTINGS;
+  }
+  if ( shared_data->action & SET_SETTINGS ) {
+    camera->Brightness( shared_data->brightness );
+    camera->Hue( shared_data->hue );
+    camera->Colour( shared_data->colour );
+    camera->Contrast( shared_data->contrast );
+    shared_data->action &= ~SET_SETTINGS;
+  }
+  return captureResult;
 }

 void Monitor::TimestampImage( Image *ts_image, const struct timeval *ts_time ) const {
@@ -1063,23 +1063,18 @@ int RemoteCameraHttp::GetResponse()
   return( 0 );
 }

-int RemoteCameraHttp::PreCapture()
-{
-  if ( sd < 0 )
-  {
+int RemoteCameraHttp::PreCapture() {
+  if ( sd < 0 ) {
     Connect();
-    if ( sd < 0 )
-    {
+    if ( sd < 0 ) {
       Error( "Unable to connect to camera" );
       return( -1 );
     }
     mode = SINGLE_IMAGE;
     buffer.clear();
   }
-  if ( mode == SINGLE_IMAGE )
-  {
-    if ( SendRequest() < 0 )
-    {
+  if ( mode == SINGLE_IMAGE ) {
+    if ( SendRequest() < 0 ) {
       Error( "Unable to send request" );
       Disconnect();
       return( -1 );
@@ -1088,50 +1083,43 @@ int RemoteCameraHttp::PreCapture()
   return( 0 );
 }

-int RemoteCameraHttp::Capture( Image &image )
-{
+int RemoteCameraHttp::Capture( Image &image ) {
   int content_length = GetResponse();
-  if ( content_length == 0 )
-  {
+  if ( content_length == 0 ) {
     Warning( "Unable to capture image, retrying" );
-    return( 1 );
+    return 0;
   }
-  if ( content_length < 0 )
-  {
+  if ( content_length < 0 ) {
     Error( "Unable to get response, disconnecting" );
     Disconnect();
-    return( -1 );
+    return -1;
   }
-  switch( format )
-  {
+  switch( format ) {
     case JPEG :
     {
-      if ( !image.DecodeJpeg( buffer.extract( content_length ), content_length, colours, subpixelorder ) )
-      {
+      if ( !image.DecodeJpeg( buffer.extract( content_length ), content_length, colours, subpixelorder ) ) {
        Error( "Unable to decode jpeg" );
        Disconnect();
-        return( -1 );
+        return -1;
      }
      break;
    }
    case X_RGB :
    {
-      if ( content_length != (long)image.Size() )
-      {
+      if ( content_length != (long)image.Size() ) {
        Error( "Image length mismatch, expected %d bytes, content length was %d", image.Size(), content_length );
        Disconnect();
-        return( -1 );
+        return -1;
      }
      image.Assign( width, height, colours, subpixelorder, buffer, imagesize );
      break;
    }
    case X_RGBZ :
    {
-      if ( !image.Unzip( buffer.extract( content_length ), content_length ) )
-      {
+      if ( !image.Unzip( buffer.extract( content_length ), content_length ) ) {
        Error( "Unable to unzip RGB image" );
        Disconnect();
-        return( -1 );
+        return -1;
      }
      image.Assign( width, height, colours, subpixelorder, buffer, imagesize );
      break;
@@ -1140,13 +1128,12 @@ int RemoteCameraHttp::Capture( Image &image )
    {
      Error( "Unexpected image format encountered" );
      Disconnect();
-      return( -1 );
+      return -1;
    }
   }
-  return( 0 );
+  return 1;
 }

-int RemoteCameraHttp::PostCapture()
-{
-  return( 0 );
+int RemoteCameraHttp::PostCapture() {
+  return 0;
 }
@@ -266,15 +266,15 @@ int RemoteCameraRtsp::Capture( Image &image ) {

   /* Request a writeable buffer of the target image */
   directbuffer = image.WriteBuffer(width, height, colours, subpixelorder);
-  if(directbuffer == NULL) {
+  if ( directbuffer == NULL ) {
     Error("Failed requesting writeable buffer for the captured image.");
-    return (-1);
+    return -1;
   }

   while ( true ) {
     buffer.clear();
     if ( !rtspThread->isRunning() )
-      return (-1);
+      return -1;

     if ( rtspThread->getFrame( buffer ) ) {
       Debug( 3, "Read frame %d bytes", buffer.size() );
@@ -282,21 +282,21 @@ int RemoteCameraRtsp::Capture( Image &image ) {
       Hexdump( 4, buffer.head(), 16 );

       if ( !buffer.size() )
-        return( -1 );
+        return -1;

-      if(mCodecContext->codec_id == AV_CODEC_ID_H264) {
+      if ( mCodecContext->codec_id == AV_CODEC_ID_H264 ) {
         // SPS and PPS frames should be saved and appended to IDR frames
         int nalType = (buffer.head()[3] & 0x1f);

         // SPS The SPS NAL unit contains parameters that apply to a series of consecutive coded video pictures
-        if(nalType == 7) {
+        if ( nalType == 7 ) {
           lastSps = buffer;
           continue;
-        } else if(nalType == 8) {
+        } else if ( nalType == 8 ) {
           // PPS The PPS NAL unit contains parameters that apply to the decoding of one or more individual pictures inside a coded video sequence
           lastPps = buffer;
           continue;
-        } else if(nalType == 5) {
+        } else if ( nalType == 5 ) {
           // IDR
           buffer += lastSps;
           buffer += lastPps;
@@ -357,13 +357,13 @@ int RemoteCameraRtsp::Capture( Image &image ) {
       zm_av_packet_unref( &packet );
     } /* getFrame() */

-    if(frameComplete)
-      return (0);
+    if ( frameComplete )
+      return 1;

   } // end while true

   // can never get here.
-  return (0);
+  return 0;
 }

 //Function to handle capture and store
@@ -311,7 +311,7 @@ void StreamBase::openComms() {
     strncpy( rem_addr.sun_path, rem_sock_path, sizeof(rem_addr.sun_path) );
     rem_addr.sun_family = AF_UNIX;
   } // end if connKey > 0
-  Debug(3, "comms open" );
+  Debug(2, "comms open" );
 }

 void StreamBase::closeComms() {
@@ -7,7 +7,7 @@ function Monitor( monitorData ) {
   this.status = null;
   this.alarmState = STATE_IDLE;
   this.lastAlarmState = STATE_IDLE;
-  this.streamCmdParms = "view=request&request=stream&connkey="+this.connKey;
+  this.streamCmdParms = 'view=request&request=stream&connkey='+this.connKey;
   this.onclick = monitorData.onclick;
   if ( auth_hash )
     this.streamCmdParms += '&auth='+auth_hash;
@@ -112,8 +112,24 @@ function Monitor( monitorData ) {
     //this.streamCmdReq.cancel();
     this.streamCmdReq.send( this.streamCmdParms+"&command="+CMD_QUERY );
   };
+  this.onError = function( text, error ) {
+    console.log('onerror: ' + text + ' error:'+error);
+  };
+  this.onFailure = function( xhr ) {
+    console.log('onFailure: ' );
+    console.log(xhr );
+  };

-  this.streamCmdReq = new Request.JSON( { url: this.server_url, method: 'get', timeout: 1000+AJAX_TIMEOUT, onSuccess: this.getStreamCmdResponse.bind( this ), onTimeout: this.streamCmdQuery.bind( this, true ), link: 'cancel' } );
+  this.streamCmdReq = new Request.JSON( {
+    url: this.server_url,
+    method: 'get',
+    timeout: 1000+AJAX_TIMEOUT,
+    onSuccess: this.getStreamCmdResponse.bind( this ),
+    onTimeout: this.streamCmdQuery.bind( this, true ),
+    onError: this.onError.bind(this),
+    onFailure: this.onFailure.bind(this),
+    link: 'cancel'
+  } );

   requestQueue.addRequest( "cmdReq"+this.id, this.streamCmdReq );
 }