/*
* Capture video from Raspberry Pi Camera and audio from ALSA,
* encode them to H.264/AAC, and mux them to MPEG-TS.
*
* H.264 encoder: Raspberry Pi H.264 hardware encoder (via OpenMAX IL)
* AAC encoder : fdk-aac (via libavcodec)
* MPEG-TS muxer: libavformat
*/
#if defined(__cplusplus)
extern "C" {
#endif
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <signal.h>
#include <fcntl.h>
#include <errno.h>
#include <math.h>
#include <pthread.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <sys/statvfs.h>
#include <sys/socket.h>
#include <sys/un.h>
#include <alsa/asoundlib.h>
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/mathematics.h>
#include <getopt.h>
#include "bcm_host.h"
#include "ilclient.h"
#include "hooks.h"
#include "mpegts.h"
#include "httplivestreaming.h"
#include "state.h"
#include "log.h"
#include "text.h"
#include "dispmanx.h"
#include "timestamp.h"
#include "subtitle.h"
#define PROGRAM_NAME "picam"
#define PROGRAM_VERSION "1.4.6"
// Audio-only stream is created if this is 1 (for debugging)
#define AUDIO_ONLY 0
// ALSA buffer size for capture will be multiplied by this number
#define ALSA_BUFFER_MULTIPLY 100
// ALSA buffer size for playback will be multiplied by this number (max: 16)
#define ALSA_PLAYBACK_BUFFER_MULTIPLY 10
// If this is 1, PTS will be reset to zero when it exceeds PTS_MODULO
#define ENABLE_PTS_WRAP_AROUND 0
#if ENABLE_PTS_WRAP_AROUND
// Both PTS and DTS are 33 bit and wraps around to zero
#define PTS_MODULO 8589934592
#endif
// Internal flag indicates that audio is available for read
#define AVAIL_AUDIO 2
// If this value is increased, audio gets faster than video
#define N_BUFFER_COUNT_ACTUAL 1
// If this value is increased, video gets faster than audio
#define AUDIO_BUFFER_CHUNKS 0
// How much PTS difference between audio and video is
// considered to be too large
#define PTS_DIFF_TOO_LARGE 45000 // 90000 == 1 second
// enum
#define EXPOSURE_AUTO 0
#define EXPOSURE_NIGHT 1
// Number of packets to chase recording for each cycle
#define REC_CHASE_PACKETS 10
// Which color (YUV) is used to fill blank borders
#define FILL_COLOR_Y 0
#define FILL_COLOR_U 128
#define FILL_COLOR_V 128
// Whether or not to pass pBuffer from camera to video_encode directly
#define ENABLE_PBUFFER_OPTIMIZATION_HACK 1
#if ENABLE_PBUFFER_OPTIMIZATION_HACK
static OMX_BUFFERHEADERTYPE *video_encode_input_buf = NULL;
static OMX_U8 *video_encode_input_buf_pBuffer_orig = NULL;
#endif
#define ENABLE_AUTO_GOP_SIZE_CONTROL_FOR_VFR 1
// OpenMAX IL ports
static const int CAMERA_PREVIEW_PORT = 70;
static const int CAMERA_CAPTURE_PORT = 71;
static const int CAMERA_INPUT_PORT = 73;
static const int CLOCK_OUTPUT_1_PORT = 80;
static const int VIDEO_RENDER_INPUT_PORT = 90;
static const int VIDEO_ENCODE_INPUT_PORT = 200;
static const int VIDEO_ENCODE_OUTPUT_PORT = 201;
// Directory to put recorded MPEG-TS files
static char *rec_dir = "rec";
static char *rec_tmp_dir = "rec/tmp";
static char *rec_archive_dir = "rec/archive";
// Whether or not to enable clock OMX component
static const int is_clock_enabled = 1;
// Pace of PTS
typedef enum {
PTS_SPEED_NORMAL,
PTS_SPEED_UP,
PTS_SPEED_DOWN,
} pts_mode_t;
typedef struct EncodedPacket {
int64_t pts;
uint8_t *data;
int size;
int stream_index;
int flags;
} EncodedPacket;
static const int log_level_default = LOG_LEVEL_INFO;
static int sensor_mode;
static const int sensor_mode_default = -1;
static int video_width;
static const int video_width_default = 1280;
static int video_width_32;
static int video_height;
static const int video_height_default = 720;
static int video_height_16;
static float video_fps;
static const float video_fps_default = 30.0f;
static int video_pts_step;
static const int video_pts_step_default = 0;
static int video_gop_size;
static const int video_gop_size_default = 0;
static int video_rotation;
static const int video_rotation_default = 0;
static int video_hflip;
static const int video_hflip_default = 0;
static int video_vflip;
static const int video_vflip_default = 0;
static long video_bitrate;
static const long video_bitrate_default = 2000 * 1000; // 2 Mbps
static char video_avc_profile[21];
static const char *video_avc_profile_default = "constrained_baseline";
typedef struct video_avc_profile_option {
char *name;
OMX_VIDEO_AVCPROFILETYPE profile;
} video_avc_profile_option;
static const video_avc_profile_option video_avc_profile_options[] = {
{ .name = "constrained_baseline", .profile = OMX_VIDEO_AVCProfileConstrainedBaseline },
{ .name = "baseline", .profile = OMX_VIDEO_AVCProfileBaseline },
{ .name = "main", .profile = OMX_VIDEO_AVCProfileMain },
{ .name = "high", .profile = OMX_VIDEO_AVCProfileHigh },
};
static char video_avc_level[4];
static const char *video_avc_level_default = "3.1";
typedef struct video_avc_level_option {
char *name;
OMX_VIDEO_AVCLEVELTYPE level;
} video_avc_level_option;
static const video_avc_level_option video_avc_level_options[] = {
{ .name = "1", .level = OMX_VIDEO_AVCLevel1 },
{ .name = "1b", .level = OMX_VIDEO_AVCLevel1b },
{ .name = "1.1", .level = OMX_VIDEO_AVCLevel11 },
{ .name = "1.2", .level = OMX_VIDEO_AVCLevel12 },
{ .name = "1.3", .level = OMX_VIDEO_AVCLevel13 },
{ .name = "2", .level = OMX_VIDEO_AVCLevel2 },
{ .name = "2.1", .level = OMX_VIDEO_AVCLevel21 },
{ .name = "2.2", .level = OMX_VIDEO_AVCLevel22 },
{ .name = "3", .level = OMX_VIDEO_AVCLevel3 },
{ .name = "3.1", .level = OMX_VIDEO_AVCLevel31 },
{ .name = "3.2", .level = OMX_VIDEO_AVCLevel32 },
{ .name = "4", .level = OMX_VIDEO_AVCLevel4 },
{ .name = "4.1", .level = OMX_VIDEO_AVCLevel41 },
{ .name = "4.2", .level = OMX_VIDEO_AVCLevel42 },
{ .name = "5", .level = OMX_VIDEO_AVCLevel5 },
{ .name = "5.1", .level = OMX_VIDEO_AVCLevel51 },
};
static int video_qp_min;
static const int video_qp_min_default = -1;
static int video_qp_max;
static const int video_qp_max_default = -1;
static int video_qp_initial;
static const int video_qp_initial_default = -1;
static int video_slice_dquant;
static const int video_slice_dquant_default = -1;
static char alsa_dev[256];
static const char *alsa_dev_default = "hw:0,0";
static char audio_preview_dev[256];
static const char *audio_preview_dev_default = "plughw:0,0";
static long audio_bitrate;
static const long audio_bitrate_default = 40000; // 40 Kbps
static int is_audio_channels_specified = 0;
static int audio_channels;
static int audio_preview_channels;
static const int audio_channels_default = 1; // mono
static int audio_sample_rate;
static const int audio_sample_rate_default = 48000;
static int is_hlsout_enabled;
static const int is_hlsout_enabled_default = 0;
static char hls_output_dir[256];
static const char *hls_output_dir_default = "/run/shm/video";
static int is_rtspout_enabled;
static const int is_rtspout_enabled_default = 0;
static char rtsp_video_control_path[256];
static const char *rtsp_video_control_path_default = "/tmp/node_rtsp_rtmp_videoControl";
static char rtsp_audio_control_path[256];
static const char *rtsp_audio_control_path_default = "/tmp/node_rtsp_rtmp_audioControl";
static char rtsp_video_data_path[256];
static const char *rtsp_video_data_path_default = "/tmp/node_rtsp_rtmp_videoData";
static char rtsp_audio_data_path[256];
static const char *rtsp_audio_data_path_default = "/tmp/node_rtsp_rtmp_audioData";
static int is_tcpout_enabled;
static const int is_tcpout_enabled_default = 0;
static char tcp_output_dest[256];
static int is_auto_exposure_enabled;
static const int is_auto_exposure_enabled_default = 0;
static int is_vfr_enabled; // whether variable frame rate is enabled
static const int is_vfr_enabled_default = 0;
static float auto_exposure_threshold;
static const float auto_exposure_threshold_default = 5.0f;
static float roi_left;
static const float roi_left_default = 0.0f;
static float roi_top;
static const float roi_top_default = 0.0f;
static float roi_width;
static const float roi_width_default = 1.0f;
static float roi_height;
static const float roi_height_default = 1.0f;
static char white_balance[13];
static const char *white_balance_default = "auto";
typedef struct white_balance_option {
char *name;
OMX_WHITEBALCONTROLTYPE control;
} white_balance_option;
static const white_balance_option white_balance_options[] = {
{ .name = "off", .control = OMX_WhiteBalControlOff },
{ .name = "auto", .control = OMX_WhiteBalControlAuto },
{ .name = "sun", .control = OMX_WhiteBalControlSunLight },
{ .name = "cloudy", .control = OMX_WhiteBalControlCloudy },
{ .name = "shade", .control = OMX_WhiteBalControlShade },
{ .name = "tungsten", .control = OMX_WhiteBalControlTungsten },
{ .name = "fluorescent", .control = OMX_WhiteBalControlFluorescent },
{ .name = "incandescent", .control = OMX_WhiteBalControlIncandescent },
{ .name = "flash", .control = OMX_WhiteBalControlFlash },
{ .name = "horizon", .control = OMX_WhiteBalControlHorizon },
};
static char exposure_control[14];
static const char *exposure_control_default = "auto";
typedef struct exposure_control_option {
char *name;
OMX_EXPOSURECONTROLTYPE control;
} exposure_control_option;
static const exposure_control_option exposure_control_options[] = {
{ .name = "off", .control = OMX_ExposureControlOff },
{ .name = "auto", .control = OMX_ExposureControlAuto },
{ .name = "night", .control = OMX_ExposureControlNight },
{ .name = "nightpreview", .control = OMX_ExposureControlNightWithPreview },
{ .name = "backlight", .control = OMX_ExposureControlBackLight },
{ .name = "spotlight", .control = OMX_ExposureControlSpotLight },
{ .name = "sports", .control = OMX_ExposureControlSports },
{ .name = "snow", .control = OMX_ExposureControlSnow },
{ .name = "beach", .control = OMX_ExposureControlBeach },
{ .name = "verylong", .control = OMX_ExposureControlVeryLong },
{ .name = "fixedfps", .control = OMX_ExposureControlFixedFps },
{ .name = "antishake", .control = OMX_ExposureControlAntishake },
{ .name = "fireworks", .control = OMX_ExposureControlFireworks },
{ .name = "largeaperture", .control = OMX_ExposureControlLargeAperture },
{ .name = "smallaperture", .control = OMX_ExposureControlSmallAperture },
};
// Red gain used when AWB is off
static float awb_red_gain;
static const float awb_red_gain_default = 0.0f;
// Blue gain used when AWB is off
static float awb_blue_gain;
static const float awb_blue_gain_default = 0.0f;
static char exposure_metering[8];
static const char *exposure_metering_default = "average";
typedef struct exposure_metering_option {
char *name;
OMX_METERINGTYPE metering;
} exposure_metering_option;
static const exposure_metering_option exposure_metering_options[] = {
{ .name = "average", .metering = OMX_MeteringModeAverage },
{ .name = "spot", .metering = OMX_MeteringModeSpot },
{ .name = "matrix", .metering = OMX_MeteringModeMatrix },
{ .name = "backlit", .metering = OMX_MeteringModeBacklit },
};
static int manual_exposure_compensation = 0; // EV compensation
static float exposure_compensation;
static int manual_exposure_aperture = 0; // f-number
static float exposure_aperture;
static int manual_exposure_shutter_speed = 0; // in microseconds
static unsigned int exposure_shutter_speed;
static int manual_exposure_sensitivity = 0; // ISO
static unsigned int exposure_sensitivity;
static char state_dir[256];
static const char *state_dir_default = "state";
static char hooks_dir[256];
static const char *hooks_dir_default = "hooks";
static float audio_volume_multiply;
static const float audio_volume_multiply_default = 1.0f;
static int audio_min_value;
static int audio_max_value;
static int is_hls_encryption_enabled;
static const int is_hls_encryption_enabled_default = 0;
static char hls_encryption_key_uri[256];
static const char *hls_encryption_key_uri_default = "stream.key";
static uint8_t hls_encryption_key[16] = {
0x75, 0xb0, 0xa8, 0x1d, 0xe1, 0x74, 0x87, 0xc8,
0x8a, 0x47, 0x50, 0x7a, 0x7e, 0x1f, 0xdf, 0x73,
};
static uint8_t hls_encryption_key_default[16] = {
0x75, 0xb0, 0xa8, 0x1d, 0xe1, 0x74, 0x87, 0xc8,
0x8a, 0x47, 0x50, 0x7a, 0x7e, 0x1f, 0xdf, 0x73,
};
static uint8_t hls_encryption_iv[16] = {
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
};
static uint8_t hls_encryption_iv_default[16] = {
0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f,
};
static int is_preview_enabled;
static const int is_preview_enabled_default = 0;
static int is_previewrect_enabled;
static const int is_previewrect_enabled_default = 0;
static int preview_x;
static int preview_y;
static int preview_width;
static int preview_height;
static int preview_opacity;
static uint32_t blank_background_color;
static const int preview_opacity_default = 255;
static int record_buffer_keyframes;
static const int record_buffer_keyframes_default = 5;
static int is_timestamp_enabled = 0;
static char timestamp_format[128];
static const char *timestamp_format_default = "%a %b %d %l:%M:%S %p";
static LAYOUT_ALIGN timestamp_layout;
static const LAYOUT_ALIGN timestamp_layout_default = LAYOUT_ALIGN_BOTTOM | LAYOUT_ALIGN_RIGHT;
static int timestamp_horizontal_margin;
static const int timestamp_horizontal_margin_default = 10;
static int timestamp_vertical_margin;
static const int timestamp_vertical_margin_default = 10;
static int timestamp_pos_x;
static int timestamp_pos_y;
static int is_timestamp_abs_pos_enabled = 0;
static TEXT_ALIGN timestamp_text_align;
static const TEXT_ALIGN timestamp_text_align_default = TEXT_ALIGN_LEFT;
static char timestamp_font_name[128];
static const char *timestamp_font_name_default = "FreeMono:style=Bold";
static char timestamp_font_file[1024];
static int timestamp_font_face_index;
static const int timestamp_font_face_index_default = 0;
static float timestamp_font_points;
static const float timestamp_font_points_default = 14.0f;
static int timestamp_font_dpi;
static const int timestamp_font_dpi_default = 96;
static int timestamp_color;
static const int timestamp_color_default = 0xffffff;
static int timestamp_stroke_color;
static const int timestamp_stroke_color_default = 0x000000;
static float timestamp_stroke_width;
static const float timestamp_stroke_width_default = 1.3f;
static int timestamp_letter_spacing;
static const int timestamp_letter_spacing_default = 0;
// how many keyframes should we look back for the next recording
static int recording_look_back_keyframes;
static int64_t video_current_pts = 0;
static int64_t audio_current_pts = 0;
static int64_t last_pts = 0;
static int64_t time_for_last_pts = 0; // Used in VFR mode
pts_mode_t pts_mode = PTS_SPEED_NORMAL;
// Counter for PTS speed up/down
static int speed_up_count = 0;
static int speed_down_count = 0;
static int audio_pts_step_base;
#if AUDIO_BUFFER_CHUNKS > 0
uint16_t *audio_buffer[AUDIO_BUFFER_CHUNKS];
int audio_buffer_index = 0;
int is_audio_buffer_filled = 0;
#endif
// If this value is 1, audio capturing is always disabled.
static int disable_audio_capturing = 0;
static pthread_t audio_nop_thread;
static int fr_q16;
// Function prototypes
static int camera_set_white_balance(char *wb);
static int camera_set_exposure_control(char *ex);
static int camera_set_custom_awb_gains();
static void encode_and_send_image();
static void encode_and_send_audio();
void start_record();
void stop_record();
#if ENABLE_AUTO_GOP_SIZE_CONTROL_FOR_VFR
static void set_gop_size(int gop_size);
#endif
static long long video_frame_count = 0;
static long long audio_frame_count = 0;
static int64_t video_start_time;
static int64_t audio_start_time;
static int is_video_recording_started = 0;
static int is_audio_recording_started = 0;
static uint8_t *last_video_buffer = NULL;
static size_t last_video_buffer_size = 0;
static int keyframes_count = 0;
static int video_pending_drop_frames = 0;
static int audio_pending_drop_frames = 0;
static COMPONENT_T *video_encode = NULL;
static COMPONENT_T *component_list[5];
static int n_component_list = 0;
static ILCLIENT_T *ilclient;
static ILCLIENT_T *cam_client;
static COMPONENT_T *camera_component = NULL;
static COMPONENT_T *render_component = NULL;
static COMPONENT_T *clock_component = NULL;
static TUNNEL_T tunnel[3]; // Must be null-terminated
static int n_tunnel = 0;
static AVFormatContext *tcp_ctx;
static pthread_mutex_t tcp_mutex = PTHREAD_MUTEX_INITIALIZER;
static int current_exposure_mode = EXPOSURE_AUTO;
static int keepRunning = 1;
static int frame_count = 0;
static int current_audio_frames = 0;
static uint8_t *codec_configs[2];
static int codec_config_sizes[2];
static int codec_config_total_size = 0;
static int n_codec_configs = 0;
static struct timespec tsBegin = { .tv_sec = 0, .tv_nsec = 0 };
static AVFormatContext *rec_format_ctx;
static int flush_recording_seconds = 5; // Flush recording data every 5 seconds
static time_t rec_start_time;
static HTTPLiveStreaming *hls;
// NAL unit type 9
static uint8_t access_unit_delimiter[] = {
0x00, 0x00, 0x00, 0x01, 0x09, 0xf0,
};
static int access_unit_delimiter_length = 6;
// sound
static snd_pcm_t *capture_handle;
static snd_pcm_t *audio_preview_handle;
static snd_pcm_hw_params_t *alsa_hw_params;
static uint16_t *samples;
static AVFrame *av_frame;
static int audio_fd_count;
static struct pollfd *poll_fds; // file descriptors for polling audio
static int is_first_audio;
static int period_size;
static int audio_buffer_size;
static int is_audio_preview_enabled;
static const int is_audio_preview_enabled_default = 0;
static int is_audio_preview_device_opened = 0;
// threads
static pthread_mutex_t mutex_writing = PTHREAD_MUTEX_INITIALIZER;
// UNIX domain sockets
static int sockfd_video;
static int sockfd_video_control;
static int sockfd_audio;
static int sockfd_audio_control;
static uint8_t *encbuf = NULL;
static int encbuf_size = -1;
static pthread_t rec_thread;
static pthread_cond_t rec_cond = PTHREAD_COND_INITIALIZER;
static pthread_mutex_t rec_mutex = PTHREAD_MUTEX_INITIALIZER;
static pthread_mutex_t rec_write_mutex = PTHREAD_MUTEX_INITIALIZER;
static int rec_thread_needs_write = 0;
static int rec_thread_needs_exit = 0;
static int rec_thread_frame = 0;
static int rec_thread_needs_flush = 0;
EncodedPacket **encoded_packets; // circular buffer
static int current_encoded_packet = -1;
static int *keyframe_pointers;
static int current_keyframe_pointer = -1;
static int is_keyframe_pointers_filled = 0;
static int encoded_packets_size;
// hooks
static pthread_t hooks_thread;
char recording_filepath[256];
char recording_tmp_filepath[256];
char recording_archive_filepath[1024];
char recording_basename[256];
char recording_dest_dir[1024];
int is_recording = 0;
static MpegTSCodecSettings codec_settings;
static int is_audio_muted = 0;
// Will be set to 1 when the camera finishes capturing
static int is_camera_finished = 0;
#if ENABLE_AUTO_GOP_SIZE_CONTROL_FOR_VFR
// Variables for variable frame rate
static int64_t last_keyframe_pts = 0;
static int frames_since_last_keyframe = 0;
#endif
static float min_fps = -1.0f;
static float max_fps = -1.0f;
// Query camera capabilities and exit
static int query_and_exit = 0;
static pthread_mutex_t camera_finish_mutex = PTHREAD_MUTEX_INITIALIZER;
static pthread_cond_t camera_finish_cond = PTHREAD_COND_INITIALIZER;
static char errbuf[1024];
// Re-enable audio: clear the mute flag so captured audio is passed on again.
static void unmute_audio() {
  is_audio_muted = 0;
  log_info("unmute");
}
// Mute audio: set the flag that suppresses captured audio downstream.
static void mute_audio() {
  is_audio_muted = 1;
  log_info("mute");
}
// Check whether the root filesystem is >= 95% used.
// Returns 1 if usage is at least 95%, 0 otherwise (including when the
// usage cannot be determined). Logs the usage percentage as a side effect.
static int is_disk_almost_full() {
  struct statvfs stat;
  // BUGFIX: the statvfs() return value was previously ignored; on failure
  // the struct would be read uninitialized.
  if (statvfs("/", &stat) != 0) {
    log_error("error: statvfs failed: %s\n", strerror(errno));
    return 0; // cannot determine usage; assume not full
  }
  if (stat.f_blocks == 0) {
    return 0; // guard against division by zero
  }
  int used_percent = ceil( (stat.f_blocks - stat.f_bfree) * 100.0f / stat.f_blocks);
  log_info("disk_usage=%d%% ", used_percent);
  return used_percent >= 95;
}
// Record the position of the current encoded packet as the newest keyframe,
// advancing the keyframe-pointer index around its circular buffer.
static void mark_keyframe_packet() {
  int next = current_keyframe_pointer + 1;
  if (next >= record_buffer_keyframes) {
    next = 0;
    is_keyframe_pointers_filled = 1; // at least one full cycle completed
  }
  current_keyframe_pointer = next;
  keyframe_pointers[current_keyframe_pointer] = current_encoded_packet;
}
// Allocate the circular buffer of EncodedPacket pointers used for the
// pre-record buffer. Capacity is sized to hold record_buffer_keyframes
// worth of both video and audio packets, doubled for safety, plus headroom.
// Exits the process if allocation fails.
static void prepare_encoded_packets() {
  int audio_fps = audio_sample_rate / 1 / period_size;
  encoded_packets_size = (video_fps + 1) * record_buffer_keyframes * 2 +
    (audio_fps + 1) * record_buffer_keyframes * 2 + 100;
  // calloc allocates and zero-fills in one call (and checks the
  // count*size multiplication for overflow), replacing malloc+memset.
  encoded_packets = calloc(encoded_packets_size, sizeof(EncodedPacket *));
  if (encoded_packets == NULL) {
    log_error("error: cannot allocate memory for encoded_packets\n");
    exit(EXIT_FAILURE);
  }
}
// Write up to max_packets packets from the circular buffer (starting at
// rec_thread_frame) into the recording context, rebasing each PTS/DTS by
// origin_pts so the recorded file starts at timestamp zero. Stops early
// when the read pointer catches up with the newest packet.
// Returns the number of packets written (including any that failed).
//
// BUGFIX: origin_pts was declared `int`, but callers pass 64-bit 90 kHz
// PTS values which exceed INT_MAX after roughly 6.6 hours of uptime,
// silently truncating the origin. Widened to int64_t.
static int write_encoded_packets(int max_packets, int64_t origin_pts) {
  int ret;
  AVPacket avpkt;
  EncodedPacket *enc_pkt;
  av_init_packet(&avpkt);
  int wrote_packets = 0;
  pthread_mutex_lock(&rec_write_mutex);
  while (1) {
    wrote_packets++;
    enc_pkt = encoded_packets[rec_thread_frame];
    avpkt.pts = avpkt.dts = enc_pkt->pts - origin_pts;
    avpkt.data = enc_pkt->data;
    avpkt.size = enc_pkt->size;
    avpkt.stream_index = enc_pkt->stream_index;
    avpkt.flags = enc_pkt->flags;
    ret = av_write_frame(rec_format_ctx, &avpkt);
    if (ret < 0) {
      av_strerror(ret, errbuf, sizeof(errbuf));
      log_error("error: write_encoded_packets: av_write_frame: %s\n", errbuf);
    }
    // Advance the read pointer around the circular buffer.
    if (++rec_thread_frame == encoded_packets_size) {
      rec_thread_frame = 0;
    }
    if (rec_thread_frame == current_encoded_packet) {
      break; // caught up with the newest packet
    }
    if (wrote_packets == max_packets) {
      break;
    }
  }
  pthread_mutex_unlock(&rec_write_mutex);
  av_free_packet(&avpkt);
  return wrote_packets;
}
static void add_encoded_packet(int64_t pts, uint8_t *data, int size, int stream_index, int flags) {
EncodedPacket *packet;
if (++current_encoded_packet == encoded_packets_size) {
current_encoded_packet = 0;
}
packet = encoded_packets[current_encoded_packet];
if (packet != NULL) {
int next_keyframe_pointer = current_keyframe_pointer + 1;
if (next_keyframe_pointer >= record_buffer_keyframes) {
next_keyframe_pointer = 0;
}
if (current_encoded_packet == keyframe_pointers[next_keyframe_pointer]) {
log_warn("warning: Record buffer is starving. Recorded file may not start from keyframe. Try reducing the value of --gopsize.\n");
}
av_freep(&packet->data);
} else {
packet = malloc(sizeof(EncodedPacket));
if (packet == NULL) {
perror("malloc for EncodedPacket");
return;
}
encoded_packets[current_encoded_packet] = packet;
}
packet->pts = pts;
packet->data = data;
packet->size = size;
packet->stream_index = stream_index;
packet->flags = flags;
}
// Release every packet (and its payload) stored in the circular buffer.
// The encoded_packets pointer array itself is left allocated.
static void free_encoded_packets() {
  for (int i = 0; i < encoded_packets_size; i++) {
    EncodedPacket *pkt = encoded_packets[i];
    if (pkt == NULL) {
      continue;
    }
    av_freep(&pkt->data);
    free(pkt);
  }
}
// Allocate and configure the global AVFrame and interleaved sample buffer
// used for AAC encoding, copying the audio parameters from the audio
// stream's codec context in format_ctx. Also derives period_size (samples
// per channel per period) and audio_pts_step_base (PTS increment per audio
// frame in 90 kHz clock units). Exits the process on allocation failure.
void setup_av_frame(AVFormatContext *format_ctx) {
AVCodecContext *audio_codec_ctx;
int ret;
int buffer_size;
#if AUDIO_ONLY
audio_codec_ctx = format_ctx->streams[0]->codec; // stream 0 is the audio stream
#else
audio_codec_ctx = format_ctx->streams[1]->codec; // stream 0 is video, stream 1 is audio
#endif
av_frame = av_frame_alloc();
if (!av_frame) {
log_error("error: av_frame_alloc failed\n");
exit(EXIT_FAILURE);
}
// Mirror the codec context's audio parameters into the frame.
av_frame->sample_rate = audio_codec_ctx->sample_rate;
log_debug("sample_rate: %d\n", audio_codec_ctx->sample_rate);
av_frame->nb_samples = audio_codec_ctx->frame_size;
log_debug("nb_samples: %d\n", audio_codec_ctx->frame_size);
av_frame->format = audio_codec_ctx->sample_fmt;
log_debug("sample_fmt: %d\n", audio_codec_ctx->sample_fmt);
av_frame->channel_layout = audio_codec_ctx->channel_layout;
log_debug("audio_codec_ctx->channel_layout: %" PRIu64 "\n", audio_codec_ctx->channel_layout);
log_debug("av_frame->channel_layout: %" PRIu64 "\n", av_frame->channel_layout);
log_debug("audio_codec_ctx->channels: %d\n", audio_codec_ctx->channels);
log_debug("av_frame->channels: %d\n", av_frame->channels);
// Size (in bytes) of one full frame of interleaved samples.
buffer_size = av_samples_get_buffer_size(NULL, audio_codec_ctx->channels,
audio_codec_ctx->frame_size, audio_codec_ctx->sample_fmt, 0);
samples = av_malloc(buffer_size);
if (!samples) {
log_error("error: av_malloc for samples failed\n");
exit(EXIT_FAILURE);
}
log_debug("allocated %d bytes for audio samples\n", buffer_size);
#if AUDIO_BUFFER_CHUNKS > 0
int i;
for (i = 0; i < AUDIO_BUFFER_CHUNKS; i++) {
audio_buffer[i] = av_malloc(buffer_size);
if (!audio_buffer[i]) {
log_error("error: av_malloc for audio_buffer[%d] failed\n", i);
exit(EXIT_FAILURE);
}
}
#endif
// Samples per channel in one period; buffer holds interleaved 16-bit samples.
period_size = buffer_size / audio_channels / sizeof(short);
// PTS step per audio frame on the 90 kHz MPEG-TS clock.
audio_pts_step_base = 90000.0f * period_size / audio_sample_rate;
log_debug("audio_pts_step_base: %d\n", audio_pts_step_base);
// Point the frame's data planes at the samples buffer.
ret = avcodec_fill_audio_frame(av_frame, audio_codec_ctx->channels, audio_codec_ctx->sample_fmt,
(const uint8_t*)samples, buffer_size, 0);
if (ret < 0) {
av_strerror(ret, errbuf, sizeof(errbuf));
log_error("error: avcodec_fill_audio_frame failed: %s\n", errbuf);
exit(EXIT_FAILURE);
}
}
// Create directory `dir` (mode 0755) if it does not already exist, then
// verify it is a readable directory.
// Returns 0 on success, -1 on any error.
int create_dir(const char *dir) {
  struct stat st;
  if (stat(dir, &st) == -1) {
    if (errno != ENOENT) {
      perror("stat directory");
      return -1;
    }
    // Directory does not exist; create it.
    if (mkdir(dir, 0755) == 0) { // success
      log_info("created directory: ./%s\n", dir);
    } else if (errno == EEXIST) {
      // BUGFIX (TOCTOU): another process created the directory between
      // stat() and mkdir(); that is not an error. The access() check
      // below still validates it.
    } else {
      log_error("error creating directory ./%s: %s\n",
          dir, strerror(errno));
      return -1;
    }
  } else if (!S_ISDIR(st.st_mode)) {
    log_error("error: ./%s is not a directory\n", dir);
    return -1;
  }
  if (access(dir, R_OK) != 0) {
    log_error("error: cannot access directory ./%s: %s\n",
        dir, strerror(errno));
    return -1;
  }
  return 0;
}
// Finish the current recording: close the MPEG-TS muxer, append the
// temporary recording file to the archive file, create a symlink in
// rec_dir pointing at the archived file, remove the temporary file, and
// update recorder state. Runs on the recording thread and always exits it.
// If skip_cleanup is non-zero, only the state flags are updated.
void *rec_thread_stop(int skip_cleanup) {
  FILE *fsrc, *fdest;
  int read_len;
  uint8_t *copy_buf;
  log_info("stop rec\n");
  if (!skip_cleanup) {
    copy_buf = malloc(BUFSIZ);
    if (copy_buf == NULL) {
      perror("malloc for copy_buf");
      pthread_exit(0);
    }
    // Close the muxer before copying the data it wrote.
    pthread_mutex_lock(&rec_write_mutex);
    mpegts_close_stream(rec_format_ctx);
    mpegts_destroy_context(rec_format_ctx);
    pthread_mutex_unlock(&rec_write_mutex);
    log_debug("copy ");
    fsrc = fopen(recording_tmp_filepath, "r");
    if (fsrc == NULL) {
      log_error("error: failed to open %s: %s\n",
          recording_tmp_filepath, strerror(errno));
    }
    fdest = fopen(recording_archive_filepath, "a");
    if (fdest == NULL) {
      log_error("error: failed to open %s: %s\n",
          recording_archive_filepath, strerror(errno));
      if (fsrc != NULL) {
        fclose(fsrc);
        fsrc = NULL;
      }
    }
    // BUGFIX: the copy loop previously ran even when fopen failed,
    // calling fread/fwrite on a NULL stream (undefined behavior).
    if (fsrc != NULL && fdest != NULL) {
      while (1) {
        read_len = fread(copy_buf, 1, BUFSIZ, fsrc);
        if (read_len > 0) {
          fwrite(copy_buf, 1, read_len, fdest);
        }
        if (read_len != BUFSIZ) {
          break;
        }
      }
      if (feof(fsrc)) {
        fclose(fsrc);
        fclose(fdest);
      } else {
        log_error("error: rec_thread_stop: not an EOF?: %s\n", strerror(errno));
      }
    }
    // Build the symlink target so that <rec_dir>/<basename> points at the
    // archived recording.
    char symlink_dest_path[1024];
    size_t rec_dir_len = strlen(rec_dir);
    struct stat file_stat;
    // If recording_archive_filepath starts with "<rec_dir>/", link relative to rec_dir
    if (strncmp(recording_archive_filepath, rec_dir, rec_dir_len) == 0 &&
        recording_archive_filepath[rec_dir_len] == '/') {
      // BUGFIX: the path was previously passed as the snprintf format
      // string; a '%' in the filename would be interpreted as a conversion.
      snprintf(symlink_dest_path, sizeof(symlink_dest_path),
          "%s", recording_archive_filepath + rec_dir_len + 1);
    } else if (recording_archive_filepath[0] == '/') { // absolute path
      snprintf(symlink_dest_path, sizeof(symlink_dest_path),
          "%s", recording_archive_filepath);
    } else { // relative path: prefix with the current working directory
      char cwd[1024];
      if (getcwd(cwd, sizeof(cwd)) == NULL) {
        log_error("error: failed to get current working directory: %s\n",
            strerror(errno));
        cwd[0] = '.';
        cwd[1] = '.';
        cwd[2] = '\0';
      }
      snprintf(symlink_dest_path, sizeof(symlink_dest_path),
          "%s/%s", cwd, recording_archive_filepath);
    }
    log_debug("symlink(%s, %s)\n", symlink_dest_path, recording_filepath);
    if (lstat(recording_filepath, &file_stat) == 0) { // file (symlink) exists
      log_info("replacing existing symlink: %s\n", recording_filepath);
      unlink(recording_filepath);
    }
    if (symlink(symlink_dest_path, recording_filepath) != 0) {
      log_error("error: cannot create symlink from %s to %s: %s\n",
          symlink_dest_path, recording_filepath, strerror(errno));
    }
    // Remove the temporary file now that its contents are archived.
    log_debug("unlink");
    unlink(recording_tmp_filepath);
    state_set(state_dir, "last_rec", recording_filepath);
    free(copy_buf);
  }
  is_recording = 0;
  state_set(state_dir, "record", "false");
  pthread_exit(0);
}
// Ask the recording thread to flush buffered data into the archive file
// on its next cycle (handled in rec_thread_start).
void flush_record() {
rec_thread_needs_flush = 1;
}
// Ask the recording thread to finish and exit (handled in rec_thread_start).
void stop_record() {
rec_thread_needs_exit = 1;
}
// Request a flush when the recording has been running longer than
// flush_recording_seconds since the last flush.
void check_record_duration() {
  if (!is_recording) {
    return;
  }
  time_t now = time(NULL);
  if (now - rec_start_time > flush_recording_seconds) {
    flush_record();
  }
}
void *rec_thread_start() {
time_t rawtime;
struct tm *timeinfo;
AVPacket av_pkt;
int wrote_packets;
int is_caught_up = 0;
int unique_number = 1;
int64_t rec_start_pts, rec_end_pts;
char state_buf[256];
EncodedPacket *enc_pkt;
int filename_decided = 0;
uint8_t *copy_buf;
FILE *fsrc, *fdest;
int read_len;
char *dest_dir;
int has_error;
has_error = 0;
copy_buf = malloc(BUFSIZ);
if (copy_buf == NULL) {
perror("malloc for copy_buf");
pthread_exit(0);
}
time(&rawtime);
timeinfo = localtime(&rawtime);
rec_start_time = time(NULL);
rec_start_pts = -1;
if (recording_dest_dir[0] != 0) {
dest_dir = recording_dest_dir;
} else {
dest_dir = rec_archive_dir;
}
if (recording_basename[0] != 0) { // basename is already decided
snprintf(recording_filepath, sizeof(recording_filepath),
"%s/%s", rec_dir, recording_basename);
snprintf(recording_archive_filepath, sizeof(recording_archive_filepath),
"%s/%s", dest_dir, recording_basename);
snprintf(recording_tmp_filepath, sizeof(recording_tmp_filepath),
"%s/%s", rec_tmp_dir, recording_basename);
filename_decided = 1;
} else {
strftime(recording_basename, sizeof(recording_basename), "%Y-%m-%d_%H-%M-%S", timeinfo);
snprintf(recording_filepath, sizeof(recording_filepath),
"%s/%s.ts", rec_dir, recording_basename);
if (access(recording_filepath, F_OK) != 0) { // filename is decided
sprintf(recording_basename + strlen(recording_basename), ".ts"); // add ".ts"
snprintf(recording_archive_filepath, sizeof(recording_archive_filepath),
"%s/%s", dest_dir, recording_basename);
snprintf(recording_tmp_filepath, sizeof(recording_tmp_filepath),
"%s/%s", rec_tmp_dir, recording_basename);
filename_decided = 1;
}
while (!filename_decided) {
unique_number++;
snprintf(recording_filepath, sizeof(recording_filepath),
"%s/%s-%d.ts", rec_dir, recording_basename, unique_number);
if (access(recording_filepath, F_OK) != 0) { // filename is decided
sprintf(recording_basename + strlen(recording_basename), "-%d.ts", unique_number);
snprintf(recording_archive_filepath, sizeof(recording_archive_filepath),
"%s/%s", dest_dir, recording_basename);
snprintf(recording_tmp_filepath, sizeof(recording_tmp_filepath),
"%s/%s", rec_tmp_dir, recording_basename);
filename_decided = 1;
}
}
}
// Remove existing file
if (unlink(recording_archive_filepath) == 0) {
log_info("removed existing file: %s\n", recording_archive_filepath);
}
pthread_mutex_lock(&rec_write_mutex);
rec_format_ctx = mpegts_create_context(&codec_settings);
mpegts_open_stream(rec_format_ctx, recording_tmp_filepath, 0);
is_recording = 1;
log_info("start rec to %s\n", recording_archive_filepath);
state_set(state_dir, "record", "true");
pthread_mutex_unlock(&rec_write_mutex);
int look_back_keyframes;
if (recording_look_back_keyframes != -1) {
look_back_keyframes = recording_look_back_keyframes;
} else {
look_back_keyframes = record_buffer_keyframes;
}
int start_keyframe_pointer;
if (!is_keyframe_pointers_filled) { // first cycle has not been finished
if (look_back_keyframes - 1 > current_keyframe_pointer) { // not enough pre-start buffer
start_keyframe_pointer = 0;
} else {
start_keyframe_pointer = current_keyframe_pointer - look_back_keyframes + 1;
}
} else { // at least one cycle has been passed
start_keyframe_pointer = current_keyframe_pointer - look_back_keyframes + 1;
}
// turn negative into positive
while (start_keyframe_pointer < 0) {
start_keyframe_pointer += record_buffer_keyframes;
}
rec_thread_frame = keyframe_pointers[start_keyframe_pointer];
enc_pkt = encoded_packets[rec_thread_frame];
rec_start_pts = enc_pkt->pts;
write_encoded_packets(REC_CHASE_PACKETS, rec_start_pts);
av_init_packet(&av_pkt);
while (!rec_thread_needs_exit) {
pthread_mutex_lock(&rec_mutex);
while (!rec_thread_needs_write) {
pthread_cond_wait(&rec_cond, &rec_mutex);
}
pthread_mutex_unlock(&rec_mutex);
if (rec_thread_frame != current_encoded_packet) {
wrote_packets = write_encoded_packets(REC_CHASE_PACKETS, rec_start_pts);
if (wrote_packets <= 2) {
if (!is_caught_up) {
log_debug("caught up");
is_caught_up = 1;
}
}
}
check_record_duration();
if (rec_thread_needs_flush) {
log_debug("F");
mpegts_close_stream_without_trailer(rec_format_ctx);
fsrc = fopen(recording_tmp_filepath, "r");
if (fsrc == NULL) {
log_error("error: failed to open %s: %s\n",
recording_tmp_filepath, strerror(errno));
has_error = 1;
break;
}
fdest = fopen(recording_archive_filepath, "a");
if (fdest == NULL) {
log_error("error: failed to open %s: %s\n",
recording_archive_filepath, strerror(errno));
has_error = 1;
break;
}
while (1) {
read_len = fread(copy_buf, 1, BUFSIZ, fsrc);
if (read_len > 0) {
fwrite(copy_buf, 1, read_len, fdest);
}
if (read_len != BUFSIZ) {
break;
}
}
if (feof(fsrc)) {
fclose(fsrc);
fclose(fdest);
} else {
log_error("error: rec_thread_start: not an EOF?: %s\n", strerror(errno));
}
mpegts_open_stream_without_header(rec_format_ctx, recording_tmp_filepath, 0);
rec_thread_needs_flush = 0;
rec_start_time = time(NULL);
}
rec_thread_needs_write = 0;
}
free(copy_buf);
av_free_packet(&av_pkt);
int prev_frame = rec_thread_frame - 1;
if (prev_frame == -1) {
prev_frame = encoded_packets_size - 1;
}
enc_pkt = encoded_packets[prev_frame];
rec_end_pts = enc_pkt->pts;
snprintf(state_buf, sizeof(state_buf), "duration_pts=%" PRId64 "\nduration_sec=%f\n",
rec_end_pts - rec_start_pts,
(rec_end_pts - rec_start_pts) / 90000.0f);
state_set(state_dir, recording_basename, state_buf);
return rec_thread_stop(has_error);
}
// Starts the background recording thread unless recording is already in
// progress or the disk is almost full.
void start_record() {
  if (is_recording) {
    log_warn("recording is already started\n");
    return;
  }

  if (is_disk_almost_full()) {
    log_error("error: disk is almost full, recording not started\n");
    return;
  }

  rec_thread_needs_exit = 0;
  // pthread_create reports failure via its return value (an errno-style
  // code), not errno; the original ignored it, so a failed thread
  // creation silently dropped the recording.
  int err = pthread_create(&rec_thread, NULL, rec_thread_start, NULL);
  if (err != 0) {
    log_error("error: failed to create recording thread: %s\n", strerror(err));
  }
}
// set record_buffer_keyframes to newsize
//
// Frees every buffered encoded packet, then resizes both the
// encoded_packets ring buffer and the keyframe_pointers array.
// On realloc failure the previous capacity is kept (the buffers are
// still cleared and the ring state reset).
// Returns 0 on success, -1 when the change is rejected or fails.
static int set_record_buffer_keyframes(int newsize) {
  int i;
  void *result;
  int malloc_size;

  // Resizing would invalidate the packets the recorder is consuming.
  if (is_recording) {
    log_error("error: recordbuf cannot be changed while recording\n");
    return -1;
  }

  if (newsize < 1) {
    log_error("error changing recordbuf to %d (must be >= 1)\n", newsize);
    return -1;
  }

  if (newsize == record_buffer_keyframes) { // no change
    log_debug("recordbuf does not change: current=%d new=%d\n",
        record_buffer_keyframes, newsize);
    return -1;
  }

  // Drop every packet currently held in the ring buffer.
  for (i = 0; i < encoded_packets_size; i++) {
    EncodedPacket *packet = encoded_packets[i];
    if (packet != NULL) {
      av_freep(&packet->data);
      free(packet);
    }
  }

  // reset encoded_packets
  // NOTE(review): the "/ 1" presumably stands for the number of audio
  // channels — confirm against how audio_fps is computed elsewhere.
  int audio_fps = audio_sample_rate / 1 / period_size;
  // Capacity for newsize keyframe intervals of video+audio packets,
  // with headroom (the *2 and +100).
  int new_encoded_packets_size = (video_fps + 1) * newsize * 2 +
    (audio_fps + 1) * newsize * 2 + 100;
  malloc_size = sizeof(EncodedPacket *) * new_encoded_packets_size;
  result = realloc(encoded_packets, malloc_size);
  int success = 0;
  if (result == NULL) {
    log_error("error: failed to set encoded_packets to %d while trying to allocate "
        "%d bytes of memory\n", newsize, malloc_size);
    // fallback to old size
    malloc_size = sizeof(EncodedPacket *) * encoded_packets_size;
  } else {
    encoded_packets = result;
    encoded_packets_size = new_encoded_packets_size;
    success = 1;
  }
  // Zero the pointer array (new size on success, old size on fallback)
  // so the freed packets above cannot be freed again later.
  memset(encoded_packets, 0, malloc_size);

  if (success) {
    // reset keyframe_pointers
    malloc_size = sizeof(int) * newsize;
    result = realloc(keyframe_pointers, malloc_size);
    if (result == NULL) {
      log_error("error: failed to set keyframe_pointers to %d while trying to allocate "
          "%d bytes of memory\n", newsize, malloc_size);
      // fallback to old size
      malloc_size = sizeof(int) * record_buffer_keyframes;
    } else {
      keyframe_pointers = result;
      record_buffer_keyframes = newsize;
    }
  } else {
    // encoded_packets resize failed: keep old keyframe_pointers size too.
    malloc_size = sizeof(int) * record_buffer_keyframes;
  }
  memset(keyframe_pointers, 0, malloc_size);

  // Rewind the ring-buffer cursors; the next keyframe starts a fresh cycle.
  current_encoded_packet = -1;
  current_keyframe_pointer = -1;
  is_keyframe_pointers_filled = 0;

  return 0;
}
// parse the contents of hooks/start_record
//
// The file contains "name=value" lines. Recognized keys:
//   recordbuf=<n>   per-recording look-back keyframe count
//   dir=<path>      destination directory for the finished file
//   filename=<name> basename for the finished file
// Results are stored in the recording_* globals; missing file or
// unparsable lines leave the defaults in place.
static void parse_start_record_file(char *full_filename) {
  char buf[1024];

  recording_basename[0] = 0; // empties the basename used for this recording
  recording_dest_dir[0] = 0; // empties the directory the result file will be put in
  recording_look_back_keyframes = -1;

  FILE *fp = fopen(full_filename, "r");
  if (fp != NULL) {
    while (fgets(buf, sizeof(buf), fp)) {
      char *sep_p = strchr(buf, '='); // separator (name=value)
      if (sep_p == NULL) { // we couldn't find '='
        log_error("error parsing line in %s: %s\n",
            full_filename, buf);
        continue;
      }
      // Compare the key length as well as the characters; the original
      // strncmp(buf, key, sep_p - buf) let a short key prefix-match a
      // longer one (e.g. "record=" matched "recordbuf", "d=" matched "dir").
      size_t key_len = (size_t)(sep_p - buf);
      if (key_len == strlen("recordbuf") &&
          strncmp(buf, "recordbuf", key_len) == 0) {
        // read a number
        char *end;
        errno = 0; // reset so a stale ERANGE does not reject a valid value
        int value = strtol(sep_p + 1, &end, 10);
        if (end == sep_p + 1 || errno == ERANGE) { // parse error
          log_error("error parsing line in %s: %s\n",
              full_filename, buf);
          continue;
        }
        if (value > record_buffer_keyframes) {
          log_error("error: per-recording recordbuf (%d) cannot be greater than "
              "global recordbuf (%d); using %d\n"
              "hint: try increasing global recordbuf with \"--recordbuf %d\" or "
              "\"echo %d > hooks/set_recordbuf\"\n",
              value, record_buffer_keyframes, record_buffer_keyframes,
              value, value);
          continue;
        }
        recording_look_back_keyframes = value;
        log_info("using recordbuf=%d for this recording\n", recording_look_back_keyframes);
      } else if (key_len == strlen("dir") &&
          strncmp(buf, "dir", key_len) == 0) { // directory
        size_t len = strcspn(sep_p + 1, "\r\n"); // trim trailing newline
        if (len > sizeof(recording_dest_dir) - 1) {
          len = sizeof(recording_dest_dir) - 1;
        }
        strncpy(recording_dest_dir, sep_p + 1, len);
        recording_dest_dir[len] = '\0';
        // Create the directory if it does not exist
        create_dir(recording_dest_dir);
      } else if (key_len == strlen("filename") &&
          strncmp(buf, "filename", key_len) == 0) { // basename
        size_t len = strcspn(sep_p + 1, "\r\n"); // trim trailing newline
        if (len > sizeof(recording_basename) - 1) {
          len = sizeof(recording_basename) - 1;
        }
        strncpy(recording_basename, sep_p + 1, len);
        recording_basename[len] = '\0';
      } else {
        log_error("failed to parse line in %s: %s\n",
            full_filename, buf);
      }
    }
    fclose(fp);
  }
}
/**
 * Reads a file and returns the contents.
 * file_contents argument will be set to the pointer to the
 * newly-allocated memory block that holds the NULL-terminated contents.
 * It must be freed by caller after use.
 *
 * Returns 0 on success, -1 on failure (file not found, seek/tell error,
 * allocation failure, or short read). On success *file_contents_len is
 * the content length plus one for the trailing NUL.
 */
static int read_file(const char *filepath, char **file_contents, size_t *file_contents_len) {
  FILE *fp;
  long filesize;
  char *buf;
  size_t result;

  fp = fopen(filepath, "rb");
  if (fp == NULL) {
    return -1;
  }

  // obtain file size; check fseek/ftell so a non-seekable stream or an
  // error (ftell returns -1) does not turn into a bogus allocation size
  if (fseek(fp, 0L, SEEK_END) != 0) {
    fclose(fp);
    return -1;
  }
  filesize = ftell(fp);
  if (filesize < 0) {
    fclose(fp);
    return -1;
  }
  if (fseek(fp, 0L, SEEK_SET) != 0) {
    fclose(fp);
    return -1;
  }

  buf = malloc(filesize + 1);
  if (buf == NULL) {
    // %ld matches the long operand (the original %d was a type mismatch)
    log_error("read_file: error reading %s: failed to allocate memory (%ld bytes)", filepath, filesize + 1);
    fclose(fp);
    return -1;
  }
  result = fread(buf, 1, filesize, fp);
  if (result != (size_t)filesize) {
    log_error("read_file: error reading %s", filepath);
    fclose(fp);
    free(buf);
    return -1;
  }
  fclose(fp);
  buf[filesize] = '\0';

  *file_contents = buf;
  *file_contents_len = filesize + 1;
  return 0;
}
/**
 * Hook dispatcher: invoked when a file is created in the hooks directory.
 * The file's name selects the action (start_record, stop_record, mute,
 * unmute, wbred, wbblue, wb_*, ex_*, set_recordbuf, subtitle); for hooks
 * that take parameters the file's contents are re-read from hooks_dir.
 * The content argument is not used by any current handler.
 */
void on_file_create(char *filename, char *content) {
  if (strcmp(filename, "start_record") == 0) {
    char buf[256];
    // parse the contents of hooks/start_record
    snprintf(buf, sizeof(buf), "%s/%s", hooks_dir, filename);
    parse_start_record_file(buf);
    start_record();
  } else if (strcmp(filename, "stop_record") == 0) {
    stop_record();
  } else if (strcmp(filename, "mute") == 0) {
    mute_audio();
  } else if (strcmp(filename, "unmute") == 0) {
    unmute_audio();
  } else if (strcmp(filename, "wbred") == 0) {
    // Set the white-balance red gain from the number in hooks/wbred.
    char buf[256];
    snprintf(buf, sizeof(buf), "%s/%s", hooks_dir, filename);
    char *file_buf;
    size_t file_buf_len;
    if (read_file(buf, &file_buf, &file_buf_len) == 0) {
      if (file_buf != NULL) {
        // read a number
        char *end;
        // NOTE(review): errno is not reset before strtod here (nor in the
        // other strtod/strtol calls below), so a stale ERANGE from an
        // earlier call can reject a valid value.
        double value = strtod(file_buf, &end);
        if (end == file_buf || errno == ERANGE) { // parse error
          log_error("error parsing file %s\n", buf);
        } else { // parse ok
          awb_red_gain = value;
          if (camera_set_custom_awb_gains() == 0) {
            log_info("changed red gain to %.2f\n", awb_red_gain);
          } else {
            log_error("error: failed to set wbred\n");
          }
        }
        free(file_buf);
      }
    }
  } else if (strcmp(filename, "wbblue") == 0) {
    // Set the white-balance blue gain from the number in hooks/wbblue.
    char buf[256];
    snprintf(buf, sizeof(buf), "%s/%s", hooks_dir, filename);
    char *file_buf;
    size_t file_buf_len;
    if (read_file(buf, &file_buf, &file_buf_len) == 0) {
      if (file_buf != NULL) {
        // read a number
        char *end;
        double value = strtod(file_buf, &end);
        if (end == file_buf || errno == ERANGE) { // parse error
          log_error("error parsing file %s\n", buf);
        } else { // parse ok
          awb_blue_gain = value;
          if (camera_set_custom_awb_gains() == 0) {
            log_info("changed blue gain to %.2f\n", awb_blue_gain);
          } else {
            log_error("error: failed to set wbblue\n");
          }
        }
        free(file_buf);
      }
    }
  } else if (strncmp(filename, "wb_", 3) == 0) { // e.g. wb_sun
    // Switch the white-balance mode; the part after "wb_" must match one
    // of the entries in white_balance_options.
    char *wb_mode = filename + 3;
    int matched = 0;
    int i;
    for (i = 0; i < sizeof(white_balance_options) / sizeof(white_balance_option); i++) {
      if (strcmp(white_balance_options[i].name, wb_mode) == 0) {
        strncpy(white_balance, wb_mode, sizeof(white_balance) - 1);
        white_balance[sizeof(white_balance) - 1] = '\0';
        matched = 1;
        break;
      }
    }
    if (matched) {
      if (camera_set_white_balance(white_balance) == 0) {
        log_info("changed the white balance to %s\n", white_balance);
      } else {
        log_error("error: failed to set the white balance to %s\n", white_balance);
      }
    } else {
      // Unknown mode: list the valid ones separated by '/'.
      log_error("hook error: invalid white balance: %s\n", wb_mode);
      log_error("(valid values: ");
      int size = sizeof(white_balance_options) / sizeof(white_balance_option);
      for (i = 0; i < size; i++) {
        log_error("%s", white_balance_options[i].name);
        if (i + 1 == size) { // the last item
          log_error(")\n");
        } else {
          log_error("/");
        }
      }
    }
  } else if (strncmp(filename, "ex_", 3) == 0) { // e.g. ex_night
    // Switch the exposure-control mode; the part after "ex_" must match
    // one of the entries in exposure_control_options.
    char *ex_mode = filename + 3;
    int matched = 0;
    int i;
    if (!is_vfr_enabled) {
      log_warn("warn: Use --vfr or --ex in order to ex_* hook to properly take effect\n");
    }
    for (i = 0; i < sizeof(exposure_control_options) / sizeof(exposure_control_option); i++) {
      if (strcmp(exposure_control_options[i].name, ex_mode) == 0) {
        strncpy(exposure_control, ex_mode, sizeof(exposure_control) - 1);
        exposure_control[sizeof(exposure_control) - 1] = '\0';
        matched = 1;
        break;
      }
    }
    if (matched) {
      if (camera_set_exposure_control(exposure_control) == 0) {
        log_info("changed the exposure control to %s\n", exposure_control);
      } else {
        log_error("error: failed to set the exposure control to %s\n", exposure_control);
      }
    } else {
      // Unknown mode: list the valid ones separated by '/'.
      log_error("hook error: invalid exposure control: %s\n", ex_mode);
      log_error("(valid values: ");
      int size = sizeof(exposure_control_options) / sizeof(exposure_control_option);
      for (i = 0; i < size; i++) {
        log_error("%s", exposure_control_options[i].name);
        if (i + 1 == size) { // the last item
          log_error(")\n");
        } else {
          log_error("/");
        }
      }
    }
  } else if (strcmp(filename, "set_recordbuf") == 0) { // set global recordbuf
    char buf[256];
    snprintf(buf, sizeof(buf), "%s/%s", hooks_dir, filename);
    char *file_buf;
    size_t file_buf_len;
    if (read_file(buf, &file_buf, &file_buf_len) == 0) {
      if (file_buf != NULL) {
        // read a number
        char *end;
        int value = strtol(file_buf, &end, 10);
        if (end == file_buf || errno == ERANGE) { // parse error
          log_error("error parsing file %s\n", buf);
        } else { // parse ok
          if (set_record_buffer_keyframes(value) == 0) {
            log_info("recordbuf set to %d; existing record buffer cleared\n", value);
          }
        }
        free(file_buf);
      }
    }
  } else if (strcmp(filename, "subtitle") == 0) {
    // Parse hooks/subtitle (key=value lines) and show or clear the
    // on-screen subtitle accordingly.
    // The followings are default values for the subtitle
    char line[1024];
    char text[1024];
    size_t text_len = 0;
    char font_name[128] = { 0x00 };
    long face_index = 0;
    char font_file[256] = { 0x00 };
    int color = 0xffffff;
    int stroke_color = 0x000000;
    float font_points = 28.0f;
    int font_dpi = 96;
    float stroke_width = 1.0f;
    int letter_spacing = 0;
    float line_height_multiply = 1.0f;
    float tab_scale = 1.0f;
    int abspos_x = 0;
    int abspos_y = 0;
    float duration = 7.0f;
    int is_abspos_specified = 0;
    LAYOUT_ALIGN layout_align = LAYOUT_ALIGN_BOTTOM | LAYOUT_ALIGN_CENTER;
    TEXT_ALIGN text_align = TEXT_ALIGN_CENTER;
    int horizontal_margin = 0;
    int vertical_margin = 35;
    int in_preview = 1;
    int in_video = 1;

    char filepath[256];
    snprintf(filepath, sizeof(filepath), "%s/%s", hooks_dir, filename);
    FILE *fp;
    fp = fopen(filepath, "r");
    if (fp == NULL) {
      log_error("subtitle error: cannot open file: %s\n", filepath);
    } else {
      // read key=value lines
      while (fgets(line, sizeof(line), fp)) {
        // remove newline at the end of the line
        // NOTE(review): if fgets ever returns an empty string (e.g. a
        // leading NUL byte in the file), line_len-1 indexes before the
        // buffer — worth guarding with line_len > 0.
        size_t line_len = strlen(line);
        if (line[line_len-1] == '\n') {
          line[line_len-1] = '\0';
          line_len--;
        }
        if (line_len == 0) { // blank line
          continue;
        }
        if (line[0] == '#') { // comment line
          continue;
        }
        char *delimiter_p = strchr(line, '=');
        if (delimiter_p != NULL) {
          // key_len+1 in the comparisons below includes the '=' so that
          // e.g. "text=" does not match "text_align=".
          int key_len = delimiter_p - line;
          if (strncmp(line, "text=", key_len+1) == 0) {
            text_len = line_len - 5; // 5 == strlen("text") + 1
            if (text_len >= sizeof(text) - 1) {
              text_len = sizeof(text) - 1;
            }
            strncpy(text, delimiter_p + 1, text_len);
            text[text_len] = '\0';
          } else if (strncmp(line, "font_name=", key_len+1) == 0) {
            strncpy(font_name, delimiter_p + 1, sizeof(font_name) - 1);
            font_name[sizeof(font_name) - 1] = '\0';
          } else if (strncmp(line, "font_file=", key_len+1) == 0) {
            strncpy(font_file, delimiter_p + 1, sizeof(font_file) - 1);
            font_file[sizeof(font_file) - 1] = '\0';
          } else if (strncmp(line, "face_index=", key_len+1) == 0) {
            char *end;
            // NOTE(review): the early returns in this parse loop leave
            // fp open — consider fclose(fp) before returning.
            long value = strtol(delimiter_p+1, &end, 10);
            if (end == delimiter_p+1 || *end != '\0' || errno == ERANGE) { // parse error
              log_error("subtitle error: invalid face_index: %s\n", delimiter_p+1);
              return;
            }
            face_index = value;
          } else if (strncmp(line, "pt=", key_len+1) == 0) {
            char *end;
            double value = strtod(delimiter_p+1, &end);
            if (end == delimiter_p+1 || *end != '\0' || errno == ERANGE) { // parse error
              log_error("subtitle error: invalid pt: %s\n", delimiter_p+1);
              return;
            }
            font_points = value;
          } else if (strncmp(line, "dpi=", key_len+1) == 0) {
            char *end;
            long value = strtol(delimiter_p+1, &end, 10);
            if (end == delimiter_p+1 || *end != '\0' || errno == ERANGE) { // parse error
              log_error("subtitle error: invalid dpi: %s\n", delimiter_p+1);
              return;
            }
            font_dpi = value;
          } else if (strncmp(line, "horizontal_margin=", key_len+1) == 0) {
            char *end;
            long value = strtol(delimiter_p+1, &end, 10);
            if (end == delimiter_p+1 || *end != '\0' || errno == ERANGE) { // parse error
              log_error("subtitle error: invalid horizontal_margin: %s\n", delimiter_p+1);
              return;
            }
            horizontal_margin = value;
          } else if (strncmp(line, "vertical_margin=", key_len+1) == 0) {
            char *end;
            long value = strtol(delimiter_p+1, &end, 10);
            if (end == delimiter_p+1 || *end != '\0' || errno == ERANGE) { // parse error
              log_error("subtitle error: invalid vertical_margin: %s\n", delimiter_p+1);
              return;
            }
            vertical_margin = value;
          } else if (strncmp(line, "duration=", key_len+1) == 0) {
            char *end;
            double value = strtod(delimiter_p+1, &end);
            if (end == delimiter_p+1 || *end != '\0' || errno == ERANGE) { // parse error
              log_error("subtitle error: invalid duration: %s\n", delimiter_p+1);
              return;
            }
            duration = value;
          } else if (strncmp(line, "color=", key_len+1) == 0) {
            // colors are parsed as hexadecimal (base 16)
            char *end;
            long value = strtol(delimiter_p+1, &end, 16);
            if (end == delimiter_p+1 || *end != '\0' || errno == ERANGE) { // parse error
              log_error("subtitle error: invalid color: %s\n", delimiter_p+1);
              return;
            }
            if (value < 0) {
              log_error("subtitle error: invalid color: %d (must be >= 0)\n", value);
              return;
            }
            color = value;
          } else if (strncmp(line, "stroke_color=", key_len+1) == 0) {
            char *end;
            long value = strtol(delimiter_p+1, &end, 16);
            if (end == delimiter_p+1 || *end != '\0' || errno == ERANGE) { // parse error
              log_error("subtitle error: invalid stroke_color: %s\n", delimiter_p+1);
              return;
            }
            if (value < 0) {
              log_error("subtitle error: invalid stroke_color: %d (must be >= 0)\n", value);
              return;
            }
            stroke_color = value;
          } else if (strncmp(line, "stroke_width=", key_len+1) == 0) {
            char *end;
            double value = strtod(delimiter_p+1, &end);
            if (end == delimiter_p+1 || *end != '\0' || errno == ERANGE) { // parse error
              log_error("subtitle error: invalid stroke_width: %s\n", delimiter_p+1);
              return;
            }
            stroke_width = value;
          } else if (strncmp(line, "letter_spacing=", key_len+1) == 0) {
            char *end;
            long value = strtol(delimiter_p+1, &end, 10);
            if (end == delimiter_p+1 || *end != '\0' || errno == ERANGE) { // parse error
              log_error("subtitle error: invalid letter_spacing: %s\n", delimiter_p+1);
              return;
            }
            letter_spacing = value;
          } else if (strncmp(line, "line_height=", key_len+1) == 0) {
            char *end;
            double value = strtod(delimiter_p+1, &end);
            if (end == delimiter_p+1 || *end != '\0' || errno == ERANGE) { // parse error
              log_error("subtitle error: invalid line_height: %s\n", delimiter_p+1);
              return;
            }
            line_height_multiply = value;
          } else if (strncmp(line, "tab_scale=", key_len+1) == 0) {
            char *end;
            double value = strtod(delimiter_p+1, &end);
            if (end == delimiter_p+1 || *end != '\0' || errno == ERANGE) { // parse error
              log_error("subtitle error: invalid tab_scale: %s\n", delimiter_p+1);
              return;
            }
            tab_scale = value;
          } else if (strncmp(line, "pos=", key_len+1) == 0) { // absolute position
            // format is "<x>,<y>"; x must end exactly at the comma
            char *comma_p = strchr(delimiter_p+1, ',');
            if (comma_p == NULL) {
              log_error("subtitle error: invalid pos format: %s (should be <x>,<y>)\n", delimiter_p+1);
              return;
            }
            char *end;
            long value = strtol(delimiter_p+1, &end, 10);
            if (end == delimiter_p+1 || end != comma_p || errno == ERANGE) { // parse error
              log_error("subtitle error: invalid pos x: %s\n", delimiter_p+1);
              return;
            }
            abspos_x = value;
            value = strtol(comma_p+1, &end, 10);
            if (end == comma_p+1 || *end != '\0' || errno == ERANGE) { // parse error
              log_error("subtitle error: invalid pos y: %s\n", comma_p+1);
              return;
            }
            abspos_y = value;
            is_abspos_specified = 1;
          } else if (strncmp(line, "layout_align=", key_len+1) == 0) { // layout align
            // comma-separated flags; OR each recognized token into
            // layout_align (e.g. "bottom,center")
            char *comma_p;
            char *search_p = delimiter_p + 1;
            int param_len;
            layout_align = 0;
            while (1) {
              comma_p = strchr(search_p, ',');
              if (comma_p == NULL) {
                param_len = line + line_len - search_p;
              } else {
                param_len = comma_p - search_p;
              }
              if (strncmp(search_p, "top", param_len) == 0) {
                layout_align |= LAYOUT_ALIGN_TOP;
              } else if (strncmp(search_p, "middle", param_len) == 0) {
                layout_align |= LAYOUT_ALIGN_MIDDLE;
              } else if (strncmp(search_p, "bottom", param_len) == 0) {
                layout_align |= LAYOUT_ALIGN_BOTTOM;
              } else if (strncmp(search_p, "left", param_len) == 0) {
                layout_align |= LAYOUT_ALIGN_LEFT;
              } else if (strncmp(search_p, "center", param_len) == 0) {
                layout_align |= LAYOUT_ALIGN_CENTER;
              } else if (strncmp(search_p, "right", param_len) == 0) {
                layout_align |= LAYOUT_ALIGN_RIGHT;
              } else {
                log_error("subtitle error: invalid layout_align found at: %s\n", search_p);
                return;
              }
              if (comma_p == NULL || line + line_len - 1 - comma_p <= 0) { // no remaining chars
                break;
              }
              search_p = comma_p + 1;
            }
          } else if (strncmp(line, "text_align=", key_len+1) == 0) { // text align
            // comma-separated flags, same scheme as layout_align
            char *comma_p;
            char *search_p = delimiter_p + 1;
            int param_len;
            text_align = 0;
            while (1) {
              comma_p = strchr(search_p, ',');
              if (comma_p == NULL) {
                param_len = line + line_len - search_p;
              } else {
                param_len = comma_p - search_p;
              }
              if (strncmp(search_p, "left", param_len) == 0) {
                text_align |= TEXT_ALIGN_LEFT;
              } else if (strncmp(search_p, "center", param_len) == 0) {
                text_align |= TEXT_ALIGN_CENTER;
              } else if (strncmp(search_p, "right", param_len) == 0) {
                text_align |= TEXT_ALIGN_RIGHT;
              } else {
                log_error("subtitle error: invalid text_align found at: %s\n", search_p);
                return;
              }
              if (comma_p == NULL || line + line_len - 1 - comma_p <= 0) { // no remaining chars
                break;
              }
              search_p = comma_p + 1;
            }
          } else if (strncmp(line, "in_preview=", key_len+1) == 0) {
            char *end;
            double value = strtod(delimiter_p+1, &end);
            if (end == delimiter_p+1 || *end != '\0' || errno == ERANGE) { // parse error
              log_error("subtitle error: invalid in_preview: %s\n", delimiter_p+1);
              return;
            }
            in_preview = (value != 0);
          } else if (strncmp(line, "in_video=", key_len+1) == 0) {
            char *end;
            double value = strtod(delimiter_p+1, &end);
            if (end == delimiter_p+1 || *end != '\0' || errno == ERANGE) { // parse error
              log_error("subtitle error: invalid in_video: %s\n", delimiter_p+1);
              return;
            }
            in_video = (value != 0);
          } else {
            log_error("subtitle error: cannot parse line: %s\n", line);
          }
        } else {
          log_error("subtitle error: cannot find delimiter: %s\n", line);
        }
      }
      if (text_len > 0) {
        // replace literal \n with newline
        // (also handles \t -> tab and \\ -> backslash; any other
        // character after a backslash drops the backslash)
        int i;
        int is_escape_active = 0;
        int omitted_bytes = 0;
        char replaced_text[1024];
        char *replaced_text_ptr = replaced_text;
        for (i = 0; i < text_len; i++) {
          if (text[i] == '\\') { // escape char
            if (is_escape_active) { // double escape char
              *replaced_text_ptr++ = '\\';
            } else { // start of escape sequence
              omitted_bytes++;
            }
            is_escape_active = !is_escape_active;
          } else if (text[i] == 'n') {
            if (is_escape_active) { // n after escape char
              *replaced_text_ptr = '\n';
              is_escape_active = 0;
            } else { // n after non-escape char
              *replaced_text_ptr = text[i];
            }
            replaced_text_ptr++;
          } else if (text[i] == 't') {
            if (is_escape_active) { // t after escape char
              *replaced_text_ptr = '\t';
              is_escape_active = 0;
            } else { // t after non-escape char
              *replaced_text_ptr = text[i];
            }
            replaced_text_ptr++;
          } else {
            if (is_escape_active) {
              is_escape_active = 0;
            }
            *replaced_text_ptr++ = text[i];
          }
        }
        text_len -= omitted_bytes;
        replaced_text[text_len] = '\0';

        // Initialize the renderer from an explicit font file if given,
        // otherwise look the font up by name.
        if (font_file[0] != 0x00) {
          subtitle_init(font_file, face_index, font_points, font_dpi);
        } else {
          subtitle_init_with_font_name(font_name, font_points, font_dpi);
        }
        subtitle_set_color(color);
        subtitle_set_stroke_color(stroke_color);
        subtitle_set_stroke_width(stroke_width);
        subtitle_set_visibility(in_preview, in_video);
        subtitle_set_letter_spacing(letter_spacing);
        subtitle_set_line_height_multiply(line_height_multiply);
        subtitle_set_tab_scale(tab_scale);
        if (is_abspos_specified) {
          subtitle_set_position(abspos_x, abspos_y);
        } else {
          subtitle_set_layout(layout_align,
              horizontal_margin, vertical_margin);
        }
        subtitle_set_align(text_align);

        // show subtitle for the requested duration (default 7 seconds)
        subtitle_show(replaced_text, text_len, duration);
      } else {
        // empty text clears the current subtitle
        subtitle_clear();
      }
      fclose(fp);
    }
  } else {
    log_error("error: invalid hook: %s\n", filename);
  }
}
// Tell node-rtsp-rtmp-server the audio start time via the audio control
// socket. Packet layout: 3-byte big-endian payload length, 1-byte packet
// type (0x01 == audio start time), 8-byte big-endian timestamp.
static void send_audio_start_time() {
  if (!is_rtspout_enabled) {
    return;
  }

  const int payload_size = 9;
  int64_t logical_start_time = audio_start_time;
  uint8_t sendbuf[12];

  // 3-byte big-endian payload length
  sendbuf[0] = (payload_size >> 16) & 0xff;
  sendbuf[1] = (payload_size >> 8) & 0xff;
  sendbuf[2] = payload_size & 0xff;
  // packet type (0x01 == audio start time)
  sendbuf[3] = 0x01;
  // 64-bit big-endian start time
  for (int i = 0; i < 8; i++) {
    sendbuf[4 + i] = (logical_start_time >> (8 * (7 - i))) & 0xff;
  }

  if (send(sockfd_audio_control, sendbuf, sizeof(sendbuf), 0) == -1) {
    perror("send audio start time");
    exit(EXIT_FAILURE);
  }
}
// Announce the video stream to node-rtsp-rtmp-server via the video
// control socket. Packet layout: 3-byte big-endian payload length,
// 1-byte packet type (0x00), then the stream name "live/picam".
static void send_video_start_time() {
  if (!is_rtspout_enabled) {
    return;
  }

  const int payload_size = 11;
  uint8_t sendbuf[14];

  // 3-byte big-endian payload length
  sendbuf[0] = (payload_size >> 16) & 0xff;
  sendbuf[1] = (payload_size >> 8) & 0xff;
  sendbuf[2] = payload_size & 0xff;
  // packet type
  sendbuf[3] = 0x00;
  // stream name (no NUL terminator on the wire)
  memcpy(sendbuf + 4, "live/picam", 10);

  if (send(sockfd_video_control, sendbuf, sizeof(sendbuf), 0) == -1) {
    perror("send video start time");
    exit(EXIT_FAILURE);
  }
}
static void setup_socks() {
if (is_rtspout_enabled) {
struct sockaddr_un remote_video;
struct sockaddr_un remote_audio;
int len;
struct sockaddr_un remote_video_control;
struct sockaddr_un remote_audio_control;
log_debug("connecting to UNIX domain sockets\n");
// Setup sockfd_video
if ((sockfd_video = socket(AF_UNIX, SOCK_STREAM, 0)) == -1) {
perror("socket video");
exit(EXIT_FAILURE);
}
remote_video.sun_family = AF_UNIX;
strcpy(remote_video.sun_path, rtsp_video_data_path);
len = strlen(remote_video.sun_path) + sizeof(remote_video.sun_family);
if (connect(sockfd_video, (struct sockaddr *)&remote_video, len) == -1) {
log_error("error: failed to connect to video data socket (%s): %s\n"
"perhaps RTSP server (https://github.com/iizukanao/node-rtsp-rtmp-server) is not running?\n",
rtsp_video_data_path, strerror(errno));
exit(EXIT_FAILURE);
}
// Setup sockfd_video_control
if ((sockfd_video_control = socket(AF_UNIX, SOCK_STREAM, 0)) == -1) {
perror("socket video_control");
exit(EXIT_FAILURE);
}
remote_video_control.sun_family = AF_UNIX;
strcpy(remote_video_control.sun_path, rtsp_video_control_path);
len = strlen(remote_video_control.sun_path) + sizeof(remote_video_control.sun_family);
if (connect(sockfd_video_control, (struct sockaddr *)&remote_video_control, len) == -1) {
log_error("error: failed to connect to video control socket (%s): %s\n"
"perhaps RTSP server (https://github.com/iizukanao/node-rtsp-rtmp-server) is not running?\n",
rtsp_video_control_path, strerror(errno));
exit(EXIT_FAILURE);
}
// Setup sockfd_audio
if ((sockfd_audio = socket(AF_UNIX, SOCK_STREAM, 0)) == -1) {
perror("socket audio");
exit(EXIT_FAILURE);
}
remote_audio.sun_family = AF_UNIX;
strcpy(remote_audio.sun_path, rtsp_audio_data_path);
len = strlen(remote_audio.sun_path) + sizeof(remote_audio.sun_family);
if (connect(sockfd_audio, (struct sockaddr *)&remote_audio, len) == -1) {
log_error("error: failed to connect to audio data socket (%s): %s\n"
"perhaps RTSP server (https://github.com/iizukanao/node-rtsp-rtmp-server) is not running?\n",
rtsp_audio_data_path, strerror(errno));
exit(EXIT_FAILURE);
}
// Setup sockfd_audio_control
if ((sockfd_audio_control = socket(AF_UNIX, SOCK_STREAM, 0)) == -1) {
perror("socket audio_control");
exit(EXIT_FAILURE);
}
remote_audio_control.sun_family = AF_UNIX;
strcpy(remote_audio_control.sun_path, rtsp_audio_control_path);
len = strlen(remote_audio_control.sun_path) + sizeof(remote_audio_control.sun_family);
if (connect(sockfd_audio_control, (struct sockaddr *)&remote_audio_control, len) == -1) {
log_error("error: failed to connect to audio control socket (%s): %s\n"
"perhaps RTSP server (https://github.com/iizukanao/node-rtsp-rtmp-server) is not running?\n",
rtsp_audio_control_path, strerror(errno));
exit(EXIT_FAILURE);
}
} // if (is_rtspout_enabled)
}
// Closes the four RTSP output sockets opened by setup_socks().
// No-op unless --rtspout is enabled.
static void teardown_socks() {
  if (!is_rtspout_enabled) {
    return;
  }
  int fds[] = { sockfd_video, sockfd_video_control,
      sockfd_audio, sockfd_audio_control };
  for (size_t i = 0; i < sizeof(fds) / sizeof(fds[0]); i++) {
    close(fds[i]);
  }
}
// Advances and returns the audio PTS. Audio timing is the base clock,
// so the PTS simply increases by a fixed step per frame.
static int64_t get_next_audio_pts() {
  audio_frame_count++;
  audio_current_pts += audio_pts_step_base;
  return audio_current_pts;
}
// Return next video PTS for variable frame rate.
// The PTS advances by the wall-clock time elapsed since the previous
// frame, converted from nanoseconds to 90 kHz PTS units.
static int64_t get_next_video_pts_vfr() {
  video_frame_count++;

  if (time_for_last_pts == 0) {
    // First frame: no clock reference yet.
    video_current_pts = 0;
  } else {
    struct timespec now;
    clock_gettime(CLOCK_MONOTONIC, &now);
    int64_t now_ns = now.tv_sec * INT64_C(1000000000) + now.tv_nsec;
    video_current_pts = last_pts
        + (now_ns - time_for_last_pts) // elapsed nanoseconds
        * .00009f;                     // nanoseconds to PTS
  }

  return video_current_pts;
}
// Return next video PTS for constant frame rate
//
// Audio PTS is the base clock; this function nudges the video PTS
// toward it:
// - if video lags by PTS_DIFF_TOO_LARGE or more, jump straight to the
//   audio PTS;
// - outside +/- tolerance, add or subtract a small correction (150 PTS
//   units) per frame, latching pts_mode to SPEED_UP/SPEED_DOWN;
// - once the clocks are within 2000 PTS units, return to NORMAL mode.
static int64_t get_next_video_pts_cfr() {
  int64_t pts;
  video_frame_count++;

  // how far video would lag audio after one nominal step
  int pts_diff = audio_current_pts - video_current_pts - video_pts_step;
  int tolerance = (video_pts_step + audio_pts_step_base) * 2;
  if (pts_diff >= PTS_DIFF_TOO_LARGE) {
    // video PTS is too slow
    log_debug("vR%d", pts_diff);
    pts = audio_current_pts;
  } else if (pts_diff >= tolerance) {
    if (pts_mode != PTS_SPEED_UP) {
      // speed up video PTS
      speed_up_count++;
      pts_mode = PTS_SPEED_UP;
      log_debug("vSPEED_UP(%d)", pts_diff);
    }
    // Catch up with audio PTS if the delay is too large.
    pts = video_current_pts + video_pts_step + 150;
  } else if (pts_diff <= -tolerance) {
    if (pts_mode != PTS_SPEED_DOWN) {
      // speed down video PTS
      pts_mode = PTS_SPEED_DOWN;
      speed_down_count++;
      log_debug("vSPEED_DOWN(%d)", pts_diff);
    }
    pts = video_current_pts + video_pts_step - 150;
  } else {
    pts = video_current_pts + video_pts_step;

    if (pts_diff < 2000 && pts_diff > -2000) {
      if (pts_mode != PTS_SPEED_NORMAL) {
        // video PTS has caught up with audio PTS
        log_debug("vNORMAL");
        pts_mode = PTS_SPEED_NORMAL;
      }
    } else {
      // still drifting: keep applying the active correction
      if (pts_mode == PTS_SPEED_UP) {
        pts += 150;
      } else if (pts_mode == PTS_SPEED_DOWN) {
        pts -= 150;
      }
    }
  }

  video_current_pts = pts;
  return pts;
}
// Return next PTS for video stream, dispatching on the frame-rate mode.
static int64_t get_next_video_pts() {
  return is_vfr_enabled
      ? get_next_video_pts_vfr()  // variable frame rate
      : get_next_video_pts_cfr(); // constant frame rate
}
// Returns the wall-clock time (ns) at which the next audio frame should
// be written, or LLONG_MIN before the first frame has been captured.
static int64_t get_next_audio_write_time() {
  if (audio_frame_count == 0) {
    return LLONG_MIN;
  }
  // Each audio frame covers period_size samples, so frames arrive at
  // audio_sample_rate / period_size per second.
  float frames_per_second = (float)audio_sample_rate / (float)period_size;
  return audio_start_time + audio_frame_count * 1000000000.0f / frames_per_second;
}
// Logs a one-line audio/video timing summary: a-v (audio minus video
// PTS), c-a (wall-clock-derived PTS minus audio PTS), speed-up/down
// counts, and the last video PTS.
static void print_audio_timing() {
  struct timespec ts;
  clock_gettime(CLOCK_MONOTONIC, &ts);
  int64_t cur_time = ts.tv_sec * INT64_C(1000000000) + ts.tv_nsec;
  int64_t video_pts = video_current_pts;
  int64_t audio_pts = audio_current_pts;
  int64_t avdiff = audio_pts - video_pts;

  // The following equation causes int64 overflow:
  // (cur_time - audio_start_time) * INT64_C(90000) / INT64_C(1000000000);
  int64_t clock_pts = (cur_time - audio_start_time) * .00009f;

  // Use PRId64 for every int64_t argument; "%lld" is not guaranteed to
  // match int64_t on all platforms and a varargs type mismatch is
  // undefined behavior (the file already uses PRId64 elsewhere).
  log_debug(" a-v=%" PRId64 " c-a=%" PRId64 " u=%d d=%d pts=%" PRId64,
      avdiff, clock_pts - audio_pts, speed_up_count, speed_down_count, last_pts);
}
// Sends one encoded audio frame to node-rtsp-rtmp-server over the audio
// data socket. Wire format: 3-byte big-endian payload length, 1-byte
// packet type (0x03 == audio data), 6-byte big-endian PTS, frame data.
static void send_audio_frame(uint8_t *databuf, int databuflen, int64_t pts) {
  if (!is_rtspout_enabled) {
    return;
  }

  const int payload_size = databuflen + 7; // +1(packet type) +6(pts)
  const int total_size = payload_size + 3; // more 3 bytes for payload length
  uint8_t *packet = malloc(total_size);
  if (packet == NULL) {
    log_error("error: cannot allocate memory for audio sendbuf: size=%d", total_size);
    return;
  }

  // 3-byte big-endian payload length
  packet[0] = (payload_size >> 16) & 0xff;
  packet[1] = (payload_size >> 8) & 0xff;
  packet[2] = payload_size & 0xff;
  // packet type (0x03 == audio data)
  packet[3] = 0x03;
  // 48-bit big-endian PTS
  for (int i = 0; i < 6; i++) {
    packet[4 + i] = (pts >> (8 * (5 - i))) & 0xff;
  }
  memcpy(packet + 10, databuf, databuflen);

  if (send(sockfd_audio, packet, total_size, 0) == -1) {
    perror("send audio data");
  }
  free(packet);
}
// Sends one encoded video frame to node-rtsp-rtmp-server over the video
// data socket. Wire format: 3-byte big-endian payload length, 1-byte
// packet type (0x02 == video data), 6-byte big-endian PTS, frame data.
static void send_video_frame(uint8_t *databuf, int databuflen, int64_t pts) {
  if (!is_rtspout_enabled) {
    return;
  }

  const int payload_size = databuflen + 7; // +1(packet type) +6(pts)
  const int total_size = payload_size + 3; // more 3 bytes for payload length
  uint8_t *packet = malloc(total_size);
  if (packet == NULL) {
    log_error("error: cannot allocate memory for video sendbuf: size=%d", total_size);
    return;
  }

  // 3-byte big-endian payload length
  packet[0] = (payload_size >> 16) & 0xff;
  packet[1] = (payload_size >> 8) & 0xff;
  packet[2] = payload_size & 0xff;
  // packet type (0x02 == video data)
  packet[3] = 0x02;
  // 48-bit big-endian PTS
  for (int i = 0; i < 6; i++) {
    packet[4 + i] = (pts >> (8 * (5 - i))) & 0xff;
  }
  memcpy(packet + 10, databuf, databuflen);

  if (send(sockfd_video, packet, total_size, 0) == -1) {
    perror("send video data");
  }
  free(packet);
}
// send keyframe (nal_unit_type 5)
// Builds one complete H.264 access unit (AUD + SPS/PPS + IDR slice),
// records it for the recorder thread, and writes it to the TCP and HLS
// outputs.
// data/data_len: the IDR slice NAL (with start code) from the encoder.
// consume_time: when non-zero, advance the video PTS; otherwise reuse the
// current PTS.
// Returns the hls_write_packet() result, or 0 when HLS output is disabled.
static int send_keyframe(uint8_t *data, size_t data_len, int consume_time) {
  uint8_t *buf, *ptr;
  int total_size, i;
  int ret = 0; // fix: was uninitialized; returned as-is when HLS output is off
  AVPacket pkt;
  int64_t pts;

  total_size = access_unit_delimiter_length + codec_config_total_size + data_len;
  ptr = buf = av_malloc(total_size);
  if (buf == NULL) {
    log_error("error: send_keyframe: cannot allocate memory for buf (%d bytes)\n", total_size);
    exit(EXIT_FAILURE);
  }

  // One entire access unit should be passed to av_write_frame().
  // If access unit delimiter (AUD) is not present on top of access unit,
  // libavformat/mpegtsenc.c automatically inserts AUD.
  // Improperly inserted AUD makes whole video unplayable on QuickTime.
  // Although VLC can play those files.
  // One access unit should contain exactly one video frame (primary coded picture).
  // See spec p.5 "3.1 access unit"

  // access unit delimiter (nal_unit_type 9)
  memcpy(ptr, access_unit_delimiter, access_unit_delimiter_length);
  ptr += access_unit_delimiter_length;

  // codec configs (nal_unit_type 7 and 8)
  for (i = 0; i < n_codec_configs; i++) {
    memcpy(ptr, codec_configs[i], codec_config_sizes[i]);
    ptr += codec_config_sizes[i];
  }

  // I frame (nal_unit_type 5)
  memcpy(ptr, data, data_len);

  av_init_packet(&pkt);
  pkt.stream_index = hls->format_ctx->streams[0]->index;
  pkt.flags |= AV_PKT_FLAG_KEY;
  pkt.data = buf;
  pkt.size = total_size;

  if (consume_time) {
    pts = get_next_video_pts();
  } else {
    pts = video_current_pts;
  }

#if ENABLE_AUTO_GOP_SIZE_CONTROL_FOR_VFR
  if (is_vfr_enabled) {
    // In variable frame rate mode, grow the GOP size when keyframes arrive
    // faster than expected so the keyframe interval stays near one second.
    int64_t pts_between_keyframes = pts - last_keyframe_pts;
    if (pts_between_keyframes < 80000) { // < .89 seconds
      // Frame rate is running faster than we thought
      int ideal_video_gop_size = (frames_since_last_keyframe + 1)
        * 90000.0f / pts_between_keyframes;
      if (ideal_video_gop_size > video_gop_size) {
        video_gop_size = ideal_video_gop_size;
        log_debug("increase gop_size to %d ", ideal_video_gop_size);
        set_gop_size(video_gop_size);
      }
    }
    last_keyframe_pts = pts;
    frames_since_last_keyframe = 0;
  }
#endif

  send_video_frame(data, data_len, pts);

#if ENABLE_PTS_WRAP_AROUND
  pts = pts % PTS_MODULO;
#endif

  last_pts = pts;
  if (is_vfr_enabled) {
    // Remember the wall-clock time of this PTS for VFR timestamp tracking
    struct timespec ts;
    clock_gettime(CLOCK_MONOTONIC, &ts);
    time_for_last_pts = ts.tv_sec * INT64_C(1000000000) + ts.tv_nsec;
  }

  // PTS (presentation time stamp): Timestamp when a decoder should play this frame
  // DTS (decoding time stamp): Timestamp when a decoder should decode this frame
  // DTS should be smaller than or equal to PTS.
  pkt.pts = pkt.dts = pts;

  // Hand a private copy of the access unit to the recorder thread
  // (add_encoded_packet takes ownership of copied_data).
  uint8_t *copied_data = av_malloc(total_size);
  if (copied_data == NULL) { // fix: allocation result was not checked
    log_error("error: send_keyframe: cannot allocate memory for copied_data (%d bytes)\n", total_size);
    exit(EXIT_FAILURE);
  }
  memcpy(copied_data, buf, total_size);
  pthread_mutex_lock(&rec_write_mutex);
  add_encoded_packet(pts, copied_data, total_size, pkt.stream_index, pkt.flags);
  mark_keyframe_packet();
  pthread_mutex_unlock(&rec_write_mutex);

  if (is_recording) {
    // Wake up the recorder thread
    pthread_mutex_lock(&rec_mutex);
    rec_thread_needs_write = 1;
    pthread_cond_signal(&rec_cond);
    pthread_mutex_unlock(&rec_mutex);
  }

  if (is_tcpout_enabled) {
    pthread_mutex_lock(&tcp_mutex);
    av_write_frame(tcp_ctx, &pkt);
    pthread_mutex_unlock(&tcp_mutex);
  }

  if (is_hlsout_enabled) {
    pthread_mutex_lock(&mutex_writing);
    // Start a new segment on every keyframe except the very first frame
    int split;
    if (video_frame_count == 1) {
      split = 0;
    } else {
      split = 1;
    }
    ret = hls_write_packet(hls, &pkt, split);
    pthread_mutex_unlock(&mutex_writing);
    if (ret < 0) {
      av_strerror(ret, errbuf, sizeof(errbuf));
      log_error("keyframe write error (hls): %s\n", errbuf);
      log_error("please check if the disk is full\n");
    }
  }

  av_free(buf); // fix: buf came from av_malloc(), so release with av_free()
  av_free_packet(&pkt);
  return ret;
}
// send P frame (nal_unit_type 1)
static int send_pframe(uint8_t *data, size_t data_len, int consume_time) {
uint8_t *buf;
int total_size, ret;
AVPacket pkt;
int64_t pts;
if (data_len == 0) {
log_debug("Z");
return 0;
}
total_size = access_unit_delimiter_length + data_len;
buf = av_malloc(total_size);
if (buf == NULL) {
log_fatal("error: send_pframe malloc failed: size=%d\n", total_size);
return 0;
}
// access unit delimiter (nal_unit_type 9)
memcpy(buf, access_unit_delimiter, access_unit_delimiter_length);
// P frame (nal_unit_type 1)
memcpy(buf + access_unit_delimiter_length, data, data_len);
av_init_packet(&pkt);
pkt.stream_index = hls->format_ctx->streams[0]->index;
pkt.data = buf;
pkt.size = total_size;
if (consume_time) {
pts = get_next_video_pts();
} else {
pts = video_current_pts;
}
#if ENABLE_AUTO_GOP_SIZE_CONTROL_FOR_VFR
if (is_vfr_enabled) {
if (video_current_pts - last_keyframe_pts >= 100000) { // >= 1.11 seconds
// Frame rate is running slower than we thought
int ideal_video_gop_size = frames_since_last_keyframe;
if (ideal_video_gop_size == 0) {
ideal_video_gop_size = 1;
}
if (ideal_video_gop_size < video_gop_size) {
video_gop_size = ideal_video_gop_size;
log_debug("decrease gop_size to %d ", video_gop_size);
set_gop_size(video_gop_size);
}
}
frames_since_last_keyframe++;
}
#endif
send_video_frame(data, data_len, pts);
#if ENABLE_PTS_WRAP_AROUND
pts = pts % PTS_MODULO;
#endif
last_pts = pts;
if (is_vfr_enabled) {
struct timespec ts;
clock_gettime(CLOCK_MONOTONIC, &ts);
time_for_last_pts = ts.tv_sec * INT64_C(1000000000) + ts.tv_nsec;
}
pkt.pts = pkt.dts = pts;
uint8_t *copied_data = av_malloc(total_size);
memcpy(copied_data, buf, total_size);
pthread_mutex_lock(&rec_write_mutex);
add_encoded_packet(pts, copied_data, total_size, pkt.stream_index, pkt.flags);
pthread_mutex_unlock(&rec_write_mutex);
if (is_recording) {
pthread_mutex_lock(&rec_mutex);
rec_thread_needs_write = 1;
pthread_cond_signal(&rec_cond);
pthread_mutex_unlock(&rec_mutex);
}
if (is_tcpout_enabled) {
pthread_mutex_lock(&tcp_mutex);
av_write_frame(tcp_ctx, &pkt);
pthread_mutex_unlock(&tcp_mutex);
}
if (is_hlsout_enabled) {
pthread_mutex_lock(&mutex_writing);
ret = hls_write_packet(hls, &pkt, 0);
pthread_mutex_unlock(&mutex_writing);
if (ret < 0) {
av_strerror(ret, errbuf, sizeof(errbuf));
log_error("P frame write error (hls): %s\n", errbuf);
log_error("please check if the disk is full\n");
}
}
free(buf);
av_free_packet(&pkt);
return ret;
}
// Callback function that is called when an error has occurred
// Tries to recover the ALSA capture stream from a buffer overrun (-EPIPE)
// or a suspend (-ESTRPIPE). Returns 0 when a recovery attempt was made,
// otherwise the original error code.
static int xrun_recovery(snd_pcm_t *handle, int error) {
  switch(error) {
    case -EPIPE: // Buffer overrun
      log_error("microphone error: buffer overrun\n");
      if ((error = snd_pcm_prepare(handle)) < 0) {
        // fix: message previously read "overrrun"
        log_error("microphone error: buffer overrun cannot be recovered, "
            "snd_pcm_prepare failed: %s\n", snd_strerror(error));
      }
      return 0;
    case -ESTRPIPE: // Microphone is suspended
      log_error("microphone error: suspended\n");
      // Wait until the suspend flag is cleared
      while ((error = snd_pcm_resume(handle)) == -EAGAIN) {
        sleep(1);
      }
      if (error < 0) {
        // Resume is not supported by the device; fall back to prepare
        if ((error = snd_pcm_prepare(handle)) < 0) {
          log_error("microphone error: suspend cannot be recovered, "
              "snd_pcm_prepare failed: %s\n", snd_strerror(error));
        }
      }
      return 0;
    case -EBADFD: // PCM descriptor is wrong
      log_error("microphone error: EBADFD\n");
      break;
    default:
      log_error("microphone error: unknown, error = %d\n",error);
      break;
  }
  return error;
}
// Wait for data using poll
// Blocks until the ALSA capture device reports readable data or an error.
// target_fds/audio_fd_count: poll descriptors previously obtained from
// snd_pcm_poll_descriptors().
// Returns a bitmask containing AVAIL_AUDIO when audio can be read,
// -EIO on a PCM error event, or the negative poll() result on failure.
static int wait_for_poll(snd_pcm_t *device, struct pollfd *target_fds, unsigned int audio_fd_count) {
  unsigned short revents;
  int avail_flags = 0;
  int ret;
  while (1) {
    ret = poll(target_fds, audio_fd_count, -1); // -1 means block
    if (ret < 0) {
      // Suppress the error log during shutdown, when poll is expected to
      // be interrupted by a signal
      if (keepRunning) {
        log_error("audio poll error: %d\n", ret);
      }
      return ret;
    } else {
      // Translate the raw poll(2) events back into PCM events; ALSA may
      // use the descriptors in non-obvious ways, so revents must not be
      // read directly from target_fds
      snd_pcm_poll_descriptors_revents(device, target_fds, audio_fd_count, &revents);
      if (revents & POLLERR) {
        return -EIO;
      }
      if (revents & POLLIN) { // Data is ready for read
        avail_flags |= AVAIL_AUDIO;
      }
      if (avail_flags) {
        return avail_flags;
      }
    }
  }
}
// Open the ALSA device used for audio capture.
// Returns 0 on success, -1 when the device cannot be opened.
static int open_audio_capture_device() {
  log_debug("opening ALSA device for capture: %s\n", alsa_dev);
  int err = snd_pcm_open(&capture_handle, alsa_dev, SND_PCM_STREAM_CAPTURE, 0);
  if (err >= 0) {
    return 0;
  }
  log_error("error: cannot open audio capture device '%s': %s\n",
      alsa_dev, snd_strerror(err));
  log_error("hint: specify correct ALSA device with '--alsadev <dev>'\n");
  return -1;
}
// Open and configure the ALSA playback device used for audio preview
// (monitoring the captured audio). Exits the process on any configuration
// failure. Returns 0 on success.
static int open_audio_preview_device() {
  int err;
  snd_pcm_hw_params_t *audio_preview_params;

  log_debug("opening ALSA device for playback (preview): %s\n", audio_preview_dev);
  err = snd_pcm_open(&audio_preview_handle, audio_preview_dev, SND_PCM_STREAM_PLAYBACK, SND_PCM_NONBLOCK);
  if (err < 0) {
    log_error("error: cannot open audio playback (preview) device '%s': %s\n",
        audio_preview_dev, snd_strerror(err));
    log_error("hint: specify correct ALSA device with '--audiopreviewdev <dev>'\n");
    exit(EXIT_FAILURE);
  }

  err = snd_pcm_hw_params_malloc(&audio_preview_params);
  if (err < 0) {
    log_fatal("error: cannot allocate hardware parameter structure for audio preview: %s\n",
        snd_strerror(err));
    exit(EXIT_FAILURE);
  }

  // fill hw_params with a full configuration space for a PCM.
  err = snd_pcm_hw_params_any(audio_preview_handle, audio_preview_params);
  if (err < 0) {
    log_fatal("error: cannot initialize hardware parameter structure for audio preview: %s\n",
        snd_strerror(err));
    exit(EXIT_FAILURE);
  }

  // enable rate resampling
  unsigned int enable_resampling = 1;
  err = snd_pcm_hw_params_set_rate_resample(audio_preview_handle, audio_preview_params, enable_resampling);
  if (err < 0) {
    log_fatal("error: cannot enable rate resampling for audio preview: %s\n",
        snd_strerror(err));
    exit(EXIT_FAILURE);
  }

  err = snd_pcm_hw_params_set_access(audio_preview_handle, audio_preview_params,
      SND_PCM_ACCESS_MMAP_INTERLEAVED);
  if (err < 0) {
    log_fatal("error: cannot set access type for audio preview: %s\n",
        snd_strerror(err));
    exit(EXIT_FAILURE);
  }

  // SND_PCM_FORMAT_S16_LE => PCM 16 bit signed little endian
  err = snd_pcm_hw_params_set_format(audio_preview_handle, audio_preview_params, SND_PCM_FORMAT_S16_LE);
  if (err < 0) {
    log_fatal("error: cannot set sample format for audio preview: %s\n",
        snd_strerror(err));
    exit(EXIT_FAILURE);
  }

  // Preview uses the same channel count as the capture side
  audio_preview_channels = audio_channels;
  err = snd_pcm_hw_params_set_channels(audio_preview_handle, audio_preview_params, audio_preview_channels);
  if (err < 0) {
    log_fatal("error: cannot set channel count for audio preview: %s\n",
        snd_strerror(err));
    exit(EXIT_FAILURE);
  }

  // set the sample rate
  unsigned int rate = audio_sample_rate;
  err = snd_pcm_hw_params_set_rate_near(audio_preview_handle, audio_preview_params, &rate, 0);
  if (err < 0) {
    log_fatal("error: cannot set sample rate for audio preview: %s\n",
        snd_strerror(err));
    exit(EXIT_FAILURE);
  }

  // set the buffer size
  err = snd_pcm_hw_params_set_buffer_size(audio_preview_handle, audio_preview_params,
      audio_buffer_size * ALSA_PLAYBACK_BUFFER_MULTIPLY);
  if (err < 0) {
    log_fatal("error: failed to set buffer size for audio preview: audio_buffer_size=%d error=%s\n",
        audio_buffer_size, snd_strerror(err));
    exit(EXIT_FAILURE);
  }

  // fix: dir is an in/out parameter of set_period_size_near (search
  // direction); it was previously passed uninitialized. 0 matches the
  // capture-side configuration.
  int dir = 0;
  // set the period size
  err = snd_pcm_hw_params_set_period_size_near(audio_preview_handle, audio_preview_params,
      (snd_pcm_uframes_t *)&period_size, &dir);
  if (err < 0) {
    log_fatal("error: failed to set period size for audio preview: %s\n",
        snd_strerror(err));
    exit(EXIT_FAILURE);
  }

  // apply the hardware configuration
  err = snd_pcm_hw_params (audio_preview_handle, audio_preview_params);
  if (err < 0) {
    log_fatal("error: cannot set PCM hardware parameters for audio preview: %s\n",
        snd_strerror(err));
    exit(EXIT_FAILURE);
  }

  // end of configuration
  snd_pcm_hw_params_free(audio_preview_params);

  // dump the configuration of audio_preview_handle
  if (log_get_level() <= LOG_LEVEL_DEBUG) {
    snd_output_t *output;
    err = snd_output_stdio_attach(&output, stdout, 0);
    if (err < 0) {
      log_error("snd_output_stdio_attach failed: %s\n", snd_strerror(err));
      return 0;
    }
    log_debug("audio preview device:\n");
    snd_pcm_dump(audio_preview_handle, output);
  }

  return 0;
}
// Configure the microphone before main setup
// Allocates the global ALSA hardware-parameter structure and negotiates the
// channel count, falling back from mono to stereo (or vice versa) when the
// requested count is not supported. Exits on unrecoverable errors.
static void preconfigure_microphone() {
  int err;

  // allocate an invalid snd_pcm_hw_params_t using standard malloc
  err = snd_pcm_hw_params_malloc(&alsa_hw_params);
  if (err < 0) {
    log_fatal("error: cannot allocate hardware parameter structure (%s)\n",
        snd_strerror(err));
    exit(EXIT_FAILURE);
  }

  // fill hw_params with a full configuration space for a PCM.
  err = snd_pcm_hw_params_any(capture_handle, alsa_hw_params);
  if (err < 0) {
    log_fatal("error: cannot initialize hardware parameter structure (%s)\n",
        snd_strerror(err));
    exit(EXIT_FAILURE);
  }

  // set the number of channels; on failure, try the other channel count
  err = snd_pcm_hw_params_set_channels(capture_handle, alsa_hw_params, audio_channels);
  if (err < 0) {
    const char *msg = (audio_channels == 1)
        ? "cannot use mono audio; trying stereo\n"
        : "cannot use stereo audio; trying mono\n";
    // Log loudly only when the user explicitly asked for this channel count
    if (is_audio_channels_specified) {
      log_info("%s", msg);
    } else {
      log_debug("%s", msg);
    }
    audio_channels = (audio_channels == 1) ? 2 : 1;
    err = snd_pcm_hw_params_set_channels(capture_handle, alsa_hw_params, audio_channels);
    if (err < 0) {
      log_fatal("error: cannot set channel count for microphone (%s)\n", snd_strerror(err));
      exit(EXIT_FAILURE);
    }
  }
  log_debug("final audio_channels: %d\n", audio_channels);
}
// Configure the ALSA capture device (access type, format, rate, buffer and
// period sizes), apply the parameters, and set up the poll descriptors used
// by wait_for_poll(). Exits on fatal configuration errors; returns 0 on
// success or a negative value when poll descriptors cannot be obtained.
static int configure_audio_capture_device() {
  // ALSA
  int err;

  // libavcodec: the AAC encoder context determines the frame size used to
  // compute the ALSA buffer size below
#if AUDIO_ONLY
  AVCodecContext *ctx = hls->format_ctx->streams[0]->codec;
#else
  AVCodecContext *ctx = hls->format_ctx->streams[1]->codec;
#endif
  int buffer_size;

  // ALSA poll mmap
  snd_pcm_uframes_t real_buffer_size; // real buffer size in frames
  int dir;

  // Bytes required for one encoder frame of interleaved samples
  buffer_size = av_samples_get_buffer_size(NULL, ctx->channels,
      ctx->frame_size, ctx->sample_fmt, 0);

  // use mmap
  err = snd_pcm_hw_params_set_access(capture_handle, alsa_hw_params,
      SND_PCM_ACCESS_MMAP_INTERLEAVED);
  if (err < 0) {
    log_fatal("error: cannot set access type (%s)\n", snd_strerror(err));
    exit(EXIT_FAILURE);
  }

  // SND_PCM_FORMAT_S16_LE => PCM 16 bit signed little endian
  err = snd_pcm_hw_params_set_format(capture_handle, alsa_hw_params, SND_PCM_FORMAT_S16_LE);
  if (err < 0) {
    log_fatal("error: cannot set sample format (%s)\n", snd_strerror(err));
    exit(EXIT_FAILURE);
  }

  // set the sample rate
  unsigned int rate = audio_sample_rate;
  err = snd_pcm_hw_params_set_rate_near(capture_handle, alsa_hw_params, &rate, 0);
  if (err < 0) {
    log_fatal("error: cannot set sample rate (%s)\n", snd_strerror(err));
    exit(EXIT_FAILURE);
  }

  // Verify the device accepted the exact requested rate; a nearby rate
  // would cause audio/video drift, so treat it as fatal
  unsigned int actual_rate;
  int actual_dir;
  err = snd_pcm_hw_params_get_rate(alsa_hw_params, &actual_rate, &actual_dir);
  if (err < 0) {
    log_fatal("error: failed to get sample rate from microphone (%s)\n", snd_strerror(err));
    exit(EXIT_FAILURE);
  }
  log_debug("actual sample rate=%u dir=%d\n", actual_rate, actual_dir);
  if (actual_rate != audio_sample_rate) {
    log_fatal("error: failed to set sample rate for microphone to %d (got %d)\n",
        audio_sample_rate, actual_rate);
    exit(EXIT_FAILURE);
  }

  // set the buffer size; if the device rejects it, halve the multiplier
  // until it fits (or give up at 0)
  int alsa_buffer_multiply = ALSA_BUFFER_MULTIPLY;
  err = snd_pcm_hw_params_set_buffer_size(capture_handle, alsa_hw_params,
      buffer_size * alsa_buffer_multiply);
  while (err < 0) {
    log_debug("failed to set buffer size for microphone: buffer_size=%d multiply=%d\n", buffer_size, alsa_buffer_multiply);
    alsa_buffer_multiply /= 2;
    if (alsa_buffer_multiply == 0) {
      break;
    }
    log_debug("trying smaller buffer size for microphone: buffer_size=%d multiply=%d\n", buffer_size, alsa_buffer_multiply);
    err = snd_pcm_hw_params_set_buffer_size(capture_handle, alsa_hw_params,
        buffer_size * alsa_buffer_multiply);
  }
  if (err < 0) {
    log_fatal("error: failed to set buffer size for microphone: buffer_size=%d multiply=%d (%s)\n", buffer_size, alsa_buffer_multiply, snd_strerror(err));
    exit(EXIT_FAILURE);
  }

  // check the value of the buffer size
  err = snd_pcm_hw_params_get_buffer_size(alsa_hw_params, &real_buffer_size);
  if (err < 0) {
    log_fatal("error: failed to get buffer size from microphone (%s)\n", snd_strerror(err));
    exit(EXIT_FAILURE);
  }
  log_debug("microphone: buffer size: %d frames (channels=%d buffer_size=%d multiply=%d)\n", (int)real_buffer_size, audio_channels, buffer_size, alsa_buffer_multiply);

  audio_buffer_size = buffer_size;

  log_debug("microphone: setting period size to %d\n", period_size);
  dir = 0;
  // set the period size
  // NOTE(review): the cast assumes period_size has the same width as
  // snd_pcm_uframes_t on this platform — confirm on 64-bit builds
  err = snd_pcm_hw_params_set_period_size_near(capture_handle, alsa_hw_params,
      (snd_pcm_uframes_t *)&period_size, &dir);
  if (err < 0) {
    log_fatal("error: failed to set period size for microphone (%s)\n", snd_strerror(err));
    exit(EXIT_FAILURE);
  }

  snd_pcm_uframes_t actual_period_size;
  err = snd_pcm_hw_params_get_period_size(alsa_hw_params, &actual_period_size, &dir);
  if (err < 0) {
    log_fatal("error: failed to get period size from microphone (%s)\n", snd_strerror(err));
    exit(EXIT_FAILURE);
  }
  log_debug("actual_period_size=%lu dir=%d\n", actual_period_size, dir);

  // apply the hardware configuration
  err = snd_pcm_hw_params (capture_handle, alsa_hw_params);
  if (err < 0) {
    log_fatal("error: cannot set PCM hardware parameters for microphone (%s)\n", snd_strerror(err));
    exit(EXIT_FAILURE);
  }

  // end of configuration
  snd_pcm_hw_params_free(alsa_hw_params);

  err = snd_pcm_prepare(capture_handle);
  if (err < 0) {
    log_fatal("error: cannot prepare audio interface for use (%s)\n", snd_strerror(err));
    exit(EXIT_FAILURE);
  }

  // Set up the poll descriptors consumed by wait_for_poll()
  audio_fd_count = snd_pcm_poll_descriptors_count(capture_handle);
  if (audio_fd_count <= 0) {
    log_error("microphone error: invalid poll descriptors count\n");
    return audio_fd_count;
  }
  poll_fds = malloc(sizeof(struct pollfd) * audio_fd_count);
  if (poll_fds == NULL) {
    log_fatal("error: cannot allocate memory for poll_fds\n");
    exit(EXIT_FAILURE);
  }
  // get poll descriptors
  err = snd_pcm_poll_descriptors(capture_handle, poll_fds, audio_fd_count);
  if (err < 0) {
    log_error("microphone error: unable to obtain poll descriptors for capture: %s\n", snd_strerror(err));
    return err;
  }
  is_first_audio = 1;

  // dump the configuration of capture_handle
  if (log_get_level() <= LOG_LEVEL_DEBUG) {
    snd_output_t *output;
    err = snd_output_stdio_attach(&output, stdout, 0);
    if (err < 0) {
      log_error("snd_output_stdio_attach failed: %s\n", snd_strerror(err));
      return 0;
    }
    log_debug("audio capture device:\n");
    snd_pcm_dump(capture_handle, output);
  }

  return 0;
}
// Flush the AAC encoder (drain its delayed frames) and release the audio
// encoding buffers.
static void teardown_audio_encode() {
#if AUDIO_ONLY
  AVCodecContext *ctx = hls->format_ctx->streams[0]->codec;
#else
  AVCodecContext *ctx = hls->format_ctx->streams[1]->codec;
#endif
  int got_output, ret;
  AVPacket pkt;

  // get the delayed frames: feed NULL frames until the encoder reports no
  // more output.
  // fix: the previous loop incremented an uninitialized counter (UB); the
  // counter was never used, so it is simply removed.
  got_output = 1;
  while (got_output) {
    av_init_packet(&pkt);
    pkt.data = NULL; // packet data will be allocated by the encoder
    pkt.size = 0;
    ret = avcodec_encode_audio2(ctx, &pkt, NULL, &got_output);
    av_free_packet(&pkt);
    if (ret < 0) {
      av_strerror(ret, errbuf, sizeof(errbuf));
      log_error("error encoding frame: %s\n", errbuf);
      break;
    }
  }

  av_freep(&samples);
#if AUDIO_BUFFER_CHUNKS > 0
  for (int i = 0; i < AUDIO_BUFFER_CHUNKS; i++) {
    av_freep(&audio_buffer[i]);
  }
#endif
  av_frame_free(&av_frame);
}
// Release resources acquired for audio capture: the poll descriptor array
// and the ALSA capture handle.
static void teardown_audio_capture_device() {
  free(poll_fds);
  snd_pcm_close(capture_handle);
}
// Close the ALSA playback handle opened by open_audio_preview_device().
static void teardown_audio_preview_device() {
  snd_pcm_close(audio_preview_handle);
}
/*
 * Compute *result = *t2 - *t1.
 * Return 1 if the difference is negative, otherwise 0.
 * (For negative differences, tv_sec/tv_nsec carry the sign of the
 * truncating division/modulo.)
 */
static int timespec_subtract(struct timespec *result, struct timespec *t2, struct timespec *t1) {
  long long t2_ns = t2->tv_nsec + INT64_C(1000000000) * t2->tv_sec;
  long long t1_ns = t1->tv_nsec + INT64_C(1000000000) * t1->tv_sec;
  long long delta = t2_ns - t1_ns;
  result->tv_sec = delta / 1000000000;
  result->tv_nsec = delta % 1000000000;
  return delta < 0 ? 1 : 0;
}
// Signal handler for termination signals; requests a graceful shutdown by
// clearing the keepRunning flag checked by the main loops.
// NOTE(review): log_debug() is likely not async-signal-safe — confirm it
// only calls async-signal-safe functions, or move the logging out of the
// handler.
void stopSignalHandler(int signo) {
  keepRunning = 0;
  log_debug("stop requested (signal=%d)\n", signo);
}
// Release video-side resources: stored H.264 codec-config NALs and the
// timestamp/subtitle/text subsystems.
static void shutdown_video() {
  log_debug("shutdown_video\n");
  for (int idx = 0; idx < n_codec_configs; idx++) {
    free(codec_configs[idx]);
  }
  timestamp_shutdown();
  subtitle_shutdown();
  text_teardown();
}
// Tear down the OpenMAX IL pipeline in the required order: flush tunnels,
// disable port buffers, disable and tear down tunnels, transition the
// components to Idle then Loaded, clean them up, and destroy the clients.
static void shutdown_openmax() {
  int i;

  if (is_preview_enabled || is_clock_enabled) {
    log_debug("shutdown_openmax: ilclient_flush_tunnels\n");
    ilclient_flush_tunnels(tunnel, 0);
  }

  // Disable port buffers
  log_debug("shutdown_openmax: disable port buffer for camera %d\n", CAMERA_CAPTURE_PORT);
  ilclient_disable_port_buffers(camera_component, CAMERA_CAPTURE_PORT, NULL, NULL, NULL);
  log_debug("shutdown_openmax: disable port buffer for video_encode %d\n", VIDEO_ENCODE_INPUT_PORT);
  ilclient_disable_port_buffers(video_encode, VIDEO_ENCODE_INPUT_PORT, NULL, NULL, NULL);
  log_debug("shutdown_openmax: disable port buffer for video_encode %d\n", VIDEO_ENCODE_OUTPUT_PORT);
  ilclient_disable_port_buffers(video_encode, VIDEO_ENCODE_OUTPUT_PORT, NULL, NULL, NULL);

  if (is_preview_enabled || is_clock_enabled) {
    for (i = 0; i < n_tunnel; i++) {
      log_debug("shutdown_openmax: disable tunnel[%d]\n", i);
      ilclient_disable_tunnel(&tunnel[i]);
    }
    log_debug("shutdown_openmax: teardown tunnels\n");
    ilclient_teardown_tunnels(tunnel);
  }

  // Move every component down the OMX state ladder before cleanup
  log_debug("shutdown_openmax: state transition to idle\n");
  ilclient_state_transition(component_list, OMX_StateIdle);
  log_debug("shutdown_openmax: state transition to loaded\n");
  ilclient_state_transition(component_list, OMX_StateLoaded);

  log_debug("shutdown_openmax: ilclient_cleanup_components\n");
  ilclient_cleanup_components(component_list);

  log_debug("shutdown_openmax: OMX_Deinit\n");
  OMX_Deinit();

  log_debug("shutdown_openmax: ilclient_destroy cam_client\n");
  ilclient_destroy(cam_client);
  log_debug("shutdown_openmax: ilclient_destroy ilclient\n");
  ilclient_destroy(ilclient);
}
// Set the H.264 keyframe (IDR) interval on the encoder output port.
// Exits the process if the OMX call fails.
static void set_gop_size(int gop_size) {
  OMX_ERRORTYPE error;
  OMX_VIDEO_CONFIG_AVCINTRAPERIOD intra;

  memset(&intra, 0, sizeof(intra));
  intra.nSize = sizeof(intra);
  intra.nVersion.nVersion = OMX_VERSION;
  intra.nPortIndex = VIDEO_ENCODE_OUTPUT_PORT;
  // Distance between two IDR frames
  intra.nIDRPeriod = gop_size;
  // It seems this value has no effect for the encoding.
  intra.nPFrames = gop_size;
  error = OMX_SetParameter(ILC_GET_HANDLE(video_encode),
      OMX_IndexConfigVideoAVCIntraPeriod, &intra);
  if (error != OMX_ErrorNone) {
    log_fatal("error: failed to set video_encode %d AVC intra period: 0x%x\n", VIDEO_ENCODE_OUTPUT_PORT, error);
    exit(EXIT_FAILURE);
  }
}
// Query the camera for its supported sensor modes and log each one
// (resolution, padding, color format, frame rate range).
static void query_sensor_mode() {
  OMX_CONFIG_CAMERASENSORMODETYPE mode;
  OMX_ERRORTYPE error;

  memset(&mode, 0, sizeof(mode));
  mode.nSize = sizeof(mode);
  mode.nVersion.nVersion = OMX_VERSION;
  mode.nPortIndex = OMX_ALL;
  mode.nModeIndex = 0;
  // First query only to learn how many modes exist (nNumModes)
  error = OMX_GetParameter(ILC_GET_HANDLE(camera_component),
      OMX_IndexConfigCameraSensorModes, &mode);
  if (error != OMX_ErrorNone) {
    log_error("error: failed to get camera sensor mode: 0x%x\n", error);
    return;
  }
  int num_modes = mode.nNumModes;
  for (int idx = 0; idx < num_modes; idx++) {
    log_info("\n[camera sensor mode %d]\n", idx);
    mode.nModeIndex = idx;
    error = OMX_GetParameter(ILC_GET_HANDLE(camera_component),
        OMX_IndexConfigCameraSensorModes, &mode);
    if (error != OMX_ErrorNone) {
      log_error("error: failed to get camera sensor mode: 0x%x\n", error);
      return;
    }
    log_info("nWidth: %u\n", mode.nWidth);
    log_info("nHeight: %u\n", mode.nHeight);
    log_info("nPaddingRight: %u\n", mode.nPaddingRight);
    log_info("nPaddingDown: %u\n", mode.nPaddingDown);
    log_info("eColorFormat: %d\n", mode.eColorFormat);
    // Frame rates are in Q16.16 fixed point
    log_info("nFrameRateMax: %u (%.2f fps)\n",
        mode.nFrameRateMax, mode.nFrameRateMax / 256.0f);
    log_info("nFrameRateMin: %u (%.2f fps)\n",
        mode.nFrameRateMin, mode.nFrameRateMin / 256.0f);
  }
}
// Constrain the camera frame rate range. Either bound may be -1.0f,
// meaning "leave this bound unchanged"; when both are -1.0f this is a
// no-op. Reads the current range first so unspecified bounds keep their
// existing values.
static void set_framerate_range(float min_fps, float max_fps) {
  OMX_PARAM_BRCMFRAMERATERANGETYPE range;
  OMX_ERRORTYPE error;

  if (min_fps == -1.0f && max_fps == -1.0f) {
    return;
  }
  memset(&range, 0, sizeof(range));
  range.nSize = sizeof(range);
  range.nVersion.nVersion = OMX_VERSION;
  range.nPortIndex = CAMERA_CAPTURE_PORT;
  error = OMX_GetParameter(ILC_GET_HANDLE(camera_component),
      OMX_IndexParamBrcmFpsRange, &range);
  if (error != OMX_ErrorNone) {
    log_error("error: failed to get framerate range: 0x%x\n", error);
    return;
  }
  if (min_fps != -1.0f) {
    range.xFramerateLow = min_fps * 65536; // in Q16 format
  }
  if (max_fps != -1.0f) {
    range.xFramerateHigh = max_fps * 65536; // in Q16 format
  }
  error = OMX_SetParameter(ILC_GET_HANDLE(camera_component),
      OMX_IndexParamBrcmFpsRange, &range);
  if (error != OMX_ErrorNone) {
    log_error("error: failed to set framerate range: 0x%x\n", error);
    return;
  }
}
// Switch the camera exposure program to fully automatic and record the new
// mode in current_exposure_mode.
static void set_exposure_to_auto() {
  OMX_CONFIG_EXPOSURECONTROLTYPE ctrl;
  OMX_ERRORTYPE error;

  memset(&ctrl, 0, sizeof(ctrl));
  ctrl.nSize = sizeof(ctrl);
  ctrl.nVersion.nVersion = OMX_VERSION;
  ctrl.nPortIndex = OMX_ALL;
  ctrl.eExposureControl = OMX_ExposureControlAuto;

  log_debug("exposure mode: auto\n");
  error = OMX_SetParameter(ILC_GET_HANDLE(camera_component),
      OMX_IndexConfigCommonExposure, &ctrl);
  if (error != OMX_ErrorNone) {
    log_error("error: failed to set camera exposure to auto: 0x%x\n", error);
  }
  current_exposure_mode = EXPOSURE_AUTO;
}
// Switch the camera exposure program to night mode and record the new mode
// in current_exposure_mode.
static void set_exposure_to_night() {
  OMX_CONFIG_EXPOSURECONTROLTYPE ctrl;
  OMX_ERRORTYPE error;

  memset(&ctrl, 0, sizeof(ctrl));
  ctrl.nSize = sizeof(ctrl);
  ctrl.nVersion.nVersion = OMX_VERSION;
  ctrl.nPortIndex = OMX_ALL;
  ctrl.eExposureControl = OMX_ExposureControlNight;

  log_debug("exposure mode: night\n");
  error = OMX_SetParameter(ILC_GET_HANDLE(camera_component),
      OMX_IndexConfigCommonExposure, &ctrl);
  if (error != OMX_ErrorNone) {
    log_error("error: failed to set camera exposure to night: 0x%x\n", error);
  }
  current_exposure_mode = EXPOSURE_NIGHT;
}
// Automatically switch between the auto and night exposure modes based on
// the average luminance of the captured frame, normalized by frame rate.
// width/height: frame dimensions; data: frame buffer beginning with the Y
// (luma) plane; fps: current frame rate.
static void auto_select_exposure(int width, int height, uint8_t *data, float fps) {
  const int width32 = ((width + 31) & ~31); // round up to the nearest multiple of 32 (row stride)
  const int height16 = ((height + 15) & ~15); // round up to the nearest multiple of 16
  int i = width32 * height16 / 4; // * (3 / 2) / 6 — cap on the number of sampled bytes
  uint8_t *py = data;
  int total_y = 0;
  int read_width = 0;
  int line_num = 1;
  int count = 0;
  // Sum luma over the visible frame, skipping the horizontal padding that
  // pads each row out to width32 bytes
  while (i--) {
    total_y += *py++;
    count++;
    if (++read_width >= width) {
      if (width32 > width) {
        py += width32 - width;
      }
      read_width = 0;
      if (++line_num > height) {
        break;
      }
    }
  }
  if (count == 0) {
    return;
  }
  float average_y = (float)total_y / (float)count;

  // Approximate exposure time
  // Normalize brightness to a per-10ms value so the threshold is
  // independent of the current frame rate
  float msec_per_frame = 1000.0f / fps;
  float y_per_10msec = average_y * 10.0f / msec_per_frame;
  log_debug(" y=%.1f", y_per_10msec);
  if (y_per_10msec < auto_exposure_threshold) { // in the dark
    if (current_exposure_mode == EXPOSURE_AUTO) {
      log_debug(" ");
      set_exposure_to_night();
    }
  } else if (y_per_10msec >= auto_exposure_threshold) { // in the light
    if (current_exposure_mode == EXPOSURE_NIGHT) {
      log_debug(" ");
      set_exposure_to_auto();
    }
  }
}
// Callback invoked when the camera has filled an output buffer with a
// captured frame. Processes the frame (overlays, encode), then either
// re-queues the buffer (while running) or performs shutdown bookkeeping
// and signals the main thread.
static void cam_fill_buffer_done(void *data, COMPONENT_T *comp) {
  OMX_BUFFERHEADERTYPE *out;
  OMX_ERRORTYPE error;

  out = ilclient_get_output_buffer(camera_component, CAMERA_CAPTURE_PORT, 1);
  if (out != NULL) {
    if (out->nFilledLen > 0) {
      // Remember the latest camera frame for the encoder
      last_video_buffer = out->pBuffer;
      last_video_buffer_size = out->nFilledLen;
      if (out->nFlags & OMX_BUFFERFLAG_ENDOFFRAME) {
        if (is_video_recording_started == 0) {
          is_video_recording_started = 1;
          if (is_audio_recording_started == 1) {
            // Both video and audio are flowing; start the shared clock now
            struct timespec ts;
            clock_gettime(CLOCK_MONOTONIC, &ts);
            video_start_time = audio_start_time = ts.tv_sec * INT64_C(1000000000) + ts.tv_nsec;
            send_video_start_time();
            send_audio_start_time();
            log_info("capturing started\n");
          }
        }
        if (is_audio_recording_started == 1) {
          if (video_pending_drop_frames > 0) {
            log_debug("dV");
            video_pending_drop_frames--;
          } else {
            log_debug(".");
            timestamp_update();
            subtitle_update();
            int is_text_changed = text_draw_all(last_video_buffer, video_width_32, video_height_16, 1); // is_video = 1
            if (is_text_changed && is_preview_enabled) {
              // the text has actually changed, redraw preview subtitle overlay
              dispmanx_update_text_overlay();
            }
            encode_and_send_image();
          }
        }
      } else {
        log_warn("\nnot an end of a frame\n");
      }
    } else {
      log_warn("got zero bytes\n");
    }
    out->nFilledLen = 0;
  } else {
    log_warn("out is NULL\n");
  }
  if (keepRunning) {
    // fix: previously OMX_FillThisBuffer was called even when out == NULL
    if (out != NULL) {
      error = OMX_FillThisBuffer(ILC_GET_HANDLE(camera_component), out);
      if (error != OMX_ErrorNone) {
        log_error("error filling camera buffer (2): 0x%x\n", error);
      }
    }
  } else {
    // Return the buffer (without this, ilclient_disable_port_buffers will hang)
    if (out != NULL) { // fix: guard against NULL buffer header
      error = OMX_FillThisBuffer(ILC_GET_HANDLE(camera_component), out);
      if (error != OMX_ErrorNone) {
        log_error("error filling camera buffer (3): 0x%x\n", error);
      }
    }
    // Clear the callback
    ilclient_set_fill_buffer_done_callback(cam_client, NULL, 0);
#if ENABLE_PBUFFER_OPTIMIZATION_HACK
    // Revert pBuffer value of video_encode input buffer
    if (video_encode_input_buf != NULL) {
      log_debug("Reverting pBuffer to its original value\n");
      video_encode_input_buf->pBuffer = video_encode_input_buf_pBuffer_orig;
    }
#endif
    // Notify the main thread that the camera is stopped
    pthread_mutex_lock(&camera_finish_mutex);
    is_camera_finished = 1;
    pthread_cond_signal(&camera_finish_cond);
    pthread_mutex_unlock(&camera_finish_mutex);
  }
}
// Set red and blue gains used when AWB is off
// Returns 0 on success, -1 when the OMX call fails.
static int camera_set_custom_awb_gains() {
  OMX_CONFIG_CUSTOMAWBGAINSTYPE gains;
  OMX_ERRORTYPE error;

  // NOTE: OMX_IndexConfigCameraSettings is read-only
  memset(&gains, 0, sizeof(gains));
  gains.nSize = sizeof(gains);
  gains.nVersion.nVersion = OMX_VERSION;
  // Gains are Q16 fixed-point values
  gains.xGainR = round(awb_red_gain * 65536); // Q16
  gains.xGainB = round(awb_blue_gain * 65536); // Q16

  error = OMX_SetParameter(ILC_GET_HANDLE(camera_component),
      OMX_IndexConfigCustomAwbGains, &gains);
  if (error != OMX_ErrorNone) {
    log_fatal("error: failed to set camera custom awb gains: 0x%x\n", error);
    return -1;
  }
  return 0;
}
// Apply the configured exposure settings (metering mode, EV compensation,
// aperture, shutter speed, ISO sensitivity) to the camera via a
// read-modify-write of OMX_IndexConfigCommonExposureValue. Settings are
// only written when the corresponding manual_exposure_* flag is set.
// Returns 0 on success, -1 on an invalid metering name or OMX failure;
// exits if the current exposure value cannot be read.
static int camera_set_exposure_value() {
  OMX_CONFIG_EXPOSUREVALUETYPE exposure_value;
  OMX_ERRORTYPE error;
  int i;
  memset(&exposure_value, 0, sizeof(OMX_CONFIG_EXPOSUREVALUETYPE));
  exposure_value.nSize = sizeof(OMX_CONFIG_EXPOSUREVALUETYPE);
  exposure_value.nVersion.nVersion = OMX_VERSION;
  exposure_value.nPortIndex = OMX_ALL;
  // Read the current settings so untouched fields keep their values
  error = OMX_GetParameter(ILC_GET_HANDLE(camera_component),
      OMX_IndexConfigCommonExposureValue, &exposure_value);
  if (error != OMX_ErrorNone) {
    log_fatal("error: failed to get camera exposure value: 0x%x\n", error);
    exit(EXIT_FAILURE);
  }
  // Translate the configured metering name into its OMX constant
  OMX_METERINGTYPE metering = OMX_EVModeMax;
  for (i = 0; i < sizeof(exposure_metering_options) / sizeof(exposure_metering_option); i++) {
    if (strcmp(exposure_metering_options[i].name, exposure_metering) == 0) {
      metering = exposure_metering_options[i].metering;
      break;
    }
  }
  if (metering == OMX_EVModeMax) {
    log_error("error: invalid exposure metering value: %s\n", exposure_metering);
    return -1;
  }
  // default: OMX_MeteringModeAverage
  exposure_value.eMetering = metering;
  if (manual_exposure_compensation) {
    // OMX_S32 Q16; default: 0
    // compensation is expressed in sixths of a stop
    exposure_value.xEVCompensation = round(exposure_compensation * 65536 / 6.0f);
  }
  if (manual_exposure_aperture) {
    // Apparently this has no practical effect
    // OMX_U32 Q16; default: 0
    exposure_value.nApertureFNumber = round(exposure_aperture * 65536);
    // default: OMX_FALSE
    exposure_value.bAutoAperture = OMX_FALSE;
  }
  if (manual_exposure_shutter_speed) {
    // OMX_U32; default: 0
    exposure_value.nShutterSpeedMsec = exposure_shutter_speed;
    // default: OMX_TRUE
    exposure_value.bAutoShutterSpeed = OMX_FALSE;
  }
  if (manual_exposure_sensitivity) {
    // OMX_U32; default: 0
    exposure_value.nSensitivity = exposure_sensitivity;
    // default: OMX_TRUE
    exposure_value.bAutoSensitivity = OMX_FALSE;
  }
  log_debug("setting exposure:\n");
  log_debug("  eMetering: %d\n", exposure_value.eMetering);
  log_debug("  xEVCompensation: %d\n", exposure_value.xEVCompensation);
  log_debug("  nApertureFNumber: %u\n", exposure_value.nApertureFNumber);
  log_debug("  bAutoAperture: %u\n", exposure_value.bAutoAperture);
  log_debug("  nShutterSpeedMsec: %u\n", exposure_value.nShutterSpeedMsec);
  log_debug("  bAutoShutterSpeed: %u\n", exposure_value.bAutoShutterSpeed);
  log_debug("  nSensitivity: %u\n", exposure_value.nSensitivity);
  log_debug("  bAutoSensitivity: %u\n", exposure_value.bAutoSensitivity);
  error = OMX_SetParameter(ILC_GET_HANDLE(camera_component),
      OMX_IndexConfigCommonExposureValue, &exposure_value);
  if (error != OMX_ErrorNone) {
    log_fatal("error: failed to set camera exposure value: 0x%x\n", error);
    return -1;
  }
  return 0;
}
// Set the camera white balance mode from its configured name.
// wb: white balance option name (looked up in white_balance_options).
// Returns 0 on success, -1 on an unknown name or OMX failure.
static int camera_set_white_balance(char *wb) {
  OMX_CONFIG_WHITEBALCONTROLTYPE config;
  OMX_ERRORTYPE error;

  // Translate the option name into its OMX control constant
  OMX_WHITEBALCONTROLTYPE control = OMX_WhiteBalControlMax;
  for (size_t idx = 0; idx < sizeof(white_balance_options) / sizeof(white_balance_option); idx++) {
    if (strcmp(white_balance_options[idx].name, wb) == 0) {
      control = white_balance_options[idx].control;
      break;
    }
  }
  if (control == OMX_WhiteBalControlMax) {
    log_error("error: invalid white balance value: %s\n", wb);
    return -1;
  }

  memset(&config, 0, sizeof(config));
  config.nSize = sizeof(config);
  config.nVersion.nVersion = OMX_VERSION;
  config.nPortIndex = OMX_ALL;
  config.eWhiteBalControl = control;
  error = OMX_SetParameter(ILC_GET_HANDLE(camera_component),
      OMX_IndexConfigCommonWhiteBalance, &config);
  if (error != OMX_ErrorNone) {
    log_fatal("error: failed to set camera white balance: 0x%x\n", error);
    return -1;
  }
  return 0;
}
static int camera_set_exposure_control(char *ex) {
OMX_CONFIG_EXPOSURECONTROLTYPE exposure_control_type;
OMX_ERRORTYPE error;
int i;
memset(&exposure_control_type, 0, sizeof(OMX_CONFIG_EXPOSURECONTROLTYPE));
exposure_control_type.nSize = sizeof(OMX_CONFIG_EXPOSURECONTROLTYPE);
exposure_control_type.nVersion.nVersion = OMX_VERSION;
exposure_control_type.nPortIndex = OMX_ALL;
// Find out the value of eExposureControl
OMX_EXPOSURECONTROLTYPE control = OMX_ExposureControlMax;
for (i = 0; i < sizeof(exposure_control_options) / sizeof(exposure_control_option); i++) {
if (strcmp(exposure_control_options[i].name, ex) == 0) {
control = exposure_control_options[i].control;
break;
}
}
if (control == OMX_ExposureControlMax) {
log_error("error: invalid exposure control value: %s\n", ex);
return -1;
}
exposure_control_type.eExposureControl = control;
log_debug("exposure control: %s\n", ex);
error = OMX_SetParameter(ILC_GET_HANDLE(camera_component),
OMX_IndexConfigCommonExposure, &exposure_control_type);
if (error != OMX_ErrorNone) {
log_error("error: failed to set camera exposure control: 0x%x\n", error);
return -1;
}
if (control == OMX_ExposureControlAuto) {
current_exposure_mode = EXPOSURE_AUTO;
} else if (control == OMX_ExposureControlNight) {
current_exposure_mode = EXPOSURE_NIGHT;
}
return 0;
}
/*
 * Set the camera's region of interest (--roi).
 *
 * left/top/width/height are fractions of the full sensor frame
 * (0.0 .. 1.0); they are converted to Q16 fixed point as the OMX
 * input-crop config expects. Returns 0 on success, -1 on failure.
 */
static int camera_set_input_crop(float left, float top, float width, float height) {
  OMX_CONFIG_INPUTCROPTYPE crop;
  OMX_ERRORTYPE error;

  memset(&crop, 0, sizeof(OMX_CONFIG_INPUTCROPTYPE));
  crop.nSize = sizeof(OMX_CONFIG_INPUTCROPTYPE);
  crop.nVersion.nVersion = OMX_VERSION;
  crop.nPortIndex = OMX_ALL;
  // 0x10000 in Q16 represents 100% of the frame dimension
  crop.xLeft = round(left * 0x10000);
  crop.xTop = round(top * 0x10000);
  crop.xWidth = round(width * 0x10000);
  crop.xHeight = round(height * 0x10000);

  error = OMX_SetParameter(ILC_GET_HANDLE(camera_component),
      OMX_IndexConfigInputCropPercentages, &crop);
  if (error != OMX_ErrorNone) {
    log_fatal("error: failed to set camera input crop type: 0x%x\n", error);
    log_fatal("hint: maybe --roi value is not acceptable to camera\n");
    return -1;
  }
  return 0;
}
// Build and configure the OpenMAX IL camera pipeline:
//   - create the "camera" component and configure its capture port
//     (resolution, stride, frame rate, pixel format, rotation, mirror,
//     exposure, white-point region of interest)
//   - optionally create a "clock" component tunneled into the camera
//   - optionally create a "video_render" component tunneled from the
//     camera's preview port, with a configurable on-screen region
// Returns 0 on success, -1 if ilclient_init fails; any other failure
// terminates the process via exit(EXIT_FAILURE).
static int openmax_cam_open() {
OMX_PARAM_PORTDEFINITIONTYPE cam_def;
OMX_ERRORTYPE error;
OMX_PARAM_PORTDEFINITIONTYPE portdef;
OMX_PARAM_TIMESTAMPMODETYPE timestamp_mode;
OMX_CONFIG_DISPLAYREGIONTYPE display_region;
OMX_CONFIG_ROTATIONTYPE rotation;
OMX_CONFIG_MIRRORTYPE mirror;
int r;
cam_client = ilclient_init();
if (cam_client == NULL) {
log_error("error: openmax_cam_open: ilclient_init failed\n");
return -1;
}
// cam_fill_buffer_done will be invoked whenever the camera fills an output buffer
ilclient_set_fill_buffer_done_callback(cam_client, cam_fill_buffer_done, 0);
// create camera_component
error = ilclient_create_component(cam_client, &camera_component, "camera",
ILCLIENT_DISABLE_ALL_PORTS |
ILCLIENT_ENABLE_OUTPUT_BUFFERS);
if (error != 0) {
log_fatal("error: failed to create camera component: 0x%x\n", error);
log_fatal("Have you enabled camera via raspi-config or /boot/config.txt?\n");
exit(EXIT_FAILURE);
}
// Keep track of every created component so it can be torn down later
component_list[n_component_list++] = camera_component;
// Read the current capture-port definition as a baseline, then modify it below
memset(&cam_def, 0, sizeof(OMX_PARAM_PORTDEFINITIONTYPE));
cam_def.nSize = sizeof(OMX_PARAM_PORTDEFINITIONTYPE);
cam_def.nVersion.nVersion = OMX_VERSION;
cam_def.nPortIndex = CAMERA_CAPTURE_PORT;
error = OMX_GetParameter(ILC_GET_HANDLE(camera_component),
OMX_IndexParamPortDefinition, &cam_def);
if (error != OMX_ErrorNone) {
log_fatal("error: failed to get camera %d port definition: 0x%x\n", CAMERA_CAPTURE_PORT, error);
exit(EXIT_FAILURE);
}
// Apply an explicit sensor mode only when the user overrode the default
if(sensor_mode != sensor_mode_default) {
OMX_PARAM_U32TYPE sensorMode;
memset(&sensorMode, 0, sizeof(OMX_PARAM_U32TYPE));
sensorMode.nSize = sizeof(OMX_PARAM_U32TYPE);
sensorMode.nVersion.nVersion = OMX_VERSION;
sensorMode.nPortIndex = OMX_ALL;
sensorMode.nU32 = sensor_mode;
error = OMX_SetParameter( ILC_GET_HANDLE(camera_component),
OMX_IndexParamCameraCustomSensorConfig, &sensorMode);
if (error != OMX_ErrorNone) {
log_fatal("error: failed to set camera sensor mode: 0x%x\n", error);
exit(EXIT_FAILURE);
}
}
// Configure port 71 (camera capture output)
cam_def.format.video.nFrameWidth = video_width;
cam_def.format.video.nFrameHeight = video_height;
// nStride must be a multiple of 32 and equal to or larger than nFrameWidth.
cam_def.format.video.nStride = (video_width+31)&~31;
// nSliceHeight must be a multiple of 16.
cam_def.format.video.nSliceHeight = (video_height+15)&~15;
cam_def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
if (is_vfr_enabled) {
log_debug("using variable frame rate\n");
cam_def.format.video.xFramerate = 0x0; // variable frame rate
} else {
cam_def.format.video.xFramerate = fr_q16; // specify the frame rate in Q.16 (framerate * 2^16)
}
// This specifies the input pixel format.
// See http://www.khronos.org/files/openmax_il_spec_1_0.pdf for details.
cam_def.format.video.eColorFormat = OMX_COLOR_FormatYUV420PackedPlanar;
cam_def.nBufferCountActual = N_BUFFER_COUNT_ACTUAL; // Affects to audio/video sync
error = OMX_SetParameter(ILC_GET_HANDLE(camera_component),
OMX_IndexParamPortDefinition, &cam_def);
if (error != OMX_ErrorNone) {
log_fatal("error: failed to set camera %d port definition: 0x%x\n", CAMERA_CAPTURE_PORT, error);
exit(EXIT_FAILURE);
}
// Set timestamp mode (unnecessary?)
memset(&timestamp_mode, 0, sizeof(OMX_PARAM_TIMESTAMPMODETYPE));
timestamp_mode.nSize = sizeof(OMX_PARAM_TIMESTAMPMODETYPE);
timestamp_mode.nVersion.nVersion = OMX_VERSION;
timestamp_mode.eTimestampMode = OMX_TimestampModeRawStc;
error = OMX_SetParameter(ILC_GET_HANDLE(camera_component),
OMX_IndexParamCommonUseStcTimestamps, &timestamp_mode);
if (error != OMX_ErrorNone) {
log_fatal("error: failed to set camera timestamp mode: 0x%x\n", error);
exit(EXIT_FAILURE);
}
// image rotation
memset(&rotation, 0, sizeof(OMX_CONFIG_ROTATIONTYPE));
rotation.nSize = sizeof(OMX_CONFIG_ROTATIONTYPE);
rotation.nVersion.nVersion = OMX_VERSION;
rotation.nPortIndex = CAMERA_CAPTURE_PORT;
rotation.nRotation = video_rotation;
error = OMX_SetParameter(ILC_GET_HANDLE(camera_component),
OMX_IndexConfigCommonRotate, &rotation);
if (error != OMX_ErrorNone) {
log_fatal("error: failed to set camera capture %d rotation: 0x%x\n", CAMERA_CAPTURE_PORT, error);
exit(EXIT_FAILURE);
}
// image mirroring (horizontal/vertical flip)
memset(&mirror, 0, sizeof(OMX_CONFIG_MIRRORTYPE));
mirror.nSize = sizeof(OMX_CONFIG_MIRRORTYPE);
mirror.nVersion.nVersion = OMX_VERSION;
mirror.nPortIndex = CAMERA_CAPTURE_PORT;
if (video_hflip && video_vflip) {
mirror.eMirror = OMX_MirrorBoth;
} else if (video_hflip) {
mirror.eMirror = OMX_MirrorHorizontal;
} else if (video_vflip) {
mirror.eMirror = OMX_MirrorVertical;
} else {
mirror.eMirror = OMX_MirrorNone;
}
error = OMX_SetParameter(ILC_GET_HANDLE(camera_component),
OMX_IndexConfigCommonMirror, &mirror);
if (error != OMX_ErrorNone) {
log_fatal("error: failed to set camera capture %d mirror (hflip/vflip): 0x%x\n", CAMERA_CAPTURE_PORT, error);
exit(EXIT_FAILURE);
}
// Exposure mode, exposure value (EV/shutter/ISO), and ROI; each helper
// logs its own error message before returning non-zero
if (camera_set_exposure_control(exposure_control) != 0) {
exit(EXIT_FAILURE);
}
if (camera_set_exposure_value() != 0) {
exit(EXIT_FAILURE);
}
// Set region of interest (--roi)
if (camera_set_input_crop(roi_left, roi_top, roi_width, roi_height) != 0) {
exit(EXIT_FAILURE);
}
// Set camera component to idle state
if (ilclient_change_component_state(camera_component, OMX_StateIdle) == -1) {
log_fatal("error: failed to set camera to idle state\n");
log_fatal("Perhaps another program is using camera, otherwise you need to reboot this pi\n");
exit(EXIT_FAILURE);
}
if (is_clock_enabled) {
// create clock component
error = ilclient_create_component(cam_client, &clock_component, "clock",
ILCLIENT_DISABLE_ALL_PORTS);
if (error != 0) {
log_fatal("error: failed to create clock component: 0x%x\n", error);
exit(EXIT_FAILURE);
}
component_list[n_component_list++] = clock_component;
// Set clock state: wait for the first timestamp before the clock starts
OMX_TIME_CONFIG_CLOCKSTATETYPE clock_state;
memset(&clock_state, 0, sizeof(OMX_TIME_CONFIG_CLOCKSTATETYPE));
clock_state.nSize = sizeof(OMX_TIME_CONFIG_CLOCKSTATETYPE);
clock_state.nVersion.nVersion = OMX_VERSION;
clock_state.eState = OMX_TIME_ClockStateWaitingForStartTime;
clock_state.nWaitMask = 1;
error = OMX_SetParameter(ILC_GET_HANDLE(clock_component), OMX_IndexConfigTimeClockState, &clock_state);
if (error != OMX_ErrorNone) {
// NOTE: non-fatal by design — the pipeline continues without a clock state change
log_error("error: failed to set clock state: 0x%x\n", error);
}
// Set up tunnel from clock to camera
set_tunnel(tunnel+n_tunnel,
clock_component, CLOCK_OUTPUT_1_PORT,
camera_component, CAMERA_INPUT_PORT);
if (ilclient_setup_tunnel(tunnel+(n_tunnel++), 0, 0) != 0) {
log_fatal("error: failed to setup tunnel from clock to camera\n");
exit(EXIT_FAILURE);
}
} // if (is_clock_enabled)
if (is_preview_enabled) {
// Set up preview port: mirror the capture port's resolution/format settings
memset(&portdef, 0, sizeof(OMX_PARAM_PORTDEFINITIONTYPE));
portdef.nSize = sizeof(OMX_PARAM_PORTDEFINITIONTYPE);
portdef.nVersion.nVersion = OMX_VERSION;
portdef.nPortIndex = CAMERA_PREVIEW_PORT;
error = OMX_GetParameter(ILC_GET_HANDLE(camera_component),
OMX_IndexParamPortDefinition, &portdef);
if (error != OMX_ErrorNone) {
log_fatal("error: failed to get camera preview %d port definition: 0x%x\n", CAMERA_PREVIEW_PORT, error);
exit(EXIT_FAILURE);
}
portdef.format.video.nFrameWidth = video_width;
portdef.format.video.nFrameHeight = video_height;
portdef.format.video.nStride = (video_width+31)&~31;
portdef.format.video.nSliceHeight = (video_height+15)&~15;
portdef.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
portdef.format.video.eColorFormat = OMX_COLOR_FormatYUV420PackedPlanar;
error = OMX_SetParameter(ILC_GET_HANDLE(camera_component),
OMX_IndexParamPortDefinition, &portdef);
if (error != OMX_ErrorNone) {
log_fatal("error: failed to set camera preview %d port definition: 0x%x\n", CAMERA_PREVIEW_PORT, error);
exit(EXIT_FAILURE);
}
// image rotation (same value as the capture port, applied to preview)
memset(&rotation, 0, sizeof(OMX_CONFIG_ROTATIONTYPE));
rotation.nSize = sizeof(OMX_CONFIG_ROTATIONTYPE);
rotation.nVersion.nVersion = OMX_VERSION;
rotation.nPortIndex = CAMERA_PREVIEW_PORT;
rotation.nRotation = video_rotation;
error = OMX_SetParameter(ILC_GET_HANDLE(camera_component),
OMX_IndexConfigCommonRotate, &rotation);
if (error != OMX_ErrorNone) {
log_fatal("error: failed to set camera preview %d rotation: 0x%x\n", CAMERA_PREVIEW_PORT, error);
exit(EXIT_FAILURE);
}
// image mirroring (horizontal/vertical flip)
memset(&mirror, 0, sizeof(OMX_CONFIG_MIRRORTYPE));
mirror.nSize = sizeof(OMX_CONFIG_MIRRORTYPE);
mirror.nVersion.nVersion = OMX_VERSION;
mirror.nPortIndex = CAMERA_PREVIEW_PORT;
if (video_hflip && video_vflip) {
mirror.eMirror = OMX_MirrorBoth;
} else if (video_hflip) {
mirror.eMirror = OMX_MirrorHorizontal;
} else if (video_vflip) {
mirror.eMirror = OMX_MirrorVertical;
} else {
mirror.eMirror = OMX_MirrorNone;
}
error = OMX_SetParameter(ILC_GET_HANDLE(camera_component),
OMX_IndexConfigCommonMirror, &mirror);
if (error != OMX_ErrorNone) {
log_fatal("error: failed to set camera preview %d mirror (hflip/vflip): 0x%x\n", CAMERA_PREVIEW_PORT, error);
exit(EXIT_FAILURE);
}
// create render_component
r = ilclient_create_component(cam_client, &render_component, "video_render",
ILCLIENT_DISABLE_ALL_PORTS);
if (r != 0) {
log_fatal("error: failed to create render component: 0x%x\n", r);
exit(EXIT_FAILURE);
}
component_list[n_component_list++] = render_component;
// Setup display region for preview window
memset(&display_region, 0, sizeof(OMX_CONFIG_DISPLAYREGIONTYPE));
display_region.nSize = sizeof(OMX_CONFIG_DISPLAYREGIONTYPE);
display_region.nVersion.nVersion = OMX_VERSION;
display_region.nPortIndex = VIDEO_RENDER_INPUT_PORT;
// set default display
display_region.num = DISP_DISPLAY_DEFAULT;
if (is_previewrect_enabled) { // display preview window at specified position
display_region.set = OMX_DISPLAY_SET_DEST_RECT | OMX_DISPLAY_SET_FULLSCREEN | OMX_DISPLAY_SET_NOASPECT | OMX_DISPLAY_SET_NUM;
display_region.dest_rect.x_offset = preview_x;
display_region.dest_rect.y_offset = preview_y;
display_region.dest_rect.width = preview_width;
display_region.dest_rect.height = preview_height;
display_region.fullscreen = OMX_FALSE;
display_region.noaspect = OMX_TRUE;
} else { // fullscreen
display_region.set = OMX_DISPLAY_SET_FULLSCREEN | OMX_DISPLAY_SET_NUM;
display_region.fullscreen = OMX_TRUE;
}
error = OMX_SetParameter(ILC_GET_HANDLE(render_component),
OMX_IndexConfigDisplayRegion, &display_region);
if (error != OMX_ErrorNone) {
log_fatal("error: failed to set render input %d display region: 0x%x\n", VIDEO_RENDER_INPUT_PORT, error);
exit(EXIT_FAILURE);
}
// Set the opacity and layer of the preview window
// (reuses display_region; the `set` mask selects only these two fields)
display_region.set = OMX_DISPLAY_SET_ALPHA | OMX_DISPLAY_SET_LAYER;
display_region.alpha = (OMX_U32) preview_opacity;
display_region.layer = DISP_LAYER_VIDEO_PREVIEW;
error = OMX_SetParameter(ILC_GET_HANDLE(render_component),
OMX_IndexConfigDisplayRegion, &display_region);
if (error != OMX_ErrorNone) {
log_fatal("error: failed to set render input %d alpha: 0x%x\n", VIDEO_RENDER_INPUT_PORT, error);
exit(EXIT_FAILURE);
}
// Set up tunnel from camera to video_render
set_tunnel(tunnel+n_tunnel,
camera_component, CAMERA_PREVIEW_PORT,
render_component, VIDEO_RENDER_INPUT_PORT);
if (ilclient_setup_tunnel(tunnel+(n_tunnel++), 0, 0) != 0) {
log_fatal("error: failed to setup tunnel from camera to render\n");
exit(EXIT_FAILURE);
}
// Set render component to executing state
ilclient_change_component_state(render_component, OMX_StateExecuting);
} // if (is_preview_enabled)
if (is_clock_enabled) {
// Set clock component to executing state
ilclient_change_component_state(clock_component, OMX_StateExecuting);
}
return 0;
}
// This function is called after the video encoder produces each frame
static int video_encode_fill_buffer_done(OMX_BUFFERHEADERTYPE *out) {
int nal_unit_type;
uint8_t *buf;
int buf_len;
int is_endofnal = 1;
uint8_t *concat_buf = NULL;
struct timespec tsEnd, tsDiff;
// out->nTimeStamp is useless as the value is always zero
if (out == NULL) {
log_error("error: cannot get output buffer from video_encode\n");
return 0;
}
if (encbuf != NULL) {
// merge the previous buffer
concat_buf = realloc(encbuf, encbuf_size + out->nFilledLen);
if (concat_buf == NULL) {
log_fatal("error: cannot allocate memory for concat_buf (%d bytes)\n", encbuf_size + out->nFilledLen);
free(encbuf);
exit(EXIT_FAILURE);
}
memcpy(concat_buf + encbuf_size, out->pBuffer, out->nFilledLen);
buf = concat_buf;
buf_len = encbuf_size + out->nFilledLen;
} else {
buf = out->pBuffer;
buf_len = out->nFilledLen;
}
if (!(out->nFlags & OMX_BUFFERFLAG_ENDOFFRAME) &&
!(out->nFlags & OMX_BUFFERFLAG_CODECCONFIG)) {
// There is remaining buffer for the current frame
nal_unit_type = buf[4] & 0x1f;
encbuf_size = buf_len;
if (concat_buf != NULL) {
encbuf = concat_buf;
concat_buf = NULL;
} else {
encbuf = malloc(buf_len);
if (encbuf == NULL) {
log_fatal("error: cannot allocate memory for encbuf (%d bytes)\n", buf_len);
exit(EXIT_FAILURE);
}
memcpy(encbuf, buf, buf_len);
}
is_endofnal = 0;
} else {
encbuf = NULL;
encbuf_size = -1;
nal_unit_type = buf[4] & 0x1f;
if (nal_unit_type != 1 && nal_unit_type != 5) {
log_debug("[NAL%d]", nal_unit_type);
}
if (out->nFlags != 0x480 && out->nFlags != 0x490 &&
out->nFlags != 0x430 && out->nFlags != 0x410 &&
out->nFlags != 0x400 && out->nFlags != 0x510 &&
out->nFlags != 0x530) {
log_warn("\nnew flag (%d,nal=%d)\n", out->nFlags, nal_unit_type);
}
if (out->nFlags & OMX_BUFFERFLAG_DATACORRUPT) {
log_warn("\n=== OMX_BUFFERFLAG_DATACORRUPT ===\n");
}
if (out->nFlags & OMX_BUFFERFLAG_EXTRADATA) {
log_warn("\n=== OMX_BUFFERFLAG_EXTRADATA ===\n");
}
if (out->nFlags & OMX_BUFFERFLAG_FRAGMENTLIST) {