Encodedecode Demo


ENCODEDECODE DEMO

--------------------

/*

* encodedecode.h

*

* ============================================================================

* Copyright (c) Texas Instruments Inc 2005

*

* Use of this software is controlled by the terms and conditions found in the

* license agreement under which this software has been supplied or provided.

* ============================================================================

*/

#ifndef _ENCODEDECODE_H

#define _ENCODEDECODE_H

/* Standard Linux headers */

#include <stdio.h>

#include <pthread.h>

/* Enables or disables debug output */

#ifdef __DEBUG

#define DBG(fmt, args...) fprintf(stderr, "Encodedecode Debug: " fmt, ## args)

#else

#define DBG(fmt, args...)

#endif

#define ERR(fmt, args...) fprintf(stderr, "Encodedecode Error: " fmt, ## args)

/* Function error codes */

#define SUCCESS 0

#define FAILURE -1

/* Thread error codes */

#define THREAD_SUCCESS (void *) 0

#define THREAD_FAILURE (void *) -1

/* The codec engine to use in this application */

#define ENGINE_NAME "encodedecode"

/* Device parameters */

#define V4L2_DEVICE "/dev/video0"

#define OSD_DEVICE "/dev/fb/0"

#define ATTR_DEVICE "/dev/fb/2"

#define FBVID_DEVICE "/dev/fb/3"

/* True or false enumeration */

#define TRUE 1

#define FALSE 0

/* Scaling factors for the video standards */

#define NOSTD 0

#define PALSTD 12

#define NTSCSTD 10

/* Number of microseconds to pause if the demo is 'paused' */

#define PAUSE 100

/* Screen dimensions */

#define SCREEN_BPP 16

#define D1_WIDTH 720

#define D1_HEIGHT yScale(480)

#define D1_LINE_WIDTH D1_WIDTH * SCREEN_BPP / 8

#define D1_FRAME_SIZE D1_LINE_WIDTH * D1_HEIGHT

/* Global data structure */

typedef struct GlobalData {

int quit; /* Global quit flag */

int play; /* Whether to play or pause */

int frames; /* Video frame counter */

int videoBytesEncoded; /* Video bytes encoded counter */

int yFactor; /* Vertical scaling (PAL vs. NTSC) */

pthread_mutex_t mutex; /* Mutex to protect the global data */

} GlobalData;

/* Global data */

extern GlobalData gbl;

/* Functions to protect the global data */

static inline int gblGetQuit(void)

{

int quit;

pthread_mutex_lock(&gbl.mutex);

quit = gbl.quit;

pthread_mutex_unlock(&gbl.mutex);

return quit;

}

static inline void gblSetQuit(void)

{

pthread_mutex_lock(&gbl.mutex);

gbl.quit = TRUE;

pthread_mutex_unlock(&gbl.mutex);

}

static inline int gblGetPlay(void)

{

int play;

pthread_mutex_lock(&gbl.mutex);

play = gbl.play;

pthread_mutex_unlock(&gbl.mutex);

return play;

}

static inline void gblSetPlay(int play)

{

pthread_mutex_lock(&gbl.mutex);

gbl.play = play;

pthread_mutex_unlock(&gbl.mutex);

}

static inline int gblGetAndResetFrames(void)

{

int frames;

pthread_mutex_lock(&gbl.mutex);

frames = gbl.frames;

gbl.frames = 0;

pthread_mutex_unlock(&gbl.mutex);

return frames;

}

static inline void gblIncFrames(void)

{

pthread_mutex_lock(&gbl.mutex);

gbl.frames++;

pthread_mutex_unlock(&gbl.mutex);

}

static inline int gblGetAndResetVideoBytesEncoded(void)

{

int videoBytesEncoded;

pthread_mutex_lock(&gbl.mutex);

videoBytesEncoded = gbl.videoBytesEncoded;

gbl.videoBytesEncoded = 0;

pthread_mutex_unlock(&gbl.mutex);

return videoBytesEncoded;

}

static inline void gblIncVideoBytesEncoded(int videoBytesEncoded)

{

pthread_mutex_lock(&gbl.mutex);

gbl.videoBytesEncoded += videoBytesEncoded;

pthread_mutex_unlock(&gbl.mutex);

}

static inline int gblGetYFactor(void)

{

int yFactor;

pthread_mutex_lock(&gbl.mutex);

yFactor = gbl.yFactor;

pthread_mutex_unlock(&gbl.mutex);

return yFactor;

}

static inline void gblSetYFactor(int yFactor)

{

pthread_mutex_lock(&gbl.mutex);

gbl.yFactor = yFactor;

pthread_mutex_unlock(&gbl.mutex);

}

/* Scale vertically depending on video standard */

#define yScale(x) (((x) * gblGetYFactor()) / 10)
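/*
 * Worked example of the scaling above: D1_LINE_WIDTH is 720 * 16 / 8 = 1440
 * bytes per line. With NTSC selected, gblGetYFactor() returns NTSCSTD (10),
 * so D1_HEIGHT = yScale(480) = (480 * 10) / 10 = 480 lines and D1_FRAME_SIZE
 * is 1440 * 480 = 691200 bytes. With PAL (12), yScale(480) = 576 lines and
 * the frame size is 1440 * 576 = 829440 bytes.
 */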

/* Cleans up cleanly after a failure */

#define cleanup(x) \
status = (x); \
gblSetQuit(); \
goto cleanup

/* Breaks a processing loop for a clean exit */

#define breakLoop(x) \
status = (x); \
gblSetQuit(); \
break

#endif /* _ENCODEDECODE_H */
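The inline accessors above are the only sanctioned way to touch the shared GlobalData: a worker thread polls the quit and play flags and bumps the counters through them. A minimal sketch of that pattern (the loop body is a placeholder, not part of the demo sources):

#include <unistd.h>
#include "encodedecode.h"

/* Illustrative worker loop using the mutex-protected accessors. */
static void exampleWorkerLoop(void)
{
    while (!gblGetQuit()) {          /* serialized read of gbl.quit */
        if (!gblGetPlay()) {         /* demo is paused: back off and poll again */
            usleep(PAUSE);
            continue;
        }
        /* ... capture, encode, and decode one frame here ... */
        gblIncFrames();              /* frame counter consumed by the OSD code */
    }
}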

/*

* main.c

*

* ============================================================================

* Copyright (c) Texas Instruments Inc 2005

*

* Use of this software is controlled by the terms and conditions found in the

* license agreement under which this software has been supplied or provided.

* ============================================================================

*/

/* Standard Linux headers */

#include <stdio.h>

#include <errno.h>

#include <fcntl.h>

#include <stdlib.h>

#include <getopt.h>

#include <unistd.h>

#include <string.h>

#include <pthread.h>

#include <sys/ioctl.h>

#include <sys/resource.h>

/* Davinci specific kernel headers */

#include <video/davincifb.h>

/* Codec Engine headers */

#include <xdc/std.h>

#include <ti/sdo/ce/Engine.h>

#include <ti/sdo/ce/trace/gt.h>

#include <ti/sdo/ce/CERuntime.h>

#include <ti/sdo/ce/utils/trace/TraceUtil.h>

/* Demo headers */

#include <rendezvous.h>

#include "encodedecode.h"

#include "video.h"

#include "ctrl.h"

/* The levels of initialization */

#define LOGSINITIALIZED 0x1

#define RENDEZVOUSOPENED 0x2

#define VIDEOTHREADCREATED 0x4

typedef struct Args {

int imageWidth;

int imageHeight;

int videoBitRate;

int passThrough;

int svideoInput;

int keyboard;

int time;

int interface;

} Args;

/* Global variable declarations for this application */

GlobalData gbl = { 0, 1, 0, 0, NOSTD };

/******************************************************************************

* usage

******************************************************************************/

static void usage(void)

{

printf("Usage: encodedecode [options]nn"

"Options:n"

"-r | --resolution Video resolution ('width'x'height') [720x480]n"

"-b | --bitrate Bit rate to encode video at [variable]n"

"-p | --passthrough Pass video through without encoding [off]n"

"-x | --svideo Use s-video instead of composite video inputn"

"-k | --keyboard Enable keyboard interfacen"

"-t | --time Number of seconds to run the demo [infinite]n"

"-i | --interface Launch the demo interface when exiting [off]n"

"-h | --help Print this messagenn");

}
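/*
 * Example invocations (illustrative only; the accepted options are defined
 * in parseArgs() below):
 *
 * ./encodedecode -r 352x240 -b 4000000 encode/decode at 352x240, 4 Mbps
 * ./encodedecode -p -k pass video through, keyboard control enabled
 * ./encodedecode -t 60 -i run for 60 seconds, then launch ./interface
 */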

/******************************************************************************

* parseArgs

******************************************************************************/

static void parseArgs(int argc, char *argv[], Args *argsp)

{

const char shortOptions[] = "r:b:pxkt:ih";

const struct option longOptions[] = {

{"resolution", required_argument, NULL, 'r'},

{"bitrate", required_argument, NULL, 'b'},

{"passthrough", no_argument, NULL, 'p'},

{"svideo", no_argument, NULL, 'x'},

{"keyboard", no_argument, NULL, 'k'},

{"time", required_argument, NULL, 't'},

{"interface", no_argument, NULL, 'i'},

{"help", no_argument, NULL, 'h'},

{0, 0, 0, 0}

};

int index;

int c;

int imageWidth;

int imageHeight;

for (;;) {

c = getopt_long(argc, argv, shortOptions, longOptions, &index);

if (c == -1) {

break;

}

switch (c) {

case 0:

break;

case 'r':

if (sscanf(optarg, "%dx%d", &imageWidth,

&imageHeight) != 2) {

fprintf(stderr, "Invalid resolution supplied (%s)n",

optarg);

usage();

exit(EXIT_FAILURE);

}

/* Sanity check resolution */

if (imageWidth <= 0 || imageHeight <= 0 ||

imageWidth > D1_WIDTH || imageHeight > D1_HEIGHT) {

fprintf(stderr, "Video resolution must be maximum %dx%dn",

D1_WIDTH, D1_HEIGHT);

exit(EXIT_FAILURE);

}

/* Only use multiples of 16 */

argsp->imageWidth = imageWidth & ~0xf;

argsp->imageHeight = imageHeight & ~0xf;

break;

case 'b':

argsp->videoBitRate = atoi(optarg);

break;

case 'x':

argsp->svideoInput = TRUE;

break;

case 'p':

argsp->passThrough = TRUE;

break;

case 'k':

argsp->keyboard = TRUE;

break;

case 't':

argsp->time = atoi(optarg);

break;

case 'i':

argsp->interface = TRUE;

break;

case 'h':

usage();

exit(EXIT_SUCCESS);

default:

usage();

exit(EXIT_FAILURE);

}

}

}

/******************************************************************************

* detectVideoStandard

******************************************************************************/

static int detectVideoStandard(void)

{

int fd;

int std;

/* Open video display device */

fd = open(FBVID_DEVICE, O_RDWR);

if (fd == -1) {

ERR("Failed to open fb device %s (%s)n", FBVID_DEVICE,

strerror(errno));

return FAILURE;

}

/* Query the display device driver for video standard chosen */

if (ioctl(fd, FBIO_GETSTD, &std) == -1) {

ERR("Failed to get video standard from display device drivern");

return FAILURE;

}

if ((std >> 16) == 0x1) {

DBG("NTSC selectedn");

gblSetYFactor(NTSCSTD);

}

else {

DBG("PAL selectedn");

gblSetYFactor(PALSTD);

}

close(fd);

return SUCCESS;

}

/******************************************************************************

* main

******************************************************************************/

int main(int argc, char *argv[])

{

unsigned int initMask = 0;

int status = EXIT_SUCCESS;

int numThreads;

struct sched_param schedParam;

Rendezvous_Obj rendezvous;

pthread_t videoThread;

pthread_attr_t attr;

VideoEnv videoEnv;

CtrlEnv ctrlEnv;

void *ret;

Args args = {

720,

480,

-1,

FALSE,

FALSE,

FALSE,

FOREVER,

FALSE

};

/* Detect PAL or NTSC */

if (detectVideoStandard() == FAILURE) {

cleanup(EXIT_FAILURE);

}

/* Parse the arguments given to the app and set the app environment */

parseArgs(argc, argv, &args);

printf("Encodedecode demo started.n");

/* Initialize the mutex which protects the global data */

pthread_mutex_init(&gbl.mutex, NULL);

/* Set the priority of this whole process to max (requires root) */

setpriority(PRIO_PROCESS, 0, -20);

/* Initialize Codec Engine runtime */

CERuntime_init();

DBG("Codec Engine initializedn");

/* Initialize the logs. Must be done after CERuntime_init() */

TraceUtil_start(ENGINE_NAME);

initMask |= LOGSINITIALIZED;

DBG("Logging initializedn");

/* Open the object which synchronizes the thread initialization */

numThreads = 3;

Rendezvous_open(&rendezvous, numThreads);

initMask |= RENDEZVOUSOPENED;

DBG("Rendezvous opened for %d threadsn", numThreads);

/* Initialize the thread attributes */

if (pthread_attr_init(&attr)) {

ERR("Failed to initialize thread attrsn");

cleanup(EXIT_FAILURE);

}

/* Force the thread to use custom scheduling attributes */

if (pthread_attr_setinheritsched(&attr, PTHREAD_EXPLICIT_SCHED)) {

ERR("Failed to set schedule inheritance attributen");

cleanup(EXIT_FAILURE);

}

/* Set the thread to be fifo real time scheduled */

if (pthread_attr_setschedpolicy(&attr, SCHED_FIFO)) {

ERR("Failed to set FIFO scheduling policyn");

cleanup(EXIT_FAILURE);

}

/* Set the thread priority */

schedParam.sched_priority = sched_get_priority_max(SCHED_FIFO);

if (pthread_attr_setschedparam(&attr, &schedParam)) {

ERR("Failed to set scheduler parametersn");

cleanup(EXIT_FAILURE);

}

/* Create the thread */

videoEnv.hRendezvous = &rendezvous;

videoEnv.videoBitRate = args.videoBitRate;

videoEnv.passThrough = args.passThrough;

videoEnv.svideoInput = args.svideoInput;

videoEnv.imageWidth = args.imageWidth;

videoEnv.imageHeight = args.imageHeight;

if (pthread_create(&videoThread, &attr, videoThrFxn, (void *) &videoEnv)) {

ERR("Failed to create video threadn");

cleanup(EXIT_FAILURE);

}

initMask |= VIDEOTHREADCREATED;

/* Main thread becomes the control thread */

ctrlEnv.hRendezvous = &rendezvous;

ctrlEnv.keyboard = args.keyboard;

ctrlEnv.time = args.time;

ctrlEnv.imageWidth = args.imageWidth;

ctrlEnv.imageHeight = args.imageHeight;

ctrlEnv.passThrough = args.passThrough;

ret = ctrlThrFxn(&ctrlEnv);

if (ret == THREAD_FAILURE) {

status = EXIT_FAILURE;

}

cleanup:

/* Make sure the other threads aren't waiting for init to complete */

Rendezvous_force(&rendezvous);

/* Wait until the other threads terminate */

if (initMask & VIDEOTHREADCREATED) {

if (pthread_join(videoThread, &ret) == 0) {

if (ret == THREAD_FAILURE) {

status = EXIT_FAILURE;

}

}

}

if (initMask & RENDEZVOUSOPENED) {

Rendezvous_close(&rendezvous);

}

if (initMask & LOGSINITIALIZED) {

TraceUtil_stop();

}

/* Destroy the global mutex */

pthread_mutex_destroy(&gbl.mutex);

if (status == EXIT_SUCCESS && args.interface) {

if (execl("./interface", "interface", "-l 2", NULL) == -1) {

status = EXIT_FAILURE;

}

}

exit(status);

}
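Every thread in the demo follows the same teardown idiom seen in main() above: an initMask bit is set as each resource comes up, and the code after the cleanup label releases only what was actually acquired, in reverse order. A reduced sketch of the idiom (resourceA/resourceB and their create/destroy calls are placeholders, not demo functions):

#define RESOURCE_A_CREATED 0x1
#define RESOURCE_B_CREATED 0x2

int exampleInit(void)
{
    unsigned int initMask = 0;
    int status = EXIT_SUCCESS;

    if (createResourceA() == FAILURE) {      /* placeholder call */
        cleanup(EXIT_FAILURE);               /* sets status, flags quit, jumps below */
    }
    initMask |= RESOURCE_A_CREATED;

    if (createResourceB() == FAILURE) {      /* placeholder call */
        cleanup(EXIT_FAILURE);
    }
    initMask |= RESOURCE_B_CREATED;

    /* ... normal operation ... */

cleanup:
    if (initMask & RESOURCE_B_CREATED) {
        destroyResourceB();                  /* placeholder call */
    }
    if (initMask & RESOURCE_A_CREATED) {
        destroyResourceA();                  /* placeholder call */
    }
    return status;
}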

/*

* ctrl.h

*

* ============================================================================

* Copyright (c) Texas Instruments Inc 2005

*

* Use of this software is controlled by the terms and conditions found in the

* license agreement under which this software has been supplied or provided.

* ============================================================================

*/

#ifndef _CTRL_H

#define _CTRL_H

#include <rendezvous.h>

#include "encodedecode.h"

/* Defining infinite time */

#define FOREVER -1

typedef struct CtrlEnv {

Rendezvous_Handle hRendezvous;

int keyboard;

int time;

int imageWidth;

int imageHeight;

int passThrough;

} CtrlEnv;

extern void *ctrlThrFxn(void *arg);

#endif /* _CTRL_H */

/*

* ctrl.c

*

* ============================================================================

* Copyright (c) Texas Instruments Inc 2005

*

* Use of this software is controlled by the terms and conditions found in the

* license agreement under which this software has been supplied or provided.

* ============================================================================

*/

/* Standard Linux headers */

#include <stdio.h>

#include <errno.h>

#include <fcntl.h>

#include <stdlib.h>

#include <string.h>

#include <unistd.h>

#include <pthread.h>

#include <linux/fb.h>

#include <sys/mman.h>

#include <sys/time.h>

#include <sys/ioctl.h>

/* Davinci specific kernel headers */

#include <video/davincifb.h>

/* Codec Engine headers */

#include <xdc/std.h>

#include <ti/sdo/ce/Engine.h>

/* Demo headers */

#include <rendezvous.h>

#include <msp430lib.h>

#include "encodedecode.h"

#include "ctrl.h"

#include "ui.h"

/* Keyboard command prompt */

#define COMMAND_PROMPT "Command [ 'help' for usage ] > "

/* The 0-7 transparency value to use for the OSD */

#define OSD_TRANSPARENCY 0x55

#define MAX_TRANSPARENCY 0x77

#define MIN_TRANSPARENCY 0x11

#define INC_TRANSPARENCY 0x11

#define NO_TRANSPARENCY 0x0

/* The levels of initialization */

#define OSDINITIALIZED 0x1

#define ENGINEOPENED 0x2

#define MSP430LIBINITIALIZED 0x4

#define UICREATED 0x8

/* Local function prototypes */

static int setOsdBuffer(int osdFd, int idx);

static int osdInit(char *displays[]);

static int osdExit(int osdFd);

static int setOsdTransparency(unsigned char trans);

static int waitForVsync(int fd);

static int getArmCpuLoad(int *procLoad, int *cpuLoad);

static int keyAction(enum msp430lib_keycode key, int displayIdx,

int *osdTransPtr);

static void drawDynamicData(Engine_Handle hEngine, int osdFd, int *time,

int *displayIdx, int *workingIdx);

/******************************************************************************

* setOsdBuffer

******************************************************************************/

static int setOsdBuffer(int osdFd, int idx)

{

struct fb_var_screeninfo vInfo;

if (ioctl(osdFd, FBIOGET_VSCREENINFO, &vInfo) == -1) {

ERR("Failed FBIOGET_VSCREENINFO (%s)n", strerror(errno));

return FAILURE;

}

vInfo.yoffset = vInfo.yres * idx;

if (ioctl(osdFd, FBIOPAN_DISPLAY, &vInfo) == -1) {

ERR("Failed FBIOPAN_DISPLAY (%s)n", strerror(errno));

return FAILURE;

}

return SUCCESS;

}

/******************************************************************************

* osdInit

******************************************************************************/

static int osdInit(char *displays[])

{

struct fb_var_screeninfo varInfo;

int fd;

int size;

/* Open the OSD device */

fd = open(OSD_DEVICE, O_RDWR);

if (fd == -1) {

ERR("Failed to open fb device %sn", OSD_DEVICE);

return FAILURE;

}

if (ioctl(fd, FBIOGET_VSCREENINFO, &varInfo) == -1) {

ERR("Failed ioctl FBIOGET_VSCREENINFO on %sn", OSD_DEVICE);

return FAILURE;

}

/* Try the requested size */

varInfo.xres = D1_WIDTH;

varInfo.yres = D1_HEIGHT;

varInfo.bits_per_pixel = SCREEN_BPP;

if (ioctl(fd, FBIOPUT_VSCREENINFO, &varInfo) == -1) {

ERR("Failed ioctl FBIOPUT_VSCREENINFO on %sn", OSD_DEVICE);

return FAILURE;

}

if (varInfo.xres != D1_WIDTH ||

varInfo.yres != D1_HEIGHT ||

varInfo.bits_per_pixel != SCREEN_BPP) {

ERR("Failed to get the requested screen size: %dx%d at %d bppn",

D1_WIDTH, D1_HEIGHT, SCREEN_BPP);

return FAILURE;

}

size = varInfo.xres * varInfo.yres * varInfo.bits_per_pixel / 8;

/* Map the frame buffers to user space */

displays[0] = (char *) mmap(NULL, size * NUM_BUFS,

PROT_READ | PROT_WRITE,

MAP_SHARED, fd, 0);

if (displays[0] == MAP_FAILED) {

ERR("Failed mmap on %sn", OSD_DEVICE);

return FAILURE;

}

displays[1] = displays[0] + size;

setOsdBuffer(fd, 0);

return fd;

}

/******************************************************************************

* osdExit

******************************************************************************/

static int osdExit(int osdFd)

{

setOsdBuffer(osdFd, 0);

close(osdFd);

return SUCCESS;

}

/******************************************************************************

* setOsdTransparency

******************************************************************************/

static int setOsdTransparency(unsigned char trans)

{

struct fb_var_screeninfo vInfo;

unsigned short *attrDisplay;

int attrSize;

int fd;

/* Open the attribute device */

fd = open(ATTR_DEVICE, O_RDWR);

if (fd == -1) {

ERR("Failed to open attribute window %sn", ATTR_DEVICE);

return FAILURE;

}

if (ioctl(fd, FBIOGET_VSCREENINFO, &vInfo) == -1) {

ERR("Error reading variable information.n");

return FAILURE;

}

/* One nibble per pixel */

attrSize = vInfo.xres_virtual * vInfo.yres / 2;

attrDisplay = (unsigned short *) mmap(NULL, attrSize,

PROT_READ | PROT_WRITE,

MAP_SHARED, fd, 0);

if (attrDisplay == MAP_FAILED) {

ERR("Failed mmap on %sn", ATTR_DEVICE);

return FAILURE;

}

/* Fill the window with the new attribute value */

memset(attrDisplay, trans, attrSize);

munmap(attrDisplay, attrSize);

close(fd);

return SUCCESS;

}
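/*
 * The attribute window stores one 4-bit blend value per pixel, which is why
 * attrSize above is xres_virtual * yres / 2 bytes. For example, assuming a
 * 720x480 attribute plane, attrSize = 720 * 480 / 2 = 172800 bytes, and
 * memset() with OSD_TRANSPARENCY (0x55) writes blend level 5 into both
 * nibbles of every byte, i.e. into every pixel.
 */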

/******************************************************************************

* waitForVsync

******************************************************************************/

static int waitForVsync(int fd)

{

int dummy;

/* Wait for vertical sync */

if (ioctl(fd, FBIO_WAITFORVSYNC, &dummy) == -1) {

ERR("Failed FBIO_WAITFORVSYNC (%s)n", strerror(errno));

return FAILURE;

}

return SUCCESS;

}

/******************************************************************************

* getArmCpuLoad

******************************************************************************/

static int getArmCpuLoad(int *procLoad, int *cpuLoad)

{

static unsigned long prevIdle = 0;

static unsigned long prevTotal = 0;

static unsigned long prevProc = 0;

int cpuLoadFound = FALSE;

unsigned long user, nice, sys, idle, total, proc;

unsigned long uTime, sTime, cuTime, csTime;

unsigned long deltaTotal, deltaIdle, deltaProc;

char textBuf[4];

FILE *fptr;

/* Read the overall system information */

fptr = fopen("/proc/stat", "r");

if (fptr == NULL) {

ERR("/proc/stat not found. Is the /proc filesystem mounted?n");

return FAILURE;

}

/* Scan the file line by line */

while (fscanf(fptr, "%4s %lu %lu %lu %lu %*[^\n]", textBuf,

&user, &nice, &sys, &idle) != EOF) {

if (strcmp(textBuf, "cpu") == 0) {

cpuLoadFound = TRUE;

break;

}

}

if (fclose(fptr) != 0) {

return FAILURE;

}

if (!cpuLoadFound) {

return FAILURE;

}

/* Read the current process information */

fptr = fopen("/proc/self/stat", "r");

if (fptr == NULL) {

ERR("/proc/self/stat not found. Is the /proc filesystem mounted?n");

return FAILURE;

}

if (fscanf(fptr, "%*d %*s %*s %*d %*d %*d %*d %*d %*d %*d %*d %*d %*d %lu "

"%lu %lu %lu", &uTime, &sTime, &cuTime, &csTime) != 4) {

ERR("Failed to get process load information.n");

fclose(fptr);

return FAILURE;

}

if (fclose(fptr) != 0) {

return FAILURE;

}

total = user + nice + sys + idle;

proc = uTime + sTime + cuTime + csTime;

/* Check if this is the first time, if so init the prev values */

if (prevIdle == 0 && prevTotal == 0 && prevProc == 0) {

prevIdle = idle;

prevTotal = total;

prevProc = proc;

return SUCCESS;

}

deltaIdle = idle - prevIdle;

deltaTotal = total - prevTotal;

deltaProc = proc - prevProc;

prevIdle = idle;

prevTotal = total;

prevProc = proc;

*cpuLoad = 100 - deltaIdle * 100 / deltaTotal;

*procLoad = deltaProc * 100 / deltaTotal;

return SUCCESS;

}
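/*
 * Worked example with illustrative numbers: if, between two calls,
 * deltaTotal = 1000 jiffies, deltaIdle = 250 and deltaProc = 400, then
 * *cpuLoad = 100 - 250 * 100 / 1000 = 75% and *procLoad = 400 * 100 / 1000
 * = 40%. Note that the very first call only primes the static prev* values
 * and returns before writing the outputs, which is why ctrlThrFxn() makes
 * one call with NULL pointers before entering its main loop.
 */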

/******************************************************************************

* drawDynamicData

******************************************************************************/

static void drawDynamicData(Engine_Handle hEngine, int osdFd, int *time,

int *displayIdx, int *workingIdx)

{

static unsigned long firstTime = 0;

static unsigned long prevTime;

unsigned long newTime;

unsigned long deltaTime;

struct timeval tv;

struct tm *timePassed;

time_t spentTime;

char tempString[9];

int procLoad;

int armLoad;

int dspLoad;

int fps;

int videoKbps;

float fpsf;

float videoKbpsf;

*time = -1;

if (gettimeofday(&tv, NULL) == -1) {

ERR("Failed to get os timen");

return;

}

newTime = tv.tv_sec * 1000 + tv.tv_usec / 1000;

if (!firstTime) {

firstTime = newTime;

prevTime = newTime;

return;

}

/* Only update OSD every second */

deltaTime = newTime - prevTime;

if (deltaTime <= 1000) {

return;

}

prevTime = newTime;

spentTime = (newTime - firstTime) / 1000;

if (spentTime <= 0) {

return;

}

*time = spentTime;

fpsf = gblGetAndResetFrames() * 1000.0 / deltaTime;

fps = fpsf + 0.5;

videoKbpsf = gblGetAndResetVideoBytesEncoded() * 8.0 / deltaTime;

videoKbps = videoKbpsf + 0.5;
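/*
 * Units: deltaTime is in milliseconds, so frames * 1000 / deltaTime is
 * frames per second and bytes * 8 / deltaTime is bits per millisecond,
 * which equals kilobits per second. Adding 0.5 before the implicit
 * float-to-int conversion rounds to the nearest integer.
 */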

uiClearScreen(COLUMN_2, 0, COLUMN_3 - COLUMN_2, yScale(220), *workingIdx);

if (getArmCpuLoad(&procLoad, &armLoad) != FAILURE) {

sprintf(tempString, "%d%%", procLoad);

uiDrawText(tempString, COLUMN_2, ROW_1, *workingIdx);

}

else {

ERR("Failed to get ARM CPU loadn");

}

dspLoad = Engine_getCpuLoad(hEngine);

if (dspLoad > -1) {

sprintf(tempString, "%d%%", dspLoad);

uiDrawText(tempString, COLUMN_2, ROW_2, *workingIdx);

}

else {

ERR("Failed to get DSP CPU loadn");

}

sprintf(tempString, "%d fps", fps);

uiDrawText(tempString, COLUMN_2, ROW_3, *workingIdx);

sprintf(tempString, "%d kbps", videoKbps);

uiDrawText(tempString, COLUMN_2, ROW_4, *workingIdx);

timePassed = localtime(&spentTime);

if (timePassed == NULL) {

return;

}

sprintf(tempString, "%.2d:%.2d:%.2d", timePassed->tm_hour,

timePassed->tm_min,

timePassed->tm_sec);

uiDrawText(tempString, COLUMN_2, ROW_5, *workingIdx);

*displayIdx = (*displayIdx + 1) % NUM_BUFS;

*workingIdx = (*workingIdx + 1) % NUM_BUFS;

setOsdBuffer(osdFd, *displayIdx);

waitForVsync(osdFd);

}

/******************************************************************************

* keyAction

******************************************************************************/

static int keyAction(enum msp430lib_keycode key, int displayIdx,

int *osdTransPtr)

{

static int osdVisible = TRUE;

switch(key) {

case MSP430LIB_KEYCODE_OK:

case MSP430LIB_KEYCODE_PLAY:

DBG("Play button pressedn");

gblSetPlay(TRUE);

if (uiPressButton(BTNIDX_CTRLPLAY, displayIdx) == FAILURE) {

return FAILURE;

}

break;

case MSP430LIB_KEYCODE_PAUSE:

DBG("Pause button pressedn");

gblSetPlay(FALSE);

if (uiPressButton(BTNIDX_CTRLPAUSE, displayIdx) == FAILURE) {

return FAILURE;

}

break;

case MSP430LIB_KEYCODE_STOP:

DBG("Stop button pressed, quitting demo..n");

gblSetQuit();

break;

case MSP430LIB_KEYCODE_VOLINC:

DBG("Volume inc button pressedn");

if (*osdTransPtr < MAX_TRANSPARENCY && osdVisible) {

*osdTransPtr += INC_TRANSPARENCY;

setOsdTransparency(*osdTransPtr);

}

if (uiPressButton(BTNIDX_NAVPLUS, displayIdx) == FAILURE) {

return FAILURE;

}

break;

case MSP430LIB_KEYCODE_VOLDEC:

DBG("Volume dec button pressedn");

if (*osdTransPtr > MIN_TRANSPARENCY && osdVisible) {

*osdTransPtr -= INC_TRANSPARENCY;

setOsdTransparency(*osdTransPtr);

}

if (uiPressButton(BTNIDX_NAVMINUS, displayIdx) == FAILURE) {

return FAILURE;

}

break;

case MSP430LIB_KEYCODE_INFOSELECT:

DBG("Info/select command received.n");

if (osdVisible) {

setOsdTransparency(NO_TRANSPARENCY);

osdVisible = FALSE;

}

else {

setOsdTransparency(*osdTransPtr);

osdVisible = TRUE;

}

break;

default:

DBG("Unknown button pressed.n");

if (uiPressButton(BTNIDX_WRONGPRESSED, displayIdx) == FAILURE) {

return FAILURE;

}

}

return SUCCESS;

}

/******************************************************************************

* getKbdCommand

******************************************************************************/

int getKbdCommand(enum msp430lib_keycode *keyPtr)

{

struct timeval tv;

fd_set fds;

int ret;

char string[80];

FD_ZERO(&fds);

FD_SET(fileno(stdin), &fds);

/* Timeout of 0 means polling */

tv.tv_sec = 0;

tv.tv_usec = 0;

ret = select(FD_SETSIZE, &fds, NULL, NULL, &tv);

if (ret == -1) {

ERR("Select failed on stdin (%s)n", strerror(errno));

return FAILURE;

}

if (ret == 0) {

return SUCCESS;

}

if (FD_ISSET(fileno(stdin), &fds)) {

if (fgets(string, 80, stdin) == NULL) {

return FAILURE;

}

/* Remove the end of line */

strtok(string, "\n");

/* Assign corresponding msp430lib key */

if (strcmp(string, "play") == 0) {

*keyPtr = MSP430LIB_KEYCODE_PLAY;

}

else if (strcmp(string, "pause") == 0) {

*keyPtr = MSP430LIB_KEYCODE_PAUSE;

}

else if (strcmp(string, "stop") == 0) {

*keyPtr = MSP430LIB_KEYCODE_STOP;

}

else if (strcmp(string, "inc") == 0) {

*keyPtr = MSP430LIB_KEYCODE_VOLDEC;

}

else if (strcmp(string, "dec") == 0) {

*keyPtr = MSP430LIB_KEYCODE_VOLINC;

}

else if (strcmp(string, "hide") == 0) {

*keyPtr = MSP430LIB_KEYCODE_INFOSELECT;

}

else if (strcmp(string, "help") == 0) {

printf("nAvailable commands:n"

" play - Play videon"

" pause - Pause videon"

" stop - Quit demon"

" inc - Increase OSD transparencyn"

" dec - Decrease OSD transparencyn"

" hide - Show / hide the OSDn"

" help - Show this help screennn");

}

else {

printf("Unknown command: [ %s ]n", string);

}

if (*keyPtr != MSP430LIB_KEYCODE_STOP) {

printf(COMMAND_PROMPT);

fflush(stdout);

}

else {

printf("n");

}

}

return SUCCESS;

}

/******************************************************************************

* ctrlThrFxn

******************************************************************************/

void *ctrlThrFxn(void *arg)

{

Engine_Handle hEngine = NULL;

unsigned int initMask = 0;

CtrlEnv *envp = (CtrlEnv *) arg;

int osdTransparency = OSD_TRANSPARENCY;

void *status = THREAD_SUCCESS;

int displayIdx = 0;

int workingIdx = 1;

char *osdDisplays[NUM_BUFS];

enum msp430lib_keycode key;

UIParams uiParams;

int osdFd;

int timeSpent;

/* Initialize the OSD */

osdFd = osdInit(osdDisplays);

if (osdFd == FAILURE) {

cleanup(THREAD_FAILURE);

}

DBG("OSD successfully initializedn");

initMask |= OSDINITIALIZED;

/* Initialize the OSD transparency */

if (setOsdTransparency(osdTransparency) == FAILURE) {

cleanup(THREAD_FAILURE);

}

DBG("OSD transparency initializedn");

/* Reset, load, and start DSP Engine */

hEngine = Engine_open(ENGINE_NAME, NULL, NULL);

if (hEngine == NULL) {

ERR("Failed to open codec engine %sn", ENGINE_NAME);

cleanup(THREAD_FAILURE);

}

DBG("Codec Engine opened in control threadn");

initMask |= ENGINEOPENED;

/* Initialize the MSP430 library to be able to receive IR commands */

if (msp430lib_init() == MSP430LIB_FAILURE) {

ERR("Failed to initialize msp430lib.n");

cleanup(THREAD_FAILURE);

}

DBG("MSP430 library initializedn");

initMask |= MSP430LIBINITIALIZED;

/* Create the user interface */

uiParams.imageWidth = envp->imageWidth;

uiParams.imageHeight = envp->imageHeight;

uiParams.passThrough = envp->passThrough;

uiParams.osdDisplays = osdDisplays;

if (uiCreate(&uiParams) == FAILURE) {

cleanup(THREAD_FAILURE);

}

DBG("User interface createdn");

initMask |= UICREATED;

/* Signal that initialization is done and wait for other threads */

Rendezvous_meet(envp->hRendezvous);

/* Initialize the ARM cpu load */

getArmCpuLoad(NULL, NULL);

if (envp->keyboard) {

printf(COMMAND_PROMPT);

fflush(stdout);

}

DBG("Entering control main loop.n");

while (!gblGetQuit()) {

/* Draw the cpu load on the OSD */

drawDynamicData(hEngine, osdFd, &timeSpent, &displayIdx, &workingIdx);

/* Has the demo timelimit been hit? */

if (envp->time > FOREVER && timeSpent >= envp->time) {

breakLoop(THREAD_SUCCESS);

}

/* See if an IR remote key has been pressed */

if (msp430lib_get_ir_key(&key) == MSP430LIB_FAILURE) {

DBG("Failed to get IR value.n");

}

if (envp->keyboard) {

if (getKbdCommand(&key) == FAILURE) {

breakLoop(THREAD_FAILURE);

}

}

/* If an IR key had been pressed, service it */

if (key != 0) {

if (keyAction(key, displayIdx, &osdTransparency) == FAILURE) {

breakLoop(THREAD_FAILURE);

}

}

usleep(REMOTECONTROLLATENCY);

}

cleanup:

/* Make sure the other threads aren't waiting for init to complete */

Rendezvous_force(envp->hRendezvous);

/* Clean up the control thread */

if (initMask & UICREATED) {

uiDelete();

}

if (initMask & MSP430LIBINITIALIZED) {

msp430lib_exit();

}

if (initMask & ENGINEOPENED) {

Engine_close(hEngine);

}

if (initMask & OSDINITIALIZED) {

osdExit(osdFd);

}

return status;

}

/*

* video.h

*

* ============================================================================

* Copyright (c) Texas Instruments Inc 2005

*

* Use of this software is controlled by the terms and conditions found in the

* license agreement under which this software has been supplied or provided.

* ============================================================================

*/

#ifndef _VIDEO_H

#define _VIDEO_H

#include <rendezvous.h>

typedef struct VideoEnv {

Rendezvous_Handle hRendezvous;

int videoBitRate;

int svideoInput;

int passThrough;

int imageWidth;

int imageHeight;

} VideoEnv;

extern void *videoThrFxn(void *arg);

#endif /* _VIDEO_H */

/*

* video.c

*

* ============================================================================

* Copyright (c) Texas Instruments Inc 2005

*

* Use of this software is controlled by the terms and conditions found in the

* license agreement under which this software has been supplied or provided.

* ============================================================================

*/

/* Standard Linux headers */

#include <stdio.h>

#include <fcntl.h>

#include <errno.h>

#include <stdlib.h>

#include <string.h>

#include <unistd.h>

#include <pthread.h>

#include <sys/mman.h>

#include <sys/ioctl.h>

#include <asm/types.h>

#include <linux/videodev2.h>

/* Davinci specific kernel headers */

#include <video/davincifb.h>

#include <media/davinci_vpfe.h>

#include <media/tvp5146.h>

/* Codec Engine headers */

#include <xdc/std.h>

#include <ti/sdo/ce/Engine.h>

#include <ti/sdo/ce/osal/Memory.h>

#include <ti/sdo/ce/video/viddec.h>

#include <ti/sdo/ce/video/videnc.h>

/* Demo headers */

#include <rendezvous.h>

#include <fifoutil.h>

#include "encodedecode.h"

#include "display.h"

#include "video.h"

#include "display.h"

/* The levels of initialization */

#define INFIFOOPENED 0x1

#define OUTFIFOOPENED 0x2

#define DISPLAYTHREADCREATED 0x4

#define CAPTUREDEVICEINITIALIZED 0x8

#define ENGINEOPENED 0x10

#define VIDEODECODERCREATED 0x20

#define VIDEOENCODERCREATED 0x40

#define ENCODEDBUFFERALLOCATED 0x80

#define DISPLAYBUFFERSALLOCATED 0x100

/* Macro for clearing structures */

#define CLEAR(x) memset (&(x), 0, sizeof (x))

/* Number of buffers between video thread and display thread */

#define DISPLAY_BUFFERS 3

/* Black color in UYVY format */

#define UYVY_BLACK 0x10801080
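/*
 * 0x10801080 covers two UYVY pixels: assuming the usual little-endian ARM
 * configuration, the word lays out in memory as the bytes 0x80 0x10 0x80
 * 0x10, i.e. chroma samples of 0x80 (neutral) and luma samples of 0x10
 * (black level).
 */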

/* Triple buffering for the capture driver */

#define NUM_BUFS 3

/* Work around a bug in the algorithms (using scratch in init) */

#define ALGO_INIT_WORKAROUND

/* Structure containing statistics about the frames in a clip */

typedef struct FrameStats {

int framesRejected;

int iFrames;

int pFrames;

int bFrames;

int idrFrames;

} FrameStats;

/* Describes a capture frame buffer */

typedef struct VideoBuffer {

void *start;

size_t length;

} VideoBuffer;

/* Local function prototypes */

static int videoDecodeAlgCreate(Engine_Handle hEngine,

VIDDEC_Handle *hDecodePtr, int *encBufSizePtr,

int width, int height);

static int videoEncodeAlgCreate(Engine_Handle hEngine,

VIDENC_Handle *hEncodePtr,

int width, int height, int bitrate);

static int encodeVideoBuffer(VIDENC_Handle hEncode, char *inBuf, int inBufSize,

char *outBuf, int outBufMaxSize, int *outBufSize);

static int decodeVideoBuffer(VIDDEC_Handle hDecode, char *inBuf,

int inBufSize, char *outBuf,

int outBufSize, FrameStats *stats);

static int initCaptureDevice(VideoBuffer **vidBufsPtr, int *numVidBufsPtr,

int svideoInput, int captureWidth,

int captureHeight);

static void cleanupCaptureDevice(int fd, VideoBuffer *vidBufs, int numVidBufs);

/******************************************************************************

* videoDecodeAlgCreate

******************************************************************************/

static int videoDecodeAlgCreate(Engine_Handle hEngine,

VIDDEC_Handle *hDecodePtr, int *encBufSizePtr,

int width, int height)

{

#ifndef ALGO_INIT_WORKAROUND

VIDDEC_Status decStatus;

XDAS_Int32 status;

VIDDEC_DynamicParams dynamicParams;

#endif

VIDDEC_Params params;

VIDDEC_Handle hDecode;

params.size = sizeof(VIDDEC_Params);

params.maxFrameRate = 0;

params.maxBitRate = 0;

params.dataEndianness = XDM_BYTE;

params.forceChromaFormat = XDM_YUV_422ILE;

params.maxHeight = D1_HEIGHT;

params.maxWidth = D1_WIDTH;

/* Create H.264 decoder instance */

hDecode = VIDDEC_create(hEngine, "h264dec", &params);

if (hDecode == NULL) {

ERR("Can't open decode algorithm: %sn", "h264");

return FAILURE;

}

#ifndef ALGO_INIT_WORKAROUND

/* Get buffer information from video decoder */

decStatus.size = sizeof(VIDDEC_Status);

dynamicParams.size = sizeof(VIDDEC_DynamicParams);

status = VIDDEC_control(hDecode, XDM_GETBUFINFO, &dynamicParams,

&decStatus);

if (status != VIDDEC_EOK) {

ERR("XDM_GETBUFINFO failed, status=%ldn", status);

return FAILURE;

}

*encBufSizePtr = decStatus.bufInfo.minInBufSize[0];

#else

/* Hardcoding encoded buffer size to work around codec issue */

*encBufSizePtr = 1658880;

#endif

*hDecodePtr = hDecode;

return SUCCESS;

}

/******************************************************************************

* videoEncodeAlgCreate

******************************************************************************/

static int videoEncodeAlgCreate(Engine_Handle hEngine,

VIDENC_Handle *hEncodePtr,

int width, int height, int bitrate)

{

VIDENC_DynamicParams dynamicParams;

VIDENC_Status encStatus;

VIDENC_Params params;

VIDENC_Handle hEncode;

XDAS_Int32 status;

params.size = sizeof(VIDENC_Params);

params.encodingPreset = XDM_DEFAULT;

params.rateControlPreset = bitrate < 0 ? IVIDEO_NONE : IVIDEO_LOW_DELAY;

params.maxFrameRate = gblGetYFactor() == NTSCSTD ? 30000 : 25000;

params.maxBitRate = bitrate < 0 ? 0 : bitrate;

params.dataEndianness = XDM_BYTE;

params.maxInterFrameInterval = 0;

params.inputChromaFormat = XDM_YUV_422ILE;

params.inputContentType = IVIDEO_PROGRESSIVE;

params.maxHeight = D1_HEIGHT;

params.maxWidth = D1_WIDTH;

/* Create H.264 encoder instance */

hEncode = VIDENC_create(hEngine, "h264enc", &params);

if (hEncode == NULL) {

ERR("Can't open encode algorithm: %sn", "h264");

return FAILURE;

}

dynamicParams.size = sizeof(VIDENC_DynamicParams);

dynamicParams.inputHeight = height;

dynamicParams.inputWidth = width;

dynamicParams.targetBitRate = bitrate < 0 ? 0 : bitrate;

dynamicParams.intraFrameInterval = 30;

dynamicParams.generateHeader = XDM_ENCODE_AU;

dynamicParams.captureWidth = 0;

dynamicParams.forceIFrame = 0;

if (gblGetYFactor() == NTSCSTD) {

dynamicParams.targetFrameRate = 30000;

dynamicParams.refFrameRate = 30000;

}

else {

dynamicParams.targetFrameRate = 25000;

dynamicParams.refFrameRate = 25000;

}

/* Set video encoder dynamic parameters */

encStatus.size = sizeof(VIDENC_Status);

status = VIDENC_control(hEncode, XDM_SETPARAMS, &dynamicParams,

&encStatus);

if (status != VIDENC_EOK) {

ERR("XDM_SETPARAMS failed, status=%ldn", status);

return FAILURE;

}

*hEncodePtr = hEncode;

return SUCCESS;

}
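/*
 * Both *AlgCreate() helpers follow the same xDM pattern: create the codec
 * instance on the engine with its static creation parameters
 * (VIDDEC_create / VIDENC_create), then issue a control call for the
 * runtime configuration (XDM_GETBUFINFO to size the encoded buffer,
 * XDM_SETPARAMS to apply the dynamic encoder parameters). Per-frame work is
 * done by the *_process() wrappers below, and the instances are released
 * with VIDDEC_delete / VIDENC_delete in the video thread's cleanup path.
 */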

/******************************************************************************

* encodeVideoBuffer

******************************************************************************/

static int encodeVideoBuffer(VIDENC_Handle hEncode, char *inBuf, int inBufSize,

char *outBuf, int outBufMaxSize, int *outBufSize)

{

XDAS_Int32 inBufSizeArray[1];

XDAS_Int32 outBufSizeArray[1];

XDAS_Int32 status;

XDM_BufDesc inBufDesc;

XDM_BufDesc outBufDesc;

VIDENC_InArgs inArgs;

VIDENC_OutArgs outArgs;

inBufSizeArray[0] = inBufSize;

outBufSizeArray[0] = outBufMaxSize;

inBufDesc.numBufs = 1;

inBufDesc.bufSizes = inBufSizeArray;

inBufDesc.bufs = (XDAS_Int8 **) &inBuf;

outBufDesc.numBufs = 1;

outBufDesc.bufSizes = outBufSizeArray;

outBufDesc.bufs = (XDAS_Int8 **) &outBuf;

inArgs.size = sizeof(VIDENC_InArgs);

outArgs.size = sizeof(VIDENC_OutArgs);

/* Encode video buffer */

status = VIDENC_process(hEncode, &inBufDesc, &outBufDesc,

&inArgs, &outArgs);

if (status != VIDENC_EOK) {

ERR("VIDENC_process() failed with a fatal error (%ld ext: %#lx)n",

status, outArgs.extendedError);

return FAILURE;

}

*outBufSize = outArgs.bytesGenerated;

return SUCCESS;

}

/******************************************************************************

* decodeVideoBuffer

******************************************************************************/

static int decodeVideoBuffer(VIDDEC_Handle hDecode, char *inBuf,

int inBufSize, char *outBuf,

int outBufSize, FrameStats *stats)

{

VIDDEC_DynamicParams dynamicParams;

VIDDEC_InArgs inArgs;

VIDDEC_OutArgs outArgs;

VIDDEC_Status decStatus;

XDM_BufDesc inBufDesc;

XDM_BufDesc outBufDesc;

XDAS_Int32 inBufSizeArray[1];

XDAS_Int32 outBufSizeArray[1];

XDAS_Int32 status;

dynamicParams.size = sizeof(VIDDEC_DynamicParams);

decStatus.size = sizeof(VIDDEC_Status);

inBufSizeArray[0] = inBufSize;

outBufSizeArray[0] = outBufSize;

inBufDesc.numBufs = 1;

inBufDesc.bufSizes = inBufSizeArray;

inBufDesc.bufs = (XDAS_Int8 **) &inBuf;

outBufDesc.numBufs = 1;

outBufDesc.bufSizes = outBufSizeArray;

outBufDesc.bufs = (XDAS_Int8 **) &outBuf;

inArgs.size = sizeof(VIDDEC_InArgs);

inArgs.numBytes = inBufSize;

inArgs.inputID = 0;

outArgs.size = sizeof(VIDDEC_OutArgs);

/* Decode video buffer */

status = VIDDEC_process(hDecode, &inBufDesc, &outBufDesc,

&inArgs, &outArgs);

if (status != VIDDEC_EOK) {

if (status == VIDDEC_ERUNTIME ||

XDM_ISFATALERROR(outArgs.extendedError)) {

ERR("VIDDEC_process() failed with a fatal error (%ld ext: %#lxn",

status, outArgs.extendedError);

return FAILURE;

}

else {

stats->framesRejected++;

return SUCCESS;

}

}

switch (outArgs.decodedFrameType) {

case IVIDEO_I_FRAME:

stats->iFrames++;

break;

case IVIDEO_P_FRAME:

stats->pFrames++;

break;

case IVIDEO_B_FRAME:

stats->bFrames++;

break;

case IVIDEO_IDR_FRAME:

stats->idrFrames++;

break;

}

return SUCCESS;

}

/******************************************************************************

* initCaptureDevice

******************************************************************************/

static int initCaptureDevice(VideoBuffer **vidBufsPtr, int *numVidBufsPtr,

int svideoInput, int captureWidth,

int captureHeight)

{

struct v4l2_requestbuffers req;

struct v4l2_capability cap;

struct v4l2_cropcap cropCap;

struct v4l2_crop crop;

struct v4l2_format fmt;

struct v4l2_buffer buf;

v4l2_std_id std;

enum v4l2_buf_type type;

int input;

int fd;

int ret;

VideoBuffer *buffers;

int numBufs;

/* Open video capture device */

fd = open(V4L2_DEVICE, O_RDWR | O_NONBLOCK, 0);

if (fd == -1) {

ERR("Cannot open %s (%s)n", V4L2_DEVICE, strerror(errno));

return FAILURE;

}

/* Select the video input */

if (svideoInput == TRUE) {

input = TVP5146_AMUX_SVIDEO;

}

else {

input = TVP5146_AMUX_COMPOSITE;

}

if (ioctl(fd, VIDIOC_S_INPUT, &input) == -1) {

ERR("Failed to set video input to %dn", input);

return FAILURE;

}

DBG("Set the capture input to id %dn", input);

/* Query for capture device capabilities */

if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == -1) {

if (errno == EINVAL) {

ERR("%s is no V4L2 devicen", V4L2_DEVICE);

return FAILURE;

}

ERR("Failed VIDIOC_QUERYCAP on %s (%s)n", V4L2_DEVICE,

strerror(errno));

return FAILURE;

}

if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {

ERR("%s is no video capture devicen", V4L2_DEVICE);

return FAILURE;

}

if (!(cap.capabilities & V4L2_CAP_STREAMING)) {

ERR("%s does not support streaming i/on", V4L2_DEVICE);

return FAILURE;

}

cropCap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

if (ioctl(fd, VIDIOC_CROPCAP, &cropCap) == -1) {

ERR("VIDIOC_CROPCAP failed %d, %sn", errno, strerror(errno));

return FAILURE;

}

/* Auto detect PAL or NTSC using the capture driver as sanity check */

std = VPFE_STD_AUTO;

if(ioctl(fd, VIDIOC_S_STD, &std) == -1) {

ERR("VIDIOC_S_STD (auto) failed on %s (%s)n", V4L2_DEVICE,

strerror(errno));

return FAILURE;

}

do {

ret = ioctl(fd, VIDIOC_QUERYSTD, &std);

} while (ret == -1 && errno == EAGAIN);

if (ret == -1) {

ERR("VIDIOC_QUERYSTD failed on %s (%s)n", V4L2_DEVICE,

strerror(errno));

return FAILURE;

}

switch (std) {

case V4L2_STD_NTSC:

DBG("NTSC camera detectedn");

if (gblGetYFactor() == PALSTD) {

ERR("NTSC camera connected but PAL selected.n");

return FAILURE;

}

break;

case V4L2_STD_PAL:

DBG("PAL camera detectedn");

if (gblGetYFactor() == NTSCSTD) {

ERR("PAL camera connected but NTSC selected.n");

return FAILURE;

}

break;

default:

ERR("Camera (%s) using unsupported video standardn", V4L2_DEVICE);

return FAILURE;

}

/* Use either NTSC or PAL depending on display kernel parameter */

std = gblGetYFactor() == NTSCSTD ? V4L2_STD_NTSC : V4L2_STD_PAL;

if(ioctl(fd, VIDIOC_S_STD, &std) == -1) {

ERR("VIDIOC_S_STD failed on %s (%s)n", V4L2_DEVICE,

strerror(errno));

return FAILURE;

}

/* Set the video capture image format */

CLEAR(fmt);

fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY;

fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;

fmt.fmt.pix.width = D1_WIDTH;

fmt.fmt.pix.height = D1_HEIGHT;

/* Set the video capture format */

if (ioctl(fd, VIDIOC_S_FMT, &fmt) == -1) {

ERR("VIDIOC_S_FMT failed on %s (%s)n", V4L2_DEVICE,

strerror(errno));

return FAILURE;

}

crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

crop.c.left = D1_WIDTH / 2 - captureWidth / 2;

crop.c.top = D1_HEIGHT / 2 - captureHeight / 2;

crop.c.width = captureWidth;

crop.c.height = captureHeight;

/* Crop the image depending on requested image size */

if (ioctl(fd, VIDIOC_S_CROP, &crop) == -1) {

ERR("VIDIOC_S_CROP failed %d, %sn", errno, strerror(errno));

return FAILURE;

}

printf("Capturing %dx%d video (cropped to %dx%d)n",

fmt.fmt.pix.width, fmt.fmt.pix.height, crop.c.width, crop.c.height);

CLEAR(req);

req.count = NUM_BUFS;

req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

req.memory = V4L2_MEMORY_MMAP;

/* Allocate buffers in the capture device driver */

if (ioctl(fd, VIDIOC_REQBUFS, &req) == -1) {

ERR("VIDIOC_REQBUFS failed on %s (%s)n", V4L2_DEVICE,

strerror(errno));

return FAILURE;

}

DBG("%d capture buffers were successfully allocated.n", req.count);

if (req.count < NUM_BUFS) {

ERR("Insufficient buffer memory on %sn", V4L2_DEVICE);

return FAILURE;

}

buffers = calloc(req.count, sizeof(*buffers));

if (!buffers) {

ERR("Failed to allocate memory for capture buffer structs.n");

return FAILURE;

}

/* Map the allocated buffers to user space */

for (numBufs = 0; numBufs < req.count; numBufs++) {

CLEAR(buf);

buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

buf.memory = V4L2_MEMORY_MMAP;

buf.index = numBufs;

if (ioctl(fd, VIDIOC_QUERYBUF, &buf) == -1) {

ERR("Failed VIDIOC_QUERYBUF on %s (%s)n", V4L2_DEVICE,

strerror(errno));

return FAILURE;

}

buffers[numBufs].length = buf.length;

buffers[numBufs].start = mmap(NULL,

buf.length,

PROT_READ | PROT_WRITE,

MAP_SHARED,

fd, buf.m.offset);

if (buffers[numBufs].start == MAP_FAILED) {

ERR("Failed to mmap buffer on %s (%s)n", V4L2_DEVICE,

strerror(errno));

return FAILURE;

}

DBG("Capture buffer %d mapped to address %#lxn", numBufs,

(unsigned long) buffers[numBufs].start);

if (ioctl(fd, VIDIOC_QBUF, &buf) == -1) {

ERR("VIODIOC_QBUF failed on %s (%s)n", V4L2_DEVICE,

strerror(errno));

return FAILURE;

}

}

/* Start the video streaming */

type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

if (ioctl(fd, VIDIOC_STREAMON, &type) == -1) {

ERR("VIDIOC_STREAMON failed on %s (%s)n", V4L2_DEVICE,

strerror(errno));

return FAILURE;

}

*vidBufsPtr = buffers;

*numVidBufsPtr = numBufs;

return fd;

}
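/*
 * Summary of the V4L2 bring-up sequence used above: select the analog input
 * (VIDIOC_S_INPUT), verify capture and streaming capabilities
 * (VIDIOC_QUERYCAP), auto-detect and then pin the video standard
 * (VIDIOC_S_STD / VIDIOC_QUERYSTD), set the pixel format and frame size
 * (VIDIOC_S_FMT), center-crop to the requested resolution (VIDIOC_S_CROP),
 * request and mmap the driver buffers (VIDIOC_REQBUFS, VIDIOC_QUERYBUF),
 * queue them (VIDIOC_QBUF) and start streaming (VIDIOC_STREAMON). The
 * steady-state loop in videoThrFxn() then alternates VIDIOC_DQBUF and
 * VIDIOC_QBUF on the same descriptor.
 */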

/******************************************************************************

* cleanupCaptureDevice

******************************************************************************/

static void cleanupCaptureDevice(int fd, VideoBuffer *vidBufs, int numVidBufs)

{

enum v4l2_buf_type type;

unsigned int i;

/* Shut off the video capture */

type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

if (ioctl(fd, VIDIOC_STREAMOFF, &type) == -1) {

ERR("VIDIOC_STREAMOFF failed (%s)n", strerror(errno));

}

/* Unmap the capture frame buffers from user space */

for (i = 0; i < numVidBufs; ++i) {

if (munmap(vidBufs[i].start, vidBufs[i].length) == -1) {

ERR("Failed to unmap capture buffer %dn", i);

}

}

free(vidBufs);

if (close(fd) == -1) {

ERR("Failed to close capture device (%s)n", strerror(errno));

}

}

/******************************************************************************

* videoThrFxn

******************************************************************************/

void *videoThrFxn(void *arg)

{

BufferElement flush = { DISPLAY_FLUSH };

FrameStats frameStats = { 0, 0, 0, 0, 0 };

Engine_Handle hEngine = NULL;

unsigned int initMask = 0;

VideoEnv *envp = (VideoEnv *) arg;

void *status = THREAD_SUCCESS;

char *encBuf = NULL;

int captureFd = 0;

int numDisplayBufs = 0;

BufferElement bufferElements[DISPLAY_BUFFERS];

struct sched_param schedParam;

VIDDEC_Handle hDecode;

VIDENC_Handle hEncode;

struct v4l2_buffer v4l2buf;

DisplayEnv displayEnv;

unsigned int numVidBufs;

VideoBuffer *vidBufs;

void *ret;

int encBufSize;

int frameSize;

int imageSize;

pthread_t displayThread;

pthread_attr_t attr;

BufferElement e;

int i;

imageSize = envp->imageWidth * envp->imageHeight * SCREEN_BPP / 8;

/* Open the input fifo */

if (FifoUtil_open(&displayEnv.inFifo,

sizeof(BufferElement)) == FIFOUTIL_FAILURE) {

ERR("Failed to open input fifon");

cleanup(THREAD_FAILURE);

}

initMask |= INFIFOOPENED;

/* Open the output fifo */

if (FifoUtil_open(&displayEnv.outFifo,

sizeof(BufferElement)) == FIFOUTIL_FAILURE) {

ERR("Failed to open output fifon");

cleanup(THREAD_FAILURE);

}

initMask |= OUTFIFOOPENED;

/* Initialize the priming synchronization mutex */

pthread_mutex_init(&displayEnv.prime, NULL);

/* Initialize the thread attributes */

if (pthread_attr_init(&attr)) {

ERR("Failed to initialize thread attrsn");

cleanup(THREAD_FAILURE);

}

/* Force the thread to use custom scheduling attributes */

if (pthread_attr_setinheritsched(&attr, PTHREAD_EXPLICIT_SCHED)) {

ERR("Failed to set schedule inheritance attributen");

cleanup(THREAD_FAILURE);

}

/* Set the thread to be fifo real time scheduled */

if (pthread_attr_setschedpolicy(&attr, SCHED_FIFO)) {

ERR("Failed to set FIFO scheduling policyn");

cleanup(THREAD_FAILURE);

}

/* Set the thread priority */

schedParam.sched_priority = sched_get_priority_max(SCHED_FIFO);

if (pthread_attr_setschedparam(&attr, &schedParam)) {

ERR("Failed to set scheduler parametersn");

cleanup(THREAD_FAILURE);

}

/* Create the thread */

displayEnv.hRendezvous = envp->hRendezvous;

if (pthread_create(&displayThread, &attr, displayThrFxn, &displayEnv)) {

ERR("Failed to create display threadn");

cleanup(THREAD_FAILURE);

}

initMask |= DISPLAYTHREADCREATED;

/* Initialize the video capture device */

captureFd = initCaptureDevice(&vidBufs, &numVidBufs, envp->svideoInput,

envp->imageWidth, envp->imageHeight);

if (captureFd == FAILURE) {

cleanup(THREAD_FAILURE);

}

DBG("Video capture initialized and startedn");

initMask |= CAPTUREDEVICEINITIALIZED;

if (!envp->passThrough) {

/* Reset, load, and start DSP Engine */

hEngine = Engine_open(ENGINE_NAME, NULL, NULL);

if (hEngine == NULL) {

ERR("Failed to open codec engine %sn", ENGINE_NAME);

cleanup(THREAD_FAILURE);

}

DBG("Codec Engine opened in video threadn");

initMask |= ENGINEOPENED;

/* Allocate and initialize video decoder on the engine */

if (videoDecodeAlgCreate(hEngine, &hDecode, &encBufSize,

envp->imageWidth,

envp->imageHeight) == FAILURE) {

cleanup(THREAD_FAILURE);

}

DBG("Video decoder createdn");

initMask |= VIDEODECODERCREATED;

/* Allocate and initialize video encoder on the engine */

if (videoEncodeAlgCreate(hEngine, &hEncode,

envp->imageWidth, envp->imageHeight,

envp->videoBitRate) == FAILURE) {

cleanup(THREAD_FAILURE);

}

DBG("Video encoder createdn");

initMask |= VIDEOENCODERCREATED;

}

/* Allocate intermediate buffer (for encoded data) */

encBuf = (char *) Memory_contigAlloc(encBufSize,

Memory_DEFAULTALIGNMENT);

if (encBuf == NULL) {

ERR("Failed to allocate contiguous memory block.n");

cleanup(THREAD_FAILURE);

}

DBG("Contiguous buffer allocated at physical address %#lxn",

Memory_getPhysicalAddress(encBuf));

initMask |= ENCODEDBUFFERALLOCATED;

memset(bufferElements, 0, sizeof(BufferElement) * DISPLAY_BUFFERS);

for (i=0; i < DISPLAY_BUFFERS; i++) {

bufferElements[i].frameBuffer = (char *)

Memory_contigAlloc(imageSize, Memory_DEFAULTALIGNMENT);

if (bufferElements[i].frameBuffer == NULL) {

ERR("Failed to allocate contiguous memory block.n");

cleanup(THREAD_FAILURE);

}

DBG("Contiguous buffer allocated at physical address 0x%lxn",

Memory_getPhysicalAddress(bufferElements[i].frameBuffer));

}

initMask |= DISPLAYBUFFERSALLOCATED;

/* Signal that initialization is done and wait for other threads */

Rendezvous_meet(envp->hRendezvous);

/* Lock the display priming mutex */

pthread_mutex_lock(&displayEnv.prime);

/* Tell the display thread that we are priming */

e.id = DISPLAY_PRIME;

if (FifoUtil_put(&displayEnv.outFifo, &e) == FIFOUTIL_FAILURE) {

ERR("Failed to put buffer to output fifon");

pthread_mutex_unlock(&displayEnv.prime);

cleanup(THREAD_FAILURE);

}

/* Prime the display thread */

for (i=0; i < DISPLAY_BUFFERS; i++) {

CLEAR(v4l2buf);

v4l2buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

v4l2buf.memory = V4L2_MEMORY_MMAP;

/* Dequeue a frame buffer from the capture device driver */

if (ioctl(captureFd, VIDIOC_DQBUF, &v4l2buf) == -1) {

ERR("VIDIOC_DQBUF failed (%s)n", strerror(errno));

breakLoop(THREAD_FAILURE);

}

if (envp->passThrough) {

/* Copy the buffer without processing */

memcpy(bufferElements[i].frameBuffer, vidBufs[v4l2buf.index].start,

imageSize);

/* Issue capture buffer back to capture device driver */

if (ioctl(captureFd, VIDIOC_QBUF, &v4l2buf) == -1) {

ERR("VIDIOC_QBUF failed (%s)n", strerror(errno));

breakLoop(THREAD_FAILURE);

}

}

else {

/* Encode the buffer using H.264 */

if (encodeVideoBuffer(hEncode, vidBufs[v4l2buf.index].start,

imageSize, encBuf, encBufSize,

&frameSize) == FAILURE) {

breakLoop(THREAD_FAILURE);

}

/* Issue capture buffer back to capture device driver */

if (ioctl(captureFd, VIDIOC_QBUF, &v4l2buf) == -1) {

ERR("VIDIOC_QBUF failed (%s)n", strerror(errno));

breakLoop(THREAD_FAILURE);

}

/* Decode the buffer using H.264 */

if (decodeVideoBuffer(hDecode, encBuf, frameSize,

bufferElements[i].frameBuffer, imageSize,

&frameStats) == FAILURE) {

breakLoop(THREAD_FAILURE);

}

}

bufferElements[i].width = envp->imageWidth;

bufferElements[i].height = envp->imageHeight;

/* Send frame buffer to display thread */

if (FifoUtil_put(&displayEnv.outFifo,

&bufferElements[i]) == FIFOUTIL_FAILURE) {

ERR("Failed to put buffer in output fifon");

breakLoop(THREAD_FAILURE);

}

numDisplayBufs++;

}

/* Release the display thread */

pthread_mutex_unlock(&displayEnv.prime);

DBG("Entering video main loop.n");

while (!gblGetQuit()) {

/* Receive a buffer with a displayed frame from the display thread */

if (FifoUtil_get(&displayEnv.inFifo, &e) == FIFOUTIL_FAILURE) {

ERR("Failed to get buffer from input fifon");

breakLoop(THREAD_FAILURE);

}

numDisplayBufs--;

/* Is the display thread flushing the pipe? */

if (e.id == DISPLAY_FLUSH) {

breakLoop(THREAD_SUCCESS);

}

CLEAR(v4l2buf);

v4l2buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

v4l2buf.memory = V4L2_MEMORY_MMAP;

/* Dequeue a frame buffer from the capture device driver */

if (ioctl(captureFd, VIDIOC_DQBUF, &v4l2buf) == -1) {

ERR("VIDIOC_DQBUF failed (%s)n", strerror(errno));

breakLoop(THREAD_FAILURE);

}

if (envp->passThrough) {

/* Copy the buffer without processing */

memcpy(e.frameBuffer, vidBufs[v4l2buf.index].start, imageSize);

/* Issue capture buffer back to capture device driver */

if (ioctl(captureFd, VIDIOC_QBUF, &v4l2buf) == -1) {

ERR("VIDIOC_QBUF failed (%s)n", strerror(errno));

breakLoop(THREAD_FAILURE);

}

}

else {

/* Encode the buffer using H.264 */

if (encodeVideoBuffer(hEncode, vidBufs[v4l2buf.index].start,

imageSize, encBuf, encBufSize,

&frameSize) == FAILURE) {

breakLoop(THREAD_FAILURE);

}

/* Issue capture buffer back to capture device driver */

if (ioctl(captureFd, VIDIOC_QBUF, &v4l2buf) == -1) {

ERR("VIDIOC_QBUF failed (%s)n", strerror(errno));

breakLoop(THREAD_FAILURE);

}

/* Decode the buffer using H.264 */

if (decodeVideoBuffer(hDecode, encBuf, frameSize, e.frameBuffer,

imageSize, &frameStats) == FAILURE) {

breakLoop(THREAD_FAILURE);

}

}

/* Send frame buffer to display thread */

if (FifoUtil_put(&displayEnv.outFifo, &e) == FIFOUTIL_FAILURE) {

ERR("Failed to put buffer in output fifon");

breakLoop(THREAD_FAILURE);

}

numDisplayBufs++;

/* Increment statistics for OSD display */

gblIncVideoBytesEncoded(frameSize);

}

printf("nTotal I-frames: %d, P-frames: %d, B-frames: %d, IDR-frames: %d "

"rejected: %dn",

frameStats.iFrames, frameStats.pFrames, frameStats.bFrames,

frameStats.idrFrames, frameStats.framesRejected);

cleanup:

/* Make sure the other threads aren't waiting for init to complete */

Rendezvous_force(envp->hRendezvous);

/* Make sure the video thread isn't stuck in FifoUtil_get() */

FifoUtil_put(&displayEnv.outFifo, &flush);

/* Drain the display thread */

while (numDisplayBufs-- > 0 && e.id != DISPLAY_FLUSH) {

if (FifoUtil_get(&displayEnv.inFifo, &e) == FIFOUTIL_FAILURE) {

ERR("Failed to get buffer from input fifon");

break;

}

}

/* Clean up the video thread */

if (initMask & DISPLAYBUFFERSALLOCATED) {

for (i=0; i < DISPLAY_BUFFERS; i++) {

if (bufferElements[i].frameBuffer) {

Memory_contigFree(bufferElements[i].frameBuffer,

imageSize);

}

}

}

if (initMask & ENCODEDBUFFERALLOCATED) {

Memory_contigFree(encBuf, encBufSize);

}

if (initMask & VIDEOENCODERCREATED) {

VIDENC_delete(hEncode);

}

if (initMask & VIDEODECODERCREATED) {

VIDDEC_delete(hDecode);

}

if (initMask & ENGINEOPENED) {

Engine_close(hEngine);

}

if (initMask & CAPTUREDEVICEINITIALIZED) {

cleanupCaptureDevice(captureFd, vidBufs, numVidBufs);

}

if (initMask & DISPLAYTHREADCREATED) {

if (pthread_join(displayThread, &ret) == 0) {

status = ret;

}

pthread_mutex_destroy(&displayEnv.prime);

}

if (initMask & OUTFIFOOPENED) {

FifoUtil_close(&displayEnv.outFifo);

}

if (initMask & INFIFOOPENED) {

FifoUtil_close(&displayEnv.inFifo);

}

return status;

}

/*

* display.h

*

* ============================================================================

* Copyright (c) Texas Instruments Inc 2005

*

* Use of this software is controlled by the terms and conditions found in the

* license agreement under which this software has been supplied or provided.

* ============================================================================

*/

#ifndef _DISPLAY_H

#define _DISPLAY_H

#include <pthread.h>

#include <fifoutil.h>

#include <rendezvous.h>

/* Display thread commands */

#define DISPLAY_FLUSH -1

#define DISPLAY_PRIME -2

typedef struct BufferElement {

int id;

int width;

int height;

char *frameBuffer;

} BufferElement;

typedef struct DisplayEnv {

Rendezvous_Handle hRendezvous;

FifoUtil_Obj outFifo;

FifoUtil_Obj inFifo;

pthread_mutex_t prime;

} DisplayEnv;

extern void *displayThrFxn(void *arg);

#endif /* _DISPLAY_H */
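The display thread communicates with the video thread only through the two FIFOs and the sentinel ids declared above: filled BufferElements travel to the display thread in outFifo and come back in inFifo once shown, while DISPLAY_FLUSH and DISPLAY_PRIME act as in-band control messages. A rough sketch of how the producing (video) side can use them; the surrounding logic is simplified and the variable names are illustrative:

DisplayEnv displayEnv;                     /* shared with the display thread */
BufferElement prime = { DISPLAY_PRIME };   /* sentinel: hold off the consumer */
BufferElement flush = { DISPLAY_FLUSH };   /* sentinel: unblock and exit */

/* While the video thread holds displayEnv.prime it can keep queuing
   priming elements; the display thread briefly takes the same mutex when
   it sees DISPLAY_PRIME, so it waits until priming is finished */
FifoUtil_put(&displayEnv.outFifo, &prime);

/* ... normal operation: put filled BufferElements, get them back ... */

/* On shutdown, wake a display thread blocked in FifoUtil_get() so it can
   leave its main loop */
FifoUtil_put(&displayEnv.outFifo, &flush);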

/*

* display.c

*

* ============================================================================

* Copyright (c) Texas Instruments Inc 2005

*

* Use of this software is controlled by the terms and conditions found in the

* license agreement under which this software has been supplied or provided.

* ============================================================================

*/

/* Standard Linux headers */

#include <stdio.h>

#include <stdlib.h>

#include <string.h>

#include <fcntl.h>

#include <unistd.h>

#include <errno.h>

#include <sys/mman.h>

#include <sys/ioctl.h>

#include <pthread.h>

#include <linux/fb.h>

/* Davinci specific kernel headers */

#include <video/davincifb.h>

/* Demo headers */

#include <rendezvous.h>

#include <fifoutil.h>

#include "encodedecode.h"

#include "display.h"

/* Video display is triple buffered */

#define NUM_BUFS 3

/* Black color in UYVY format */

#define UYVY_BLACK 0x10801080
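/*
 * Note on the value above: UYVY packs two pixels per 32-bit word as the
 * byte sequence U0 Y0 V0 Y1. Assuming a little-endian target (as on the
 * DaVinci ARM), 0x10801080 is stored in memory as 0x80 0x10 0x80 0x10,
 * i.e. neutral chroma (0x80) and minimum video-range luma (0x10) for both
 * pixels, which displays as black.
 */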

/* The levels of initialization */

#define DISPLAYDEVICEINITIALIZED 0x1

static int waitForVsync(int fd);

static int flipDisplayBuffers(int fd, int displayIdx);

static int initDisplayDevice(char *displays[]);

static void cleanupDisplayDevice(int fd, char *displays[]);

/******************************************************************************

* waitForVsync

******************************************************************************/

static int waitForVsync(int fd)

{

int dummy;

/* Wait for vertical sync */

if (ioctl(fd, FBIO_WAITFORVSYNC, &dummy) == -1) {

ERR("Failed FBIO_WAITFORVSYNC (%s)n", strerror(errno));

return FAILURE;

}

return SUCCESS;

}

/******************************************************************************

* flipDisplayBuffers

******************************************************************************/

static int flipDisplayBuffers(int fd, int displayIdx)

{

struct fb_var_screeninfo vInfo;

if (ioctl(fd, FBIOGET_VSCREENINFO, &vInfo) == -1) {

ERR("Failed FBIOGET_VSCREENINFO (%s)n", strerror(errno));

return FAILURE;

}

vInfo.yoffset = vInfo.yres * displayIdx;

/* Swap the working buffer for the displayed buffer */

if (ioctl(fd, FBIOPAN_DISPLAY, &vInfo) == -1) {

ERR("Failed FBIOPAN_DISPLAY (%s)n", strerror(errno));

return FAILURE;

}

return SUCCESS;

}

/******************************************************************************

* initDisplayDevice

******************************************************************************/

static int initDisplayDevice(char *displays[])

{

struct fb_var_screeninfo varInfo;

unsigned int *buf;

int fd;

int i;

/* Open video display device */

fd = open(FBVID_DEVICE, O_RDWR);

if (fd == -1) {

ERR("Failed to open fb device %s (%s)n", FBVID_DEVICE,

strerror(errno));

return FAILURE;

}

if (ioctl(fd, FBIOGET_VSCREENINFO, &varInfo) == -1) {

ERR("Failed FBIOGET_VSCREENINFO on %s (%s)n", FBVID_DEVICE,

strerror(errno));

return FAILURE;

}

varInfo.xres = D1_WIDTH;

varInfo.yres = D1_HEIGHT;

varInfo.bits_per_pixel = SCREEN_BPP;

/* Set video display format */

if (ioctl(fd, FBIOPUT_VSCREENINFO, &varInfo) == -1) {

ERR("Failed FBIOPUT_VSCREENINFO on %s (%s)n", FBVID_DEVICE,

strerror(errno));

return FAILURE;

}

if (varInfo.xres != D1_WIDTH ||

varInfo.yres != D1_HEIGHT ||

varInfo.bits_per_pixel != SCREEN_BPP) {

ERR("Failed to get the requested screen size: %dx%d at %d bppn",

D1_WIDTH, D1_HEIGHT, SCREEN_BPP);

return FAILURE;

}

/* Map the video buffers to user space */

displays[0] = (char *) mmap (NULL,

D1_FRAME_SIZE * NUM_BUFS,

PROT_READ | PROT_WRITE,

MAP_SHARED,

fd, 0);

if (displays[0] == MAP_FAILED) {

ERR("Failed mmap on %s (%s)n", FBVID_DEVICE, strerror(errno));

return FAILURE;

}

/* Clear the video buffers */

buf = (unsigned int *) displays[0];

for (i=0; i<D1_FRAME_SIZE * NUM_BUFS / sizeof(unsigned int); i++) {

buf[i] = UYVY_BLACK;

}

DBG("Display buffer %d mapped to address %#lxn", 0,

(unsigned long) displays[0]);

for (i=0; i<NUM_BUFS-1; i++) {

displays[i+1] = displays[i] + D1_FRAME_SIZE;

DBG("Display buffer %d mapped to address %#lxn", i+1,

(unsigned long) displays[i+1]);

}

return fd;

}

/******************************************************************************

* cleanupDisplayDevice

******************************************************************************/

static void cleanupDisplayDevice(int fd, char *displays[])

{

munmap(displays[0], D1_FRAME_SIZE * NUM_BUFS);

close(fd);

}

/******************************************************************************

* copyFrame

******************************************************************************/

static void copyFrame(char *dst, char *src, int width, int height)

{

int lineWidth = width * SCREEN_BPP / 8;

int xoffset = (D1_LINE_WIDTH - lineWidth) / 2;
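/* The vertical start line below is rounded down to an even value; the
   likely intent is to keep line-pair alignment on the interlaced display
   (an assumption -- the original code does not document the reason) */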

int ystart = ( (D1_HEIGHT - height) / 2 ) & ~1;

int y;

/* Truncate if the frame buffer is taller than the display buffer */

if (ystart < 0) {

ystart = 0;

}

dst += ystart * D1_LINE_WIDTH + xoffset;

/* Copy the frame into the middle of the screen */

for (y=0; y < height && y < D1_HEIGHT; y++) {

memcpy(dst, src, lineWidth);

dst += D1_LINE_WIDTH;

src += lineWidth;

}

}

/******************************************************************************

* displayThrFxn

******************************************************************************/

void *displayThrFxn(void *arg)

{

DisplayEnv *envp = (DisplayEnv *) arg;

BufferElement flush = { DISPLAY_FLUSH };

void *status = THREAD_SUCCESS;

unsigned int initMask = 0;

int displayIdx = 0;

int fbFd = 0;

char *displays[NUM_BUFS];

BufferElement e;

/* Initialize the video display device */

fbFd = initDisplayDevice(displays);

if (fbFd == FAILURE) {

cleanup(THREAD_FAILURE);

}

DBG("Video display device initialized.n");

initMask |= DISPLAYDEVICEINITIALIZED;

/* Signal that initialization is done and wait for other threads */

Rendezvous_meet(envp->hRendezvous);

/* Wait for the vertical sync of the display device */

waitForVsync(fbFd);

DBG("Entering display main loop.n");

while (TRUE) {

if (!gblGetPlay() && !gblGetQuit()) {

usleep(PAUSE);

continue;

}

/* Receive a buffer with a decoded frame from the video thread */

if (FifoUtil_get(&envp->outFifo, &e) == FIFOUTIL_FAILURE) {

breakLoop(THREAD_FAILURE);

}

/* Is the video thread flushing the pipe? */

if (e.id == DISPLAY_FLUSH) {

breakLoop(THREAD_SUCCESS);

} else if (e.id == DISPLAY_PRIME) {
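/* Wait here until whoever is priming the pipeline (the video thread
   holds the 'prime' mutex while doing so) releases it; taking and
   immediately releasing the mutex is only a wait, no data is exchanged.
   This reading of the intent is an interpretation, as the original code
   is uncommented at this point. */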

pthread_mutex_lock(&envp->prime);

pthread_mutex_unlock(&envp->prime);

continue;

}

/* Increment the display index */

displayIdx = (displayIdx + 1) % NUM_BUFS;

/* Copy the supplied frame to the display frame buffer */

copyFrame(displays[displayIdx], e.frameBuffer, e.width, e.height);

/* Give back the buffer to the video thread */

if (FifoUtil_put(&envp->inFifo, &e) == FIFOUTIL_FAILURE) {

breakLoop(THREAD_FAILURE);

}

/* Flip display buffer and working buffer */

flipDisplayBuffers(fbFd, displayIdx);

/* Wait for the vertical sync of the display device */

waitForVsync(fbFd);

gblIncFrames();

}

cleanup:

/* Make sure the other threads aren't waiting for init to complete */

Rendezvous_force(envp->hRendezvous);

/* Make sure the video thread isn't stuck in FifoUtil_get() */

FifoUtil_put(&envp->inFifo, &flush);

/* Clean up the display thread */

if (initMask & DISPLAYDEVICEINITIALIZED) {

cleanupDisplayDevice(fbFd, displays);

}

return status;

}

/*

* loader.h

*

* ============================================================================

* Copyright (c) Texas Instruments Inc 2005

*

* Use of this software is controlled by the terms and conditions found in the

* license agreement under which this software has been supplied or provided.

* ============================================================================

*/

#ifndef _LOADER_H

#define _LOADER_H

typedef struct LoaderState {

int inputFd; // INPUT - The file descriptor of the input file

int loop; // INPUT - true if clip is to start over

char *readBuffer; // INPUT - A large CMEM allocated buffer

int readBufSize; // INPUT - Total size of the readBuffer

int readSize; // INPUT - Size of the 'window'

int doneMask; // INPUT - VIDEO_DONE or SOUND_DONE

int firstFrame; // OUTPUT - True if first frame of a clip

int endClip; // OUTPUT - True if time to start clip over

char *curPtr; // INTERNAL - Points to current frame

char *readPtr; // INTERNAL - Points to the end of current 'window'

} LoaderState;

extern int loaderPrime(LoaderState *state, char **framePtr);

extern int loaderGetFrame(LoaderState *state, int frameSize, char **framePtr);

#endif /* _LOADER_H */
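The loader keeps a sliding 'window' of encoded data inside one large contiguous buffer: loaderPrime fills the first window and points framePtr at its start, and each call to loaderGetFrame advances past the bytes the decoder just consumed, topping the window up from the file or wrapping back to the start of the buffer when it runs out of room. A rough usage sketch, with the decoder call elided and the names chosen only for illustration:

LoaderState loader;       /* inputFd, readBuffer, readBufSize, readSize,
                             loop and doneMask set up by the caller */
char *framePtr;
int frameSize = 0;        /* bytes consumed by the decoder per frame */

if (loaderPrime(&loader, &framePtr) == FAILURE) {
    /* handle the error */
}

while (!gblGetQuit()) {
    /* decode one frame starting at framePtr and set frameSize to the
       number of bytes consumed (decoder call not shown) */

    if (loaderGetFrame(&loader, frameSize, &framePtr) == FAILURE) {
        break;
    }

    if (loader.endClip) {
        /* the clip looped back to the start of the file */
    }
}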

/*

* loader.c

*

* ============================================================================

* Copyright (c) Texas Instruments Inc 2005

*

* Use of this software is controlled by the terms and conditions found in the

* license agreement under which this software has been supplied or provided.

* ============================================================================

*/

#include <stdio.h>

#include <unistd.h>

#include <string.h>

#include <errno.h>

#include "decode.h"

#include "loader.h"

/******************************************************************************

* loaderPrime

******************************************************************************/

int loaderPrime(LoaderState *statep, char **framePtr)

{

int numBytes;

/* Read a full 'window' of encoded data */

numBytes = read(statep->inputFd, statep->readBuffer, statep->readSize);

if (numBytes == -1) {

ERR("Error reading data from video file [%s]n",strerror(errno));

return FAILURE;

}

/* Initialize the state */

statep->curPtr = statep->readBuffer;

statep->readPtr = statep->readBuffer + numBytes;

*framePtr = statep->curPtr;

statep->firstFrame = TRUE;

statep->endClip = FALSE;

return SUCCESS;

}

/******************************************************************************

* loaderGetFrame

******************************************************************************/

int loaderGetFrame(LoaderState *statep, int frameSize, char **framePtr)

{

int numBytes;

int dataLeft;

int spaceLeft;

char *endBuf;

statep->firstFrame = FALSE;

statep->curPtr += frameSize;

dataLeft = statep->readPtr - statep->curPtr;

if (dataLeft <= 0) {

/* End of file */

while (!gblGetQuit() && !gblAllDone(statep->doneMask)) {

usleep(PAUSE);

}

gblResetDone(statep->doneMask);

if (statep->loop) {

/* Restart at beginning of file */

statep->endClip = TRUE;

if (lseek(statep->inputFd, 0, SEEK_SET) == -1) {

ERR("Failed lseek on file (%s)n", strerror(errno));

return FAILURE;

}

}

else {

gblSetQuit();

}

}

else {

endBuf = statep->readBuffer + statep->readBufSize;

spaceLeft = endBuf - statep->curPtr;

if (spaceLeft < statep->readSize) {

/* Could not fit 'window', start over at beginning of buffer */

memcpy(statep->readBuffer, statep->curPtr, dataLeft);

statep->curPtr = statep->readBuffer;

statep->readPtr = statep->readBuffer + dataLeft;

}

if (dataLeft == (statep->readSize - frameSize)) {

/* Incremental read to make sure we have a full 'window' */

numBytes = read(statep->inputFd, statep->readPtr, frameSize);

if (numBytes == -1) {

ERR("Error reading data from file (%s)n", strerror(errno));

return FAILURE;

}

statep->readPtr += numBytes;

}

*framePtr = statep->curPtr;

}

return SUCCESS;

}
