v4l2cap.c
Uploaded by: aoeyumen
Upload date: 2007-01-06
Archive size: 3329 KB
File size: 87 KB
/* Video for Linux Two
 * Video Capture Driver
 * -- example code --
 *
 * This software is in the public domain.
 * Written by Bill Dirks
 *
 * This module is an example implementation of the Video for Linux Two
 * video capture API specification. The purposes of this software are
 * 1) Serve as a starting point for a new V4L2 capture driver
 * 2) Serve as a dummy driver for an application developer
 * 3) Supplement the V4L2 API documentation
 *
 * gcc -c -O2 -Wall v4l2cap.c
 */
- #ifndef __KERNEL__
- #define __KERNEL__
- #endif
- #ifndef MODULE
- #define MODULE
- #endif
- #include <linux/module.h>
- #include <linux/errno.h>
- #include <linux/kernel.h>
- #include <linux/malloc.h>
- #include <linux/mm.h>
- #include <linux/poll.h>
- #include <linux/ioport.h>
- #include <asm/io.h>
- #include <linux/videodev2.h>
- #include <linux/version.h>
- #include <asm/uaccess.h>
- #include <asm/pgtable.h>
- #include <asm/page.h>
- #include <linux/i2c.h>
- #include "mgavideo.h"
- #include "ks0127.h"
- #include "tuner.h"
- #include "msp3400.h"
- #include "zr36060.h"
#define PKMOD "cap: "	/* prefix for every kernel log message from this driver */
/* Verbose debug logging -- compiled out by default; change to "#if 1" to enable. */
#if 0
#define debug_msg(fmt,arg...) printk(KERN_DEBUG PKMOD fmt,##arg)
#else
#define debug_msg(fmt,arg...)
#endif
/* Error logging -- enabled. */
#if 1
#define err_msg(fmt,arg...) printk(KERN_ERR PKMOD fmt,##arg)
#else
#define err_msg(fmt,arg...)
#endif
/* Informational logging -- enabled. */
#if 1
#define info_msg(fmt,arg...) printk(KERN_INFO PKMOD fmt,##arg)
#else
#define info_msg(fmt,arg...)
#endif
/* Video controls advertised through VIDIOC_QUERYCTRL.
 * Field order: {id, name, minimum, maximum, step, default_value, type}. */
static struct v4l2_queryctrl capture_control[] =
{
	{V4L2_CID_BRIGHTNESS, "Brightness", -126, 126, 1, 0, V4L2_CTRL_TYPE_INTEGER},
	{V4L2_CID_CONTRAST, "Contrast", -126, 126, 1, 0, V4L2_CTRL_TYPE_INTEGER},
	{V4L2_CID_SATURATION, "Saturation", -126, 126, 1, 0, V4L2_CTRL_TYPE_INTEGER},
	{V4L2_CID_HUE, "Hue", -126, 126, 1, 0, V4L2_CTRL_TYPE_INTEGER},
#if 1 /* extra stuff used for testing vidpanel */
	{V4L2_CID_EXPOSURE, "Exposure", 0,4,0,0, V4L2_CTRL_TYPE_MENU},
	{V4L2_CID_AUTOGAIN, "Auto Gain", 0, 1, 0, 1, V4L2_CTRL_TYPE_BOOLEAN},
	{V4L2_CID_DO_WHITE_BALANCE, "White Balance", 0,0,0,0, V4L2_CTRL_TYPE_BUTTON},
#endif
};
#define MAXCONTROLS (sizeof(capture_control)/sizeof(capture_control[0]))
/* Indices of the four tone controls within capture_control[]; used to index
 * the per-input control[] array (see device_tone_controls()). */
#define VCTRL_BRIGHTNESS 0
#define VCTRL_CONTRAST 1
#define VCTRL_SATURATION 2
#define VCTRL_HUE 3
- static int
- find_vctrl(int id)
- {
- int i;
- if (id == V4L2_CID_PRIVATE_BASE ||
- id < V4L2_CID_BASE ||
- id > V4L2_CID_LASTP1)
- return -EDOM;
- for (i = MAXCONTROLS - 1; i >= 0; i--)
- if (capture_control[i].id == id)
- break;
- if (i < 0)
- i = -EINVAL;
- return i;
- }
- static int
- vctrl_querymenu(struct v4l2_querymenu *qm)
- {
- static char *expo_menu[] = {
- "1/60",
- "1/100",
- "1/250",
- "1/1000",
- "1/5000",
- };
- if (qm->id == V4L2_CID_EXPOSURE)
- {
- if (qm->index < 0 ||
- qm->index >= sizeof(expo_menu)/sizeof(char *))
- return -EINVAL;
- {
- memcpy(qm->name, expo_menu[qm->index],
- sizeof(qm->name));
- }
- return 0;
- }
- return -EINVAL;
- }
struct capture_device;/* forward reference */

/* Per-input state: one instance for each selectable video source
 * (composite, S-video, tuner, ...). */
struct video_source
{
	struct v4l2_input input;	/* static description of the input */
	int control[MAXCONTROLS];	/* tone control values for this input */
	struct v4l2_tuner tuner;
	int freq;			/* last frequency set via set_video_freq() */
	struct v4l2_audio audio;
	int vcrmode;
};
/* Bus-master scatter list entry (physical address + length).
 * The last entry of a list has END_OF_SCATTER_LIST OR'ed into it. */
struct scatter_node
{
	__u32 addr;
	__u32 len;
};
#define END_OF_SCATTER_LIST 0x80000000
/* Image translations */
/* Lookup tables for YUYV -> RGB24/RGB32 conversion; built by
 * translate_make_rgb24_lut(). The *_rgb tables hold packed 11/11/10-bit
 * partial sums; sat/sat8/sat16 clamp an 11-bit channel into 8 bits at
 * three different output bit positions. */
struct lookup_rgb24
{
	__u32 u_rgb[256];
	__u32 v_rgb[256];
	__u32 y_rgb[256];
	__u8 sat[1024];
	__u32 sat8[1024];
	__u32 sat16[1024];
	__u32 uv_rgb[256 * 256];
};
/* A conversion lookup table of one of the LUT_* flavors below.
 * 'table' points at vmalloc()ed memory owned by this struct. */
struct lookup
{
	int type;
	int size;
	union
	{
		void *base; /* vmalloc() */
		__u16 *rgb16;
		struct lookup_rgb24 *rgb24;
	} table;
};
#define LUT_NULL 0
#define LUT_RGB555 1
#define LUT_RGB565 2
#define LUT_RGB24 3
/* One configured image-format translation (YUYV source -> client format). */
struct translation
{
	int type;		/* one of the XLAT_* codes below */
	int width;		/* image width in pixels */
	int height;		/* image height in lines */
	__u8 *in;		/* source (captured YUYV) buffer */
	int in_stride;		/* source bytes per line */
	__u8 *out;		/* destination buffer */
	int out_stride;		/* destination bytes per line */
	int output_size;	/* total bytes of one converted image */
	int output_is_user;	/* non-zero: 'out' is a user-space pointer */
	struct lookup lut;
};
#define XLAT_NULL 0
#define XLAT_YUYV_TO_UYVY 1
#define XLAT_YUYV_TO_YUV420 2
#define XLAT_YUYV_TO_GREY 4
#define XLAT_YUYV_TO_RGB555 6
#define XLAT_YUYV_TO_RGB565 7
#define XLAT_YUYV_TO_RGB24 8
#define XLAT_YUYV_TO_RGB32 9
/* Per-open data for handling multiple opens on one device */
struct device_open
{
	int isopen;		/* slot is in use */
	int noncapturing;	/* opened for control only, not capture */
	struct capture_device *dev;	/* back pointer to the device */
};
#define MAX_OPENS 3
/* Streaming data buffer */
struct stream_buffer
{
	struct v4l2_q_node qnode;	/* linkage on capture/done queues */
	struct v4l2_buffer vidbuf;	/* state reported to the client */
	int requested;			/* allocated via VIDIOC_REQBUFS */
	__u8 *vaddress; /* vmalloc() */
	struct scatter_node *dma_list; /* get_free_page() */
};
#define MAX_CAPTURE_BUFFERS 30
#define MAX_LOCKED_MEMORY 2000000
/*
 * Capture device structure
 *
 * One for each handled device in the system.
 * This structure holds all the global information the driver
 * needs about each device.
 */
struct capture_device
{
	struct v4l2_device v; /* Must be first */
	struct mga_dev* mga;	/* underlying Matrox video hardware handle */
	char shortname[16];
	int is_registered;	/* registered with the V4L2 core */
	int open_count;
	struct device_open open_data[MAX_OPENS];
	int capturing_opens;	/* opens that are allowed to capture */
	/* Per-bus index number for each device */
	int index;
	/* General type of device */
	int type;
	/* Interrupts */
	int ints_enabled;
	struct tq_struct tqnode_dpc;/* for Bottom Half routine */
	struct wait_queue *new_video_frame;	/* readers sleep here */
	/* Video decoder stuff */
	__u32 standards;	/* bitmask of supported standards */
	__u32 standard;		/* currently selected V4L2_STD_* */
	__u32 frame_period;	/* in 100ns units (see set_video_standard) */
	struct video_source source[KS_INPUT_COUNT];
	int source_width;	/* active width of the selected standard */
	int source_height;	/* active height (one field) */
	/* Client capture format and capture modes */
	struct v4l2_format clientfmt;
	struct v4l2_captureparm capture;
	int input;/* which video source is selected */
	/* Hardware capture format */
	int capture_bypp;	/* bytes per pixel of the raw capture (YUYV = 2) */
	int capture_size;	/* bytes of one raw captured frame */
	struct scatter_node *capture_dma_list;/* get_free_page() */
	/* Capture state */
	int ready_to_capture;
	int grabber_enabled;
	int capture_completed;
	unsigned long time_acquired;/* millisecond time stamp */
	int streaming;
	struct stream_buffer stream_buf[MAX_CAPTURE_BUFFERS];
	int stream_buffers_requested;
	int stream_buffers_mapped;
	int stream_contig_map;
	struct v4l2_queue stream_q_capture;	/* buffers waiting to be filled */
	struct v4l2_queue stream_q_done;	/* filled buffers awaiting DQBUF */
	struct timeval stream_begin;
	unsigned long stream_last_frame;
	/* Image format conversions */
	struct translation translation;
	__u8 *xlat_temp;/* vmalloc() */
	/* Performance statistics */
	struct v4l2_performance perf;
	/* frame counter for test images */
	int h,m,s,f;
	/* video preview stuff */
	struct v4l2_framebuffer fbuf;
	struct v4l2_window window;
};
/* Values for type field */
#define DEVICE_TYPE_0 0
/* Extreme video dimensions */
#define MIN_WIDTH 32
#define MIN_HEIGHT 24
#define MAX_WIDTH 704
#define MAX_HEIGHT 290
#define MAX_FRAME_AGE 200 /* ms */
/*
 * The Capture device structure array. This is the only global
 * variable in the module besides those used by the device probing
 * and enumeration routines (command line overrides)
 */
#define NBOARDS 2
static struct capture_device capture[NBOARDS];
/* /dev/video minor number for each board; overridable on the command line. */
static int unit_video[NBOARDS] = { 0, 1, };
MODULE_PARM(unit_video, "1-"__MODULE_STRING(NBOARDS)"i");
/* Recover our per-device state from the V4L2 core's per-file pointer.
 * Valid because struct v4l2_device is the first member of capture_device. */
static inline struct capture_device *
capture_device_from_file(struct file *file)
{
	return (struct capture_device *)v4l2_device_from_file(file);
}
/* These macros can be used to make device I/O operations atomic */
/* static spinlock_t device_lock = SPIN_LOCK_UNLOCKED; */
/* #define BEGIN_CRITICAL_SECTION */
/* do{unsigned long flags;spin_lock_irqsave(&wavi_lock,flags) */
/* #define END_CRITICAL_SECTION */
/* spin_unlock_irqrestore(&wavi_lock,flags);}while(0) */
/*
 * D E V I C E F U N C T I O N S
 */
/* One-time hardware bring-up: reset the KS0127 decoder, select composite
 * input / NTSC / YUV656 output, disable the tuner (-1 = no tuner type),
 * and reset the ZR36060 codec. Call order follows the decoder's
 * reset-then-configure requirement. */
static void
device_initialize(struct capture_device *dev)
{
	int in;
	/* TODO: Put hardware into a sensible state and */
	/* do the one-time startup things */
	mgavideo_decoder( dev->mga, KS0127_RESET, 0 );
	in = KS_INPUT_COMPOSITE;
	mgavideo_decoder( dev->mga, KS0127_SET_INPUT, &in );
	in = KS_STD_NTSC;
	mgavideo_decoder( dev->mga, KS0127_SET_STANDARD, &in );
	in = KS_OUTPUT_YUV656E;
	mgavideo_decoder( dev->mga, KS0127_SET_OUTPUT, &in );
	in = -1;
	mgavideo_tuner( dev->mga, TUNER_SET_TYPE, &in );
	mgavideo_zr36060_reset(dev->mga);
}
/* Thin wrappers forwarding one tone-control value each to the KS0127
 * video decoder. Value ranges match capture_control[] (-126..126). */
static void
device_brightness(struct capture_device *dev, int x)
{
	mgavideo_decoder( dev->mga, KS0127_SET_BRIGHTNESS, &x);
}
static void
device_contrast(struct capture_device *dev, int x)
{
	mgavideo_decoder( dev->mga, KS0127_SET_CONTRAST, &x);
}
static void
device_saturation(struct capture_device *dev, int x)
{
	mgavideo_decoder( dev->mga, KS0127_SET_SATURATION, &x);
}
static void
device_hue(struct capture_device *dev, int x)
{
	mgavideo_decoder( dev->mga, KS0127_SET_HUE, &x);
}

/* Push all four stored tone controls of the currently selected input
 * down to the hardware (used after switching inputs). */
static void
device_tone_controls(struct capture_device *dev)
{
	int *ctrl;
	ctrl = dev->source[dev->input].control;
	device_brightness(dev, ctrl[VCTRL_BRIGHTNESS]);
	device_contrast(dev, ctrl[VCTRL_CONTRAST]);
	device_saturation(dev, ctrl[VCTRL_SATURATION]);
	device_hue(dev, ctrl[VCTRL_HUE]);
}
/* Start (x != 0) or stop (x == 0) the capture hardware.
 * Not implemented yet in this port -- see the XXX marker. */
static void
grabbing_enable(struct capture_device *dev, int x)
{
	// XXX
}
- static unsigned long
- current_time_ms(void)
- {
- struct timeval now;
- do_gettimeofday(&now);
- return now.tv_sec * 1000 + now.tv_usec / 1000;
- }
/* Select video input i (0..KS_INPUT_COUNT-1): program the decoder and
 * restore the tone controls stored for that input. Out-of-range values
 * are silently ignored. */
static void
set_video_input(struct capture_device *dev, int i)
{
	if (i < 0 || i >= KS_INPUT_COUNT)
		return;
	dev->input = i;
	mgavideo_decoder( dev->mga, KS0127_SET_INPUT, &i );
	device_tone_controls(dev);
}
- static void
- set_video_standard(struct capture_device *dev, int x)
- {
- int in;
- dev->standard = x;
- switch (x)
- {
- case V4L2_STD_NTSC:
- dev->frame_period = 333667;
- in = KS_STD_NTSC;
- break;
- case V4L2_STD_PAL:
- dev->frame_period = 400000;
- in = KS_STD_PAL;
- break;
- case V4L2_STD_SECAM:
- dev->frame_period = 400000;
- in = KS_STD_SECAM;
- break;
- }
- mgavideo_decoder( dev->mga, KS0127_SET_STANDARD, &in );
- }
/* Tune the TV tuner to *freq: mute audio during the change, program the
 * tuner, reset the MSP3400 sound processor for the new channel, and
 * remember the frequency for the current input. */
static void
set_video_freq(struct capture_device *dev, int *freq)
{
	int norm = 0;
	mgavideo_audio( dev->mga, MSP_SWITCH_MUTE, 0 );
	mgavideo_tuner( dev->mga, TUNER_SET_TVFREQ, freq );
	mgavideo_audio( dev->mga, MSP_SET_TVNORM, &norm );
	mgavideo_audio( dev->mga, MSP_NEWCHANNEL, 0 );
	dev->source[dev->input].freq = *freq;
}
- /*
- *
- * I M A G E F O R M A T T R A N S L A T I O N
- *
- */
- static int
- translate_yuyv_grey(struct translation *xlat)
- {
- __u8 *esi, *edi;
- __u32 eax, ebx, ecx, edx;
- int row;
- esi = xlat->in;
- edi = xlat->out;
- eax = xlat->in_stride - xlat->width * 2;
- ebx = xlat->out_stride - xlat->width;
- for (row = xlat->height; row; esi += eax, edi += ebx, --row)
- {
- for (ecx = xlat->width >> 2; ecx; --ecx)
- {
- edx = esi[4] | (esi[6] << 8);
- edx <<= 16;
- edx |= esi[0] | (esi[2] << 8);
- esi += 8;
- *(__u32 *)edi = edx;
- edi += 4;
- }
- }
- return 1;
- }
/* Convert packed YUYV to planar YUV 4:2:0: a full-resolution Y plane
 * followed by U and V planes subsampled 2x2 (chroma taken from odd
 * source lines only). The three loops write the planes back-to-back
 * through 'edi'. Always returns 1. */
static int
translate_yuyv_yuv420(struct translation *xlat)
{
	__u8 *esi, *edi;
	__u32 eax, ebx, ecx, edx;
	__u8 dl;
	int row;
	/* Y's */
	esi = xlat->in;
	edi = xlat->out;
	eax = xlat->in_stride - xlat->width * 2;	/* source line padding */
	ebx = xlat->out_stride - xlat->width;		/* dest line padding */
	for (row = xlat->height; row; esi += eax, edi += ebx, --row)
	{
		for (ecx = xlat->width >> 2; ecx; --ecx)
		{
			/* Gather Y0..Y3 into one 32-bit store. */
			edx = esi[4] | (esi[6] << 8);
			edx <<= 16;
			edx |= esi[0] | (esi[2] << 8);
			esi += 8;
			*(__u32 *)edi = edx;
			edi += 4;
		}
	}
	/* U's */
	esi = xlat->in + xlat->in_stride;	/* start on line 1 */
	eax = xlat->in_stride * 2 - xlat->width * 2;	/* skip every other line */
	ebx >>= 1;	/* chroma plane padding is half the luma padding */
	for (row = xlat->height >> 1; row; esi += eax, edi += ebx, --row)
	{
		for (ecx = xlat->width >> 1; ecx; ++edi, --ecx)
		{
			dl = esi[1];	/* U byte of each YUYV pair */
			esi += 4;
			*edi = dl;
		}
	}
	/* V's */
	esi = xlat->in + xlat->in_stride;	/* rescan the same lines for V */
	for (row = xlat->height >> 1; row; esi += eax, edi += ebx, --row)
	{
		for (ecx = xlat->width >> 1; ecx; ++edi, --ecx)
		{
			dl = esi[3];	/* V byte of each YUYV pair */
			esi += 4;
			*edi = dl;
		}
	}
	return 1;
}
/* 12-bit fixed-point (4096 = 1.0) ITU-R 601 YUV->RGB coefficients:
 * G -= 0.344*U, B += 1.772*U, R += 1.402*V, G -= 0.714*V. */
#define K12_1 4096
#define K12_S 12
#define K12_GU -1409
#define K12_BU 7258
#define K12_RV 5743
#define K12_GV -2925
/* Expand a studio-swing luma value (nominal 0..220 span) to full-range
 * 0..255 with rounding, clamping the result into [0, 255]. */
static int
translate_expand_y(int y)
{
	int full = (255 * y + 110) / 220;

	if (full < 0)
		return 0;
	return (full > 255) ? 255 : full;
}
/* Expand a studio-swing chroma value (nominal -112..112 span) to the
 * full signed 8-bit range with rounding, clamping into [-128, 127]. */
static int
translate_expand_c(int c)
{
	int full = (127 * c + 56) / 112;

	if (full < -128)
		return -128;
	return (full > 127) ? 127 : full;
}
/* Build (or reuse) the 128 KB YUV->RGB16 lookup table for the RGB555 or
 * RGB565 translation. The table is indexed by a 16-bit key packed as
 * (Y>>2)<<10 | (U>>3)<<5 | (V>>3) -- 64 luma levels x 32 x 32 chroma
 * levels -- matching the key built in translate_yuyv_rgb16().
 * Returns 1 on success, 0 if vmalloc() fails. */
static int
translate_make_rgb16_lut(struct translation *xlat)
{
	__u16 *lut;
	long gu[32], bu[32], rv[32], gv[32];
	int rscale[256], gscale[256], bscale[256];
	int rrange, grange, brange;
	int rshift, gshift, bshift;
	long x;
	int y, u, v;
	int r, g, b;
	int i, t;
	/* Table already built for this exact format? Reuse it. */
	if ((xlat->type == XLAT_YUYV_TO_RGB555 &&
	     xlat->lut.type == LUT_RGB555) ||
	    (xlat->type == XLAT_YUYV_TO_RGB565 &&
	     xlat->lut.type == LUT_RGB565))
		return 1;
	if (xlat->lut.table.base)
		vfree(xlat->lut.table.base);
	xlat->lut.table.base = vmalloc(1 << 17);	/* 65536 x __u16 */
	if (xlat->lut.table.base == NULL)
	{
		err_msg("vmalloc() failed in make_rgb16_lutn");
		return 0;
	}
	lut = xlat->lut.table.rgb16;
	// Compute all different chroma components to 8-bit precision
	/* 32 steps of 8 across the excess-128 range; +2 is rounding bias. */
	for (i = 0, t = -128; t < 128; t += 8, ++i)
	{
		x = translate_expand_c(t) + 2;
		gu[i] = (K12_GU * x + K12_1/2) >> K12_S;
		bu[i] = (K12_BU * x + K12_1/2) >> K12_S;
		rv[i] = (K12_RV * x + K12_1/2) >> K12_S;
		gv[i] = (K12_GV * x + K12_1/2) >> K12_S;
	}
	// 8-bit to ?-bit scaling tables
	if (xlat->type == XLAT_YUYV_TO_RGB555)
	{
		xlat->lut.type = LUT_RGB555;
		rrange = grange = brange = 31;
		rshift = 10; gshift = 5; bshift = 0;
	}
	else
	{
		xlat->lut.type = LUT_RGB565;
		rrange = brange = 31;
		grange = 63;	/* 565: green gets the extra bit */
		rshift = 11; gshift = 5; bshift = 0;
	}
	for (i = 0; i < 256; ++i)
	{
		rscale[i] = ((i * rrange + 127) / 255) << rshift;
		gscale[i] = ((i * grange + 127) / 255) << gshift;
		bscale[i] = ((i * brange + 127) / 255) << bshift;
	}
	// Fill in the RGB values for each combination of YUV
	for (i = 0; i < 256; i += 4)	/* luma quantized to 64 levels */
	{
		y = translate_expand_y(i) + 2;
		if (y > 255) y = 255;
		for (u = 0; u < 32; ++u)
		for (v = 0; v < 32; ++v)
		{
			// Red, Green and Blue
			r = y + rv[v];
			g = y + gu[u] + gv[v];
			b = y + bu[u];
			// Saturate
			if (r < 0) r = 0; else if (r > 254) r = 254;
			if (g < 0) g = 0; else if (g > 254) g = 254;
			if (b < 0) b = 0; else if (b > 254) b = 254;
			// scale, shift and combine
			*lut++ = rscale[r] + gscale[g] + bscale[b];
		}
	}
	return 1;
}
/* Convert YUYV to RGB555/RGB565 two pixels at a time via the LUT built
 * by translate_make_rgb16_lut(). Each 32-bit source word V|Y1|U|Y0 is
 * turned into a 10-bit chroma key (top 5 bits of U and V) shared by both
 * pixels, combined with each pixel's top 6 luma bits -- matching the
 * (Y>>2)<<10 | (U>>3)<<5 | (V>>3) table layout.
 * Returns 1 on success, 0 on missing LUT or NULL buffers. */
static int
translate_yuyv_rgb16(struct translation *xlat)
{
	__u32 *src, *dst;
	__u32 uv, yuv0, yuv1, dual;
	__u16 *lut;
	int stride;
	int row, i;
	if (!translate_make_rgb16_lut(xlat))
		return 0;
	lut = xlat->lut.table.rgb16;
	src = (__u32 *)xlat->in;
	dst = (__u32 *)xlat->out;
	if (src == NULL || dst == NULL)
		return 0;
	stride = (xlat->out_stride - xlat->width * 2) >> 2;	/* pad in words */
	for (row = xlat->height; row; --row)
	{
		for (i = xlat->width >> 1; i; --i)
		{
			dual = *src++;
			/* U[7:3] -> key bits 9..5, V[7:3] -> key bits 4..0 */
			uv = ((dual & 0x0000F800) >> 6)
			     + ((dual & 0xF8000000) >> 27);
			/* Y[7:2] -> key bits 15..10 for each pixel */
			yuv1 = ((dual & 0x00FC0000) >> 8) + uv;
			yuv0 = ((dual & 0x000000FC) << 8) + uv;
			/* Store both 16-bit pixels with one 32-bit write. */
			*dst++ = ((__u32)lut[yuv1] << 16) | lut[yuv0];
		}
		dst += stride;
	}
	return 1;
}
/* Build (or reuse) the lookup tables for YUYV -> RGB24/RGB32 conversion.
 * Each u_rgb/v_rgb/y_rgb entry packs per-channel partial sums into one
 * word as R<<22 | G<<11 | B, so a pixel is formed by three table adds;
 * the sat/sat8/sat16 tables then clamp each 11-bit channel (negative
 * values alias to >511 and clamp to 0). Returns 1 on success, 0 if
 * vmalloc() fails. */
static int
translate_make_rgb24_lut(struct translation *xlat)
{
	struct lookup_rgb24 *lut;
	int r, g, b;
	int i;
	int j;
	int x;
	if (xlat->lut.type == LUT_RGB24)
		return 1;	/* already built */
	if (xlat->lut.table.base)
		vfree(xlat->lut.table.base);
	xlat->lut.table.base = vmalloc(sizeof(struct lookup_rgb24));
	if (xlat->lut.table.base == NULL)
	{
		err_msg("vmalloc() failed in make_rgb24_lutn");
		return 0;
	}
	xlat->lut.type = LUT_RGB24;
	lut = xlat->lut.table.rgb24;
	for (i = 0; i < 256; ++i)
	{
		x = i; // Value is in excess-128 format
		if (x < 128)
			++x; // Add 1 to negative values for noise rejection
		x -= 128; // Convert to two's complement format
		x = translate_expand_c(x);
		g = (K12_GU * x + K12_1/2) >> K12_S;
		b = (K12_BU * x + K12_1/2) >> K12_S;
		lut->u_rgb[i] = ((g & 0x3FF) << 11) | (b & 0x3FF);
		r = (K12_RV * x + K12_1/2) >> K12_S;
		g = (K12_GV * x + K12_1/2) >> K12_S;
		lut->v_rgb[i] = (r << 22) | ((g & 0x3FF) << 11);

		x = translate_expand_y(i);
		lut->y_rgb[i] = (x << 22) | (x << 11) | x;
	}
	/* Saturation tables: 256..511 clamps high, 512..1023 (wrapped
	 * negatives) clamps low. */
	for (i = 0; i < 1024; ++i)
	{
		x = (i > 511) ? 0 : ((i > 255) ? 255 : i);
		lut->sat[i] = x;
		lut->sat8[i] = x << 8;
		lut->sat16[i] = x << 16;
	}
	/* Precombined U+V table (not used by the translators below,
	 * which add u_rgb and v_rgb directly). */
	for(i=0; i < 256; i++) {
		for(j = 0; j < 256; j++) {
			lut->uv_rgb[i*256 + j] = lut->u_rgb[i] + lut->v_rgb[j];
		}
	}
	return 1;
}
/* Convert YUYV to packed 24-bit BGR, four pixels (12 output bytes, three
 * 32-bit stores) per iteration. pela..peld hold the packed 11-bit R/G/B
 * sums for pixels 0..3; the sat[] table clamps each channel to 8 bits as
 * the bytes are assembled. Returns 1 on success, 0 on missing LUT or
 * NULL buffers. */
static int
translate_yuyv_rgb24(struct translation *xlat)
{
	struct lookup_rgb24 *lut;
	__u32 *src;
	__u32 *dst;
	union pixel {
		__u32 yuyv;
		__u8 part[4];	/* [0]=Y0 [1]=U [2]=Y1 [3]=V (little-endian) */
	} pixel_data;
	int i;
	int row;
	int stride;
	__u32 pela, pelb, pelc, peld;
	if (!translate_make_rgb24_lut(xlat))
		return 0;
	lut = xlat->lut.table.rgb24;
	src = (__u32 *)xlat->in;
	dst = (__u32 *)xlat->out;
	if (src == NULL || dst == NULL)
		return 0;
	stride = (xlat->out_stride - 3 * xlat->width) >> 2;	/* pad in words */
	for (row = xlat->height; row; --row) {
		for (i = xlat->width >> 2; i; --i)
		{
			pixel_data.yuyv = *src++;
			/* Chroma contribution is shared by both pixels of a pair. */
			pelb = lut->u_rgb[pixel_data.part[1]]
			       + lut->v_rgb[pixel_data.part[3]];
			pela = lut->y_rgb[pixel_data.part[0]] + pelb;
			pelb += lut->y_rgb[pixel_data.part[2]];
			pixel_data.yuyv = *src++;
			peld = lut->u_rgb[pixel_data.part[1]]
			       + lut->v_rgb[pixel_data.part[3]];
			pelc = lut->y_rgb[pixel_data.part[0]] + peld;
			peld += lut->y_rgb[pixel_data.part[2]];
			/* Pack 4 BGR pixels into 3 words: BGRB | GRBG | RBGR. */
			dst[0] = ((u32)lut->sat[pela & 0x3FF])
				 + ((u32)lut->sat[(pela >> 11) & 0x3FF] << 8)
				 + ((u32)lut->sat[pela >> 22] << 16)
				 + ((u32)lut->sat[pelb & 0x3FF] << 24);
			dst[1] = ((u32)lut->sat[(pelb >> 11) & 0x3FF])
				 + ((u32)lut->sat[pelb >> 22] << 8)
				 + ((u32)lut->sat[pelc & 0x3FF] << 16)
				 + ((u32)lut->sat[(pelc >> 11) & 0x3FF] << 24);
			dst[2] = ((u32)lut->sat[pelc >> 22])
				 + ((u32)lut->sat[peld & 0x3FF] << 8)
				 + ((u32)lut->sat[(peld >> 11) & 0x3FF] << 16)
				 + ((u32)lut->sat[peld >> 22] << 24);
			dst += 3;
		}
		dst += stride;
	}
	return 1;
}
/* Convert YUYV to 32-bit BGRx, two pixels (two 32-bit stores) per
 * iteration using the same packed-sum tables as the RGB24 path; the
 * sat/sat8/sat16 tables clamp each channel directly into its output
 * byte position. Returns 1 on success, 0 on missing LUT or NULL buffers. */
static int
translate_yuyv_rgb32(struct translation *xlat)
{
	struct lookup_rgb24 *lut;
	__u32 *src;
	__u32 *dst;
	union pixel {
		__u32 yuyv;
		__u8 part[4];	/* [0]=Y0 [1]=U [2]=Y1 [3]=V (little-endian) */
	} pixel_data;
	int i;
	int row;
	int stride;
	__u32 pela, pelb;
	if (!translate_make_rgb24_lut(xlat))
		return 0;
	lut = xlat->lut.table.rgb24;
	src = (__u32 *)xlat->in;
	dst = (__u32 *)xlat->out;
	if (src == NULL || dst == NULL)
		return 0;
	stride = (xlat->out_stride - 4 * xlat->width) >> 2;	/* pad in words */
	for (row = xlat->height; row; --row)
	{
		for (i = xlat->width >> 1; i--;)
		{
			pixel_data.yuyv = *src++;
			/* Shared chroma sum for the pixel pair. */
			pelb = lut->u_rgb[pixel_data.part[1]] +
			       lut->v_rgb[pixel_data.part[3]];
			pela = lut->y_rgb[pixel_data.part[0]] + pelb;
			pelb += lut->y_rgb[pixel_data.part[2]];
			dst[0] = lut->sat[pela & 0x3FF]
				 + (lut->sat8[(pela >> 11) & 0x3FF])
				 + (lut->sat16[pela >> 22]);
			dst[1] = lut->sat[pelb & 0x3FF]
				 + (lut->sat8[(pelb >> 11) & 0x3FF])
				 + (lut->sat16[pelb >> 22]);
			dst += 2;
		}
		dst += stride;
	}
	return 1;
}
- static void
- translate_close(struct capture_device *dev)
- {
- dev->translation.type = XLAT_NULL;
- dev->translation.in = NULL;
- dev->translation.out = NULL;
- dev->translation.lut.type = LUT_NULL;
- if (dev->translation.lut.table.base)
- vfree(dev->translation.lut.table.base);
- dev->translation.lut.table.base = NULL;
- if (dev->xlat_temp)
- vfree(dev->xlat_temp);
- dev->xlat_temp = NULL;
- }
- static int
- translate_setup(struct capture_device *dev)
- {
- int npix2;
- translate_close(dev);
- /* Translation: YUYV to client format */
- dev->translation.width = dev->clientfmt.width;
- dev->translation.height = dev->clientfmt.height;
- dev->translation.in_stride = dev->translation.width * 2;
- dev->translation.output_size = dev->clientfmt.sizeimage;
- npix2 = dev->translation.width * dev->translation.height;
- switch (dev->clientfmt.pixelformat)
- {
- case V4L2_PIX_FMT_YUYV:
- dev->translation.type = XLAT_NULL;
- break;
- case V4L2_PIX_FMT_GREY:
- dev->translation.type = XLAT_YUYV_TO_GREY;
- dev->translation.out_stride = dev->translation.width;
- break;
- case V4L2_PIX_FMT_YUV420:
- dev->translation.type = XLAT_YUYV_TO_YUV420;
- dev->translation.out_stride = dev->translation.width;
- break;
- case V4L2_PIX_FMT_RGB555:
- case V4L2_PIX_FMT_RGB565:
- dev->translation.type = (dev->clientfmt.pixelformat ==
- V4L2_PIX_FMT_RGB555) ? XLAT_YUYV_TO_RGB555
- : XLAT_YUYV_TO_RGB565;
- dev->translation.out_stride = dev->translation.width * 2;
- if (!translate_make_rgb16_lut(&dev->translation))
- return 0;
- break;
- case V4L2_PIX_FMT_BGR24:
- dev->translation.type = XLAT_YUYV_TO_RGB24;
- dev->translation.out_stride = dev->translation.width * 3;
- if (!translate_make_rgb24_lut(&dev->translation))
- return 0;
- break;
- #ifdef NEVER
- case V4L2_PIX_FMT_BGR24:
- dev->translation.type = XLAT_NULL;
- break;
- #endif
- case V4L2_PIX_FMT_BGR32:
- dev->translation.type = XLAT_YUYV_TO_RGB32;
- dev->translation.out_stride = dev->translation.width * 4;
- if (!translate_make_rgb24_lut(&dev->translation))
- return 0;
- break;
- }
- return 1;
- }
/* Point the translation stage at its source and destination buffers for
 * the next conversion; output_is_user_space flags 'output_buffer' as a
 * user-space pointer (checked in translate_image()). */
static void
translate_inandout(struct capture_device *dev,
	__u8 *input_buffer,
	__u8 *output_buffer,
	__u8 output_is_user_space)
{
	/* Translation: YUYV to client format */
	dev->translation.in = input_buffer;
	dev->translation.out = output_buffer;
	dev->translation.output_is_user = output_is_user_space;
}
/* Convert one captured YUYV frame in input_buffer into the client format
 * in output_buffer (kernel or user space, per output_is_user). Returns
 * the number of bytes produced or a negative errno.
 * NOTE(review): for non-NULL translations with output_is_user set, the
 * translators write through the raw user pointer after only an
 * access_ok() check -- this relies on the old (2.2-era) rules where that
 * was permitted for the current process's mapped memory. */
static int /* length of output image or negative error */
translate_image(struct capture_device *dev,
	__u8 *input_buffer,
	__u8 *output_buffer,
	int len,
	int output_is_user)
{
	int err;
	/* The buffer must be large enough for the whole image */
	if (len < dev->translation.output_size)
	{
		debug_msg("Read buffer too small, %d < %dn",
			len, dev->translation.output_size);
		return -EFAULT;
	}
	if (len > dev->translation.output_size)
		len = dev->translation.output_size;
	translate_inandout(dev, input_buffer, output_buffer, output_is_user);
	/* Translation: YUYV to client format */
	if (dev->translation.type == XLAT_NULL)
	{
		/* Pass-through: plain copy (or nothing if in-place). */
		if (dev->translation.in == dev->translation.out)
			return len;
		if (!output_is_user)
		{
			memcpy(output_buffer, dev->translation.in, len);
		}
		else
		{
			err = copy_to_user(output_buffer,
				dev->translation.in, len);
			len = (err) ? -EFAULT : len;
		}
		return len;
	}
	if (output_is_user && !access_ok(VERIFY_WRITE, output_buffer, len))
	{
		debug_msg("Buffer verify failed in translate_imagen");
		return -EFAULT;
	}
	switch (dev->translation.type)
	{
	case XLAT_YUYV_TO_GREY:
		translate_yuyv_grey(&dev->translation);
		break;
	case XLAT_YUYV_TO_YUV420:
		translate_yuyv_yuv420(&dev->translation);
		break;
	case XLAT_YUYV_TO_RGB555:
	case XLAT_YUYV_TO_RGB565:
		translate_yuyv_rgb16(&dev->translation);
		break;
	case XLAT_YUYV_TO_RGB24:
		translate_yuyv_rgb24(&dev->translation);
		break;
	case XLAT_YUYV_TO_RGB32:
		translate_yuyv_rgb32(&dev->translation);
		break;
	default:
		debug_msg("Unknown image translationn");
		break;
	}
	dev->translation.out = NULL;	/* don't keep a stale output pointer */
	return len;
}
- /*
- *
- * V I D E O C A P T U R E F U N C T I O N S
- *
- */
- /*
- * Supported capture formats (for VIDIOC_ENUM_CAPFMT)
- */
/* Capture formats reported to clients. Field order:
 * {index, description, pixelformat, flags, depth, reserved}.
 * SWCONVERSION marks formats produced in software from the hardware's
 * native YUYV; only YUYV itself comes straight from the grabber. */
static struct v4l2_fmtdesc capfmt[] =
{
	{ 0, {"RGB-16 (5-5-5)"},
	  V4L2_PIX_FMT_RGB555, V4L2_FMT_FLAG_SWCONVERSION, 16, {0, 0},
	},
	{ 1, {"RGB-16 (5-6-5)"},
	  V4L2_PIX_FMT_RGB565, V4L2_FMT_FLAG_SWCONVERSION, 16, {0, 0},
	},
	{ 2, {"RGB-24 (B-G-R)"},
	  V4L2_PIX_FMT_BGR24, V4L2_FMT_FLAG_SWCONVERSION, 24, {0, 0},
	},
	{ 3, {"RGB-32 (B-G-R-?)"},
	  V4L2_PIX_FMT_BGR32, V4L2_FMT_FLAG_SWCONVERSION, 32, {0, 0},
	},
	{ 4, {"Greyscale-8"},
	  V4L2_PIX_FMT_GREY, V4L2_FMT_CS_601YUV | V4L2_FMT_FLAG_SWCONVERSION, 8, {0, 0},
	},
	{ 5, {"YUV 4:2:2 (Y-U-Y-V)"},
	  V4L2_PIX_FMT_YUYV, V4L2_FMT_CS_601YUV, 16, {0, 0},
	},
	{ 6, {"YUV 4:2:0 (planar)"},
	  V4L2_PIX_FMT_YUV420, V4L2_FMT_CS_601YUV | V4L2_FMT_FLAG_SWCONVERSION, 12, {0, 0},
	},
};
#define NUM_CAPFMT (sizeof(capfmt)/sizeof(capfmt[0]))
static void interrupt_enable(struct capture_device *dev);
/* The image format has changed, width, height, pixel format.
 * Decide if the format is ok or take the closest valid format.
 */
/* Normalize dev->clientfmt in place: derive depth from the pixel format
 * (falling back to YUYV for unknown formats), clamp width/height to the
 * selected standard's active area, fix up the interlace/field flags, and
 * recompute sizeimage/bytesperline and the raw capture size. Also
 * programs the new dimensions into the hardware. Always returns 1. */
static int
capture_new_format(struct capture_device *dev)
{
	dev->ready_to_capture = 0;	/* force capture_begin() to re-setup */
	/* Active source dimensions for the selected standard (one field). */
	switch (dev->standard) {
	case V4L2_STD_NTSC:
		dev->source_width = 704;
		dev->source_height = 240;
		break;
	case V4L2_STD_PAL:
		dev->source_width = 704;
		dev->source_height = 290;
		break;
	case V4L2_STD_SECAM:
		dev->source_width = 704;
		dev->source_height = 290;
		break;
	}
	dev->clientfmt.flags &= ~V4L2_FMT_CS_field;
	dev->clientfmt.flags |= V4L2_FMT_CS_601YUV;
	switch (dev->clientfmt.pixelformat)
	{
	case V4L2_PIX_FMT_GREY:
		dev->clientfmt.depth = 8;
		break;
	case V4L2_PIX_FMT_YUV420:
		dev->clientfmt.depth = 12;
		break;
	case V4L2_PIX_FMT_RGB555:
	case V4L2_PIX_FMT_RGB565:
		dev->clientfmt.flags = 0;	/* RGB: no YUV colorspace flag */
		/* fall thru */
	case V4L2_PIX_FMT_YUYV:
	case V4L2_PIX_FMT_UYVY:
		dev->clientfmt.depth = 16;
		break;
	case V4L2_PIX_FMT_BGR24:
		dev->clientfmt.depth = 24;
		dev->clientfmt.flags = 0;
		break;
	case V4L2_PIX_FMT_BGR32:
		dev->clientfmt.depth = 32;
		dev->clientfmt.flags = 0;
		break;
	default:
		/* Unknown request: fall back to the native YUYV format. */
		debug_msg("unknown format %4.4sn",
			(char *)&dev->clientfmt.pixelformat);
		dev->clientfmt.depth = 16;
		dev->clientfmt.pixelformat = V4L2_PIX_FMT_YUYV;
		dev->clientfmt.flags = 0;
		break;
	}
	dev->capture_bypp = 2;	/* hardware always captures YUYV */
	if (dev->clientfmt.height <= dev->source_height)
		dev->clientfmt.flags &= ~V4L2_FMT_FLAG_INTERLACED;
	if (dev->clientfmt.flags & V4L2_FMT_FLAG_INTERLACED) {
		/* Interlaced: both fields, up to twice the field height. */
		dev->clientfmt.flags |= V4L2_FMT_FLAG_TOPFIELD |
			V4L2_FMT_FLAG_BOTFIELD;

		if (dev->clientfmt.height > dev->source_height * 2)
			dev->clientfmt.height = dev->source_height * 2;
		if (dev->clientfmt.height < 32)
			dev->clientfmt.height = 32;
	} else {
		/* Make sure that at least 1 field is requested */
		if ((dev->clientfmt.flags &
		     (V4L2_FMT_FLAG_TOPFIELD | V4L2_FMT_FLAG_BOTFIELD)) == 0)
			dev->clientfmt.flags |= V4L2_FMT_FLAG_TOPFIELD;

		if (dev->clientfmt.height > dev->source_height)
			dev->clientfmt.height = dev->source_height;
		if (dev->clientfmt.height < 32)
			dev->clientfmt.height = 32;
	}
	if (dev->clientfmt.width > dev->source_width)
		dev->clientfmt.width = dev->source_width;
	if (dev->clientfmt.width < 32)
		dev->clientfmt.width = 32;
	dev->clientfmt.width &= ~3;	/* translators need a multiple of 4 */
	mgavideo_set_dims(dev->mga, dev->clientfmt.width, dev->clientfmt.height);
	dev->clientfmt.sizeimage = (dev->clientfmt.width
				    * dev->clientfmt.height
				    * dev->clientfmt.depth)
				   / 8;
	dev->clientfmt.flags |= V4L2_FMT_FLAG_BYTESPERLINE;
	dev->clientfmt.bytesperline = (dev->clientfmt.width *
				       dev->clientfmt.depth) / 8;
	dev->capture_size = dev->clientfmt.width
			    * dev->clientfmt.height
			    * dev->capture_bypp;
	/* TODO: Any other driver state related to the image format */
	return 1;
}
/* Stop the music!
 */
/* Immediately stop any capture in progress and mark the grabber idle. */
static void
capture_abort(struct capture_device *dev)
{
	dev->grabber_enabled = 0;
	/* Turn off the capture hardware */
	grabbing_enable(dev, 0);
}
/* Allocate buffers, and get everything ready to capture
 * an image, but don't start capturing yet.
 * Returns the resulting ready_to_capture state (1 = ready, 0 = failed,
 * e.g. the translation setup could not allocate its tables). */
static int
capture_begin(struct capture_device *dev)
{
	capture_abort(dev);
	if (dev->ready_to_capture)
		return dev->ready_to_capture;	/* already set up */
	if (!translate_setup(dev))
		return dev->ready_to_capture;	/* still 0 */
	interrupt_enable(dev);
	return (dev->ready_to_capture = 1);
}
/* Start an image capture
 */
/* Kick off capture of the next frame, first running capture_begin() if
 * the device is not set up. No-op if a grab is already in flight. */
static void
capture_grab_frame(struct capture_device *dev)
{
	if (dev->ready_to_capture && dev->grabber_enabled) {
		return;	/* a capture is already in progress */
	}
	capture_begin(dev);
	if (!dev->ready_to_capture)
		return;
	/* TODO: Prepare the hardware for the next capture */
	/* Set up stream_capture_buffer to point to the buffer to */
	/* capture the next frame into */
	if (dev->streaming)
	{
		struct stream_buffer *buf;
		/* Go straight into streaming buffer? */
		if (dev->translation.type == XLAT_NULL)
		{
			buf = v4l2_q_peek_head(&dev->stream_q_capture);
			if (buf != NULL)
			{
				//XXX no streaming yet dev->stream_capture_buffer = buf->vaddress;
				//list = buf->dma_list;
			}
		}
	}
	/* Start the hardware */
	grabbing_enable(dev, 1);
	dev->grabber_enabled = 1;
	dev->capture_completed = 0;
}
- /*
- * STREAMING CAPTURE
- */
- static int/* 1 = success; 0 = failed */
- capture_queuebuffer(struct capture_device *dev,
- struct v4l2_buffer *vidbuf)
- {
- int i = vidbuf->index;
- struct stream_buffer *buf = NULL;
- if (!dev->stream_buffers_mapped)
- {
- debug_msg("QBUF no buffers mappedn");
- return 0;
- }
- if (vidbuf->type != V4L2_BUF_TYPE_CAPTURE)
- {
- debug_msg("QBUF wrong typen");
- return 0;
- }
- if (i < 0 || i >= MAX_CAPTURE_BUFFERS || !dev->stream_buf[i].requested)
- {
- debug_msg("QBUF buffer index %d is out of rangen", i);
- return 0;
- }
- buf = &dev->stream_buf[i];
- if (!(buf->vidbuf.flags & V4L2_BUF_FLAG_MAPPED))
- {
- debug_msg("QBUF buffer %d is not mappedn", i);
- return 0;
- }
- if ((buf->vidbuf.flags & V4L2_BUF_FLAG_QUEUED))
- {
- debug_msg("QBUF buffer %d is already queuedn", i);
- return 0;
- }
- buf->vidbuf.flags &= ~V4L2_BUF_FLAG_DONE;
- v4l2_q_add_tail(&dev->stream_q_capture, &buf->qnode);
- buf->vidbuf.flags |= V4L2_BUF_FLAG_QUEUED;
- return 1;
- }
- static int/* 1 = got a buffer; 0 = no buffers */
- capture_dequeuebuffer(struct capture_device *dev,
- struct v4l2_buffer *buf)
- {
- struct stream_buffer *newbuf;
- if (!dev->streaming || buf->type != V4L2_BUF_TYPE_CAPTURE)
- {
- debug_msg("DQBUF not streaming or wrong buffer typen");
- return 0;
- }
- newbuf = v4l2_q_del_head(&dev->stream_q_done);
- if (newbuf == NULL)
- {
- debug_msg("DQBUF nothing on done queuen");
- return 0;
- }
- newbuf->vidbuf.flags &= ~V4L2_BUF_FLAG_QUEUED;
- *buf = newbuf->vidbuf;
- return 1;
- }
/* VIDIOC_STREAMON: reset the stream clock and statistics, clamp the
 * requested frame rate to the source rate, recycle leftover done
 * buffers, and start the first grab. Returns 1 on success (or if
 * already streaming), 0 on wrong buffer type. */
static int
capture_streamon(struct capture_device *dev,
	__u32 type)
{
	struct stream_buffer *buf;
	if (dev->streaming)
		return 1;
	if (type != V4L2_BUF_TYPE_CAPTURE)
	{
		debug_msg("STREAMON wrong buffer typen");
		return 0;
	}
	capture_abort(dev);/* cancel any capture that might be in progress */
	/* -2 is a magic number that triggers start-of-stream logic in */
	/* capture_interrupt() */
	dev->stream_last_frame = -2;
	dev->perf.frames = 0;
	dev->perf.framesdropped = 0;
	dev->perf.bytesout = 0;
	/* Can't capture frames faster than the video input */
	if (dev->capture.timeperframe < dev->frame_period)
		dev->capture.timeperframe = dev->frame_period;
	/* Move any leftover DONE buffers to the free pool */
	while ((buf = v4l2_q_del_head(&dev->stream_q_done)))
		buf->vidbuf.flags &= ~V4L2_BUF_FLAG_QUEUED;
	/* Kick off the machine */
	dev->streaming = 1;
	capture_grab_frame(dev);
	return 1;
}
/* VIDIOC_STREAMOFF: abort any in-flight grab, leave streaming mode and
 * clear the performance counters. No-op if not streaming. */
static void
capture_streamoff(struct capture_device *dev,
	__u32 type)
{
	if (!dev->streaming)
		return;
	if (type != V4L2_BUF_TYPE_CAPTURE)
	{
		debug_msg("STREAMOFF wrong buffer typen");
		return;
	}
	capture_abort(dev);
	dev->streaming = 0;
	/* Note: should really delay this till next capture */
	dev->perf.frames = 0;
	dev->perf.framesdropped = 0;
	dev->perf.bytesout = 0;
}
/* Read out and convert the next frame
 */
/* Stop the grabber, lock the hardware frame buffer, translate the frame
 * into output_buffer (user or kernel space per output_is_user), unlock,
 * and update the performance counters. Returns the number of bytes
 * produced or a negative errno from translate_image(). */
static int /* returns length of data or negative for error */
capture_imagereadout(struct capture_device *dev,
	__u8 *output_buffer,
	int output_size,
	int output_is_user)
{
	int len;
	__u8* input_buffer;
	dev->grabber_enabled = 0;
	grabbing_enable(dev, 0);
	// XXX
	input_buffer = mgavideo_lock_video( dev->mga );
	len = translate_image(dev, input_buffer, output_buffer,
		output_size, output_is_user);
	mgavideo_unlock_video( dev->mga );
	if (len < 0)
		return len;
	++dev->perf.frames;
	dev->perf.bytesout += len;
	return len;
}
- /* The hardware has issued the interrupt signal, do any post-capture
- * processing that may be necessary.
- * [This function is called indirectly through the immediate task queue;
- * it executes at elevated IRQL, but it is interruptible. (It's a b.h.)]
- */
- static void
- capture_interrupt(void *v)
- {
- struct capture_device *dev = (struct capture_device *)v;
- struct stream_buffer *buf;
- int len;
- struct timeval timestamp_rough;
- unsigned long raw_frame_num;
- unsigned long next_raw_frame_to_keep;
- unsigned long stream_frame_num;
- u64 temp64;
- /* TODO: Check for an interrupt pending on the device, and */
- /* return if there is no interrupt pending */
- /* (In this hardware-less demo I'll just check the completed flag) */
- if (!dev->grabber_enabled ||
- dev->capture_completed)
- return;
- if (!dev->ints_enabled)
- {
- err_msg("Can't process the interruptn");
- return;
- }
- dev->capture_completed = 1;
- {
- struct timeval curr;
- do_gettimeofday(&curr);
- debug_msg("cap interrupt: time: %d:%dn", curr.tv_sec, curr.tv_usec);
- }
- if (!dev->streaming)
- {
- dev->time_acquired = current_time_ms();
- /* DMA might not have finished, but we'll check in read() */
- //debug_msg("New frame readyn");
- debug_msg("interupt_cap: non_streaming wakup new_video_framen");
- wake_up_interruptible(&dev->new_video_frame);
- return;
- }
- /* Only get here in streaming mode */
- if (dev->stream_last_frame == -2)
- {/* First frame of the stream */
- v4l2_masterclock_gettime(&dev->stream_begin);
- dev->stream_last_frame = -1;
- }
- buf = v4l2_q_peek_head(&dev->stream_q_capture);
- if (buf == NULL)
- {/* No available buffers. Skip this frame. This is not an */
- /* error, it's a normal way to throttle the capture rate */
- dev->grabber_enabled = 0;
- grabbing_enable(dev, 0);
- capture_grab_frame(dev);
- return;
- }
- /* Compute current stream time */
- v4l2_masterclock_gettime(×tamp_rough);
- v4l2_timeval_delta(×tamp_rough,
- &dev->stream_begin, ×tamp_rough);
- /* Capture rate control */
- raw_frame_num = v4l2_timeval_divide(
- ×tamp_rough, dev->frame_period);
- temp64 = (u64)dev->capture.timeperframe
- * (dev->stream_last_frame + 1)
- + (dev->frame_period >> 1);
- next_raw_frame_to_keep =
- v4l2_math_div6432(temp64, dev->frame_period, NULL);
- #if 0
- if (raw_frame_num < next_raw_frame_to_keep)
- {/* Not time yet, don't keep this frame */
- dev->grabber_enabled = 0;
- grabbing_enable(dev, 0);
- capture_grab_frame(dev);
- return;
- }
- #endif
- {
- struct timeval curr;
- do_gettimeofday(&curr);
- debug_msg("cap start: time: %d:%dn", curr.tv_sec, curr.tv_usec);
- }
- /* Want this frame */
- len = capture_imagereadout(dev, buf->vaddress, buf->vidbuf.length, 0);
- {
- struct timeval curr;
- do_gettimeofday(&curr);
- debug_msg("cap end : time: %d:%dn", curr.tv_sec, curr.tv_usec);
- }
- if (len <= 0)
- {/* Frame no good, DMA did not finish, etc. */
- /* Begin capturing the next frame now */
- capture_grab_frame(dev);
- return;
- }
- /* Fill in the buffer information fields */
- buf->vidbuf.bytesused = len;
- /* only mark as done if both frames for interlace have been
- * seen
- */
- buf->vidbuf.flags |= V4L2_BUF_FLAG_DONE | V4L2_BUF_FLAG_KEYFRAME;
- buf->vidbuf.timestamp = timestamp_rough;
- stream_frame_num = v4l2_timeval_correct(&buf->vidbuf.timestamp,
- dev->capture.timeperframe);
- //debug_msg("Stream frame %4lu T= %lu.%06lun", stream_frame_num,
- // buf->vidbuf.timestamp.tv_sec,buf->vidbuf.timestamp.tv_usec);
- if (stream_frame_num > dev->stream_last_frame + 1)
- {/* We have missed one or more frames */
- dev->perf.framesdropped += stream_frame_num
- - dev->stream_last_frame + 1;
- }
- dev->stream_last_frame = stream_frame_num;
- /* Move buffer to done queue */
- buf = v4l2_q_del_head(&dev->stream_q_capture);
- v4l2_q_add_tail(&dev->stream_q_done, &buf->qnode);
- /* Begin capturing the next frame now */
- capture_grab_frame(dev);
- /* A new frame is ready! */
- debug_msg("interrupt_cap: waking up new_video_framen");
- wake_up_interruptible(&dev->new_video_frame);
- }
/* Read captured data into a user buffer.
 * Return: negative = error
 *	   0	    = keep waiting (caller loops/sleeps and retries)
 *	   positive = count of bytes read successfully
 *
 * A frame older than MAX_FRAME_AGE ms is considered stale and is
 * discarded; a fresh grab is started and the caller keeps waiting.
 */
static long
capture_read(struct capture_device *dev,
	     __u8 *user_buffer,
	     int user_buffer_size)
{
	int len = user_buffer_size;
	unsigned long now;

	if (!dev->ints_enabled)
		return -EIO;
	if (!dev->capture_completed)
	{/* No interrupt has occurred yet, or DMA didn't finish. */
		//debug_msg("No data ready.n");
		if (!dev->grabber_enabled)
			capture_grab_frame(dev);
		return 0;/* caller should keep waiting */
	}

	now = current_time_ms();
	if (now - dev->time_acquired > MAX_FRAME_AGE)
	{/* Frame in buffer is stale, get a new one */
		debug_msg("Stale frame, re-acquiring.n");
		dev->grabber_enabled = 0;
		grabbing_enable(dev, 0);
		capture_grab_frame(dev);
		return 0;/* caller should keep waiting */
	}
	/* output_is_user = 1: user_buffer is a user-space pointer */
	len = capture_imagereadout(dev, user_buffer, user_buffer_size, 1);
	capture_grab_frame(dev);
	return len;
}
/* Stop capturing and free all resources used for capture.
 * Safe to call redundantly: streaming is stopped first, then any
 * in-progress capture is aborted and the DMA list page released.
 */
static void
capture_close(struct capture_device *dev)
{
//	int i;
	if (dev->streaming)
		capture_streamoff(dev, V4L2_BUF_TYPE_CAPTURE);
	capture_abort(dev);
	dev->ready_to_capture = 0;
	translate_close(dev);
	if (dev->capture_dma_list)
		free_page((unsigned long)dev->capture_dma_list);
	dev->capture_dma_list = 0;
#if 0
	/* Stream-buffer teardown is handled by munmap (mmap_vma_close);
	 * kept here disabled for reference. */
	for (i = 0; i < MAX_CAPTURE_BUFFERS; ++i)
	{
		dev->stream_buf[i].requested = 0;
		if (dev->stream_buf[i].vaddress)
			vfree(dev->stream_buf[i].vaddress);
		dev->stream_buf[i].vaddress = NULL;
		if (dev->stream_buf[i].dma_list)
			free_page((unsigned long)dev->stream_buf[i].dma_list);
		dev->stream_buf[i].dma_list = NULL;
	}
#endif
}
- /*
- *
- * I N T E R R U P T R O U T I N E S
- *
- */
- static void
- interrupt_disable(struct capture_device *dev)
- {
- if (!dev->ints_enabled)
- return;
- dev->ints_enabled = 0;
- /* TODO: Disable interrupts on the device */
- mgavideo_ivsync_enable( dev->mga, 0 );
- /* Wake up any processes that might be waiting for a frame */
- /* and let them return an error */
- debug_msg("interrupt_disable: waking up new_video_framen");
- wake_up_interruptible(&dev->new_video_frame);
- }
/* (Re)enable frame interrupts.  Registers capture_interrupt() as the
 * bottom half for the fields selected by the current client format
 * (top/bottom field flags), then turns vsync interrupts on.
 * If interrupts were already enabled they are cleanly disabled first.
 */
static void
interrupt_enable(struct capture_device *dev)
{
	int even_fields;
	int odd_fields;
	if (dev->ints_enabled)
		interrupt_disable(dev);
	dev->ints_enabled = 1;
	/* TODO: Enable interrupts on the device */
	even_fields = (dev->clientfmt.flags & V4L2_FMT_FLAG_TOPFIELD) ? 1:0;
	odd_fields  = (dev->clientfmt.flags & V4L2_FMT_FLAG_BOTFIELD) ? 1:0;
	mgavideo_register_bh( dev->mga, even_fields, odd_fields,
			      capture_interrupt, dev );
	mgavideo_ivsync_enable( dev->mga, 1 );
}
- /*
- *
- * M E M O R Y M A P P I N G
- *
- */
- static struct stream_buffer *
- mmap_stream_buffer_from_offset(struct capture_device *dev,
- unsigned long offset)
- {
- int i;
- for (i = 0; i < MAX_CAPTURE_BUFFERS; ++i)
- if (offset == dev->stream_buf[i].vidbuf.offset)
- return &dev->stream_buf[i];
- return NULL;
- }
/* Handle VIDIOC_REQBUFS: grant between 1 and MAX_CAPTURE_BUFFERS
 * stream buffers.  Buffers are only described here (index, offset
 * cookie, page-rounded length); actual memory is allocated at mmap()
 * time.  Returns 1 on success, 0 if buffers are already mapped.
 */
static int
mmap_request_buffers(struct capture_device *dev,
		     struct v4l2_requestbuffers *req)
{
	int i;
	u32 buflen;
	u32 type;
	if (dev->stream_buffers_mapped)
		return 0;/* can't make requests if buffers are mapped */
	/* Clamp the request into [1, MAX_CAPTURE_BUFFERS] */
	if (req->count < 1)
		req->count = 1;
	if (req->count > MAX_CAPTURE_BUFFERS)
		req->count = MAX_CAPTURE_BUFFERS;
	type = V4L2_BUF_TYPE_CAPTURE;
	dev->stream_contig_map = 0;
	if (req->type & V4L2_BUF_REQ_CONTIG)
	{
		/* All buffers in one contiguous mapping */
		dev->stream_contig_map = 1;
		req->type = type | V4L2_BUF_REQ_CONTIG;
		/* note: _REQ_CONTIG is only used in v4l2_requestbuffers */
	}
	/* The buffer length needs to be a multiple of the page size */
	buflen = (dev->clientfmt.sizeimage + PAGE_SIZE - 1)
		 & ~(PAGE_SIZE - 1);
	debug_msg("Granting %d buffersn",req->count);
	/* Now initialize the buffer structures. Don't allocate the */
	/* buffers until they're mapped. */
	for (i = 0; i < req->count; ++i)
	{
		dev->stream_buf[i].requested = 1;
		dev->stream_buf[i].vidbuf.index = i;
		dev->stream_buf[i].vidbuf.type = type;
		/* offset is only a lookup cookie, not a real address */
		dev->stream_buf[i].vidbuf.offset = 4*i;/* anything unique */
		dev->stream_buf[i].vidbuf.length = buflen;
		dev->stream_buf[i].vidbuf.bytesused = 0;
		dev->stream_buf[i].vidbuf.timestamp.tv_sec = 0;
		dev->stream_buf[i].vidbuf.timestamp.tv_usec = 0;
		dev->stream_buf[i].vidbuf.flags = 0;
	}
	for (i = req->count; i < MAX_CAPTURE_BUFFERS; ++i)
		dev->stream_buf[i].requested = 0;
	dev->stream_buffers_requested = req->count;
	return 1;
}
- static void
- mmap_unrequest_buffers(struct capture_device *dev)
- {
- int i;
- if (dev->stream_buffers_requested == 0 ||
- dev->stream_buffers_mapped)
- return;
- for (i = 0; i < MAX_CAPTURE_BUFFERS; ++i)
- dev->stream_buf[i].requested = 0;
- dev->stream_buffers_requested = 0;
- }
/* VMA open hook: nothing to do beyond (historically) bumping the
 * module use count, now commented out.  The device lookup only
 * serves as a sanity check; `dev` is otherwise unused.
 */
static void
mmap_vma_open(struct vm_area_struct *vma)
{
	struct capture_device *dev =
		capture_device_from_file(vma->vm_file);
	if (dev == NULL)
		return;
	//debug_msg("vma_open calledn");
	//MOD_INC_USE_COUNT;
}
/* VMA close hook: tear down the buffer(s) backing this mapping.
 * In contiguous-map mode a single munmap() releases all mapped
 * buffers; they share one vmalloc() area owned by buffer 0, which is
 * why vfree() is only called on the first iteration (i == 0).
 * NOTE(review): `buf` from mmap_stream_buffer_from_offset() is not
 * checked for NULL before use in the non-contiguous path — confirm
 * the offset is always valid here.
 */
static void
mmap_vma_close(struct vm_area_struct *vma)
{
	struct capture_device *dev =
		capture_device_from_file(vma->vm_file);
	struct stream_buffer *buf =
		mmap_stream_buffer_from_offset(dev, vma->vm_offset);
	int i, n = 1;
	if (dev->stream_contig_map)
	{/* Unmap all the buffers in one stroke */
		n = dev->stream_buffers_mapped;
		buf = &dev->stream_buf[0];
	}
	for (i = 0; i < n; ++i)
	{
		if (dev->streaming)
		{
			info_msg("Warning- munmap() called while streamingn");
			capture_streamoff(dev, buf->vidbuf.type);
		}
		/* Remove from whichever queue the buffer is on */
		v4l2_q_yank_node(&dev->stream_q_capture, &buf->qnode);
		v4l2_q_yank_node(&dev->stream_q_done, &buf->qnode);
		/* Buffer 0 owns the (possibly shared) vmalloc area */
		if (buf->vaddress != NULL && i == 0)
			vfree(buf->vaddress);
		buf->vaddress = NULL;
		if (buf->dma_list)
			free_page((unsigned long)buf->dma_list);
		buf->dma_list = NULL;
		buf->vidbuf.flags = 0;
		//debug_msg("Buffer %d deallocatedn",(int)vma->vm_offset/4);
		++buf;
		if (dev->stream_buffers_mapped > 0)
			--dev->stream_buffers_mapped;
	}
	//MOD_DEC_USE_COUNT;
}
/* Page-fault handler for mmap()ed buffers (2.2-kernel `nopage`
 * protocol: return the physical page address, or 0 on failure).
 * Translates the faulting address into an offset within the
 * vmalloc()ed buffer (or the single contiguous area spanning all n
 * buffers), looks up the backing page, and bumps its reference
 * count in mem_map before handing it to the VM.
 */
static unsigned long
mmap_vma_nopage(struct vm_area_struct *vma,
		unsigned long address, int write)
{
	struct capture_device *dev;
	struct stream_buffer *buf;
	unsigned long offset_into_buffer;
	unsigned long page;
	int n = 1;
	dev = capture_device_from_file(vma->vm_file);
	if (dev == NULL)
		return 0;
	if (dev->stream_contig_map)
	{
		/* One contiguous area covers all requested buffers */
		buf = &dev->stream_buf[0];
		n = dev->stream_buffers_requested;
	}
	else
		buf = mmap_stream_buffer_from_offset(dev, vma->vm_offset);
	if (buf == NULL)
		return 0;
	offset_into_buffer = address - vma->vm_start;
	if (offset_into_buffer >= buf->vidbuf.length * n)
	{
		err_msg("Attempt to read past end of mmap() buffern");
		return 0;
	}
	page = v4l2_vmalloc_to_page(buf->vaddress + offset_into_buffer);
	if (page == 0)
		return 0;
	/* Take a reference on the page before giving it to the VM */
	atomic_inc(&mem_map[MAP_NR(page)].count);
	return page;
}
/* VM operations for mmap()ed capture buffers.  Positional init per
 * the 2.2-era vm_operations_struct layout: open, close, unmap,
 * protect, sync, advise, nopage (remaining hooks default to NULL).
 */
static struct vm_operations_struct capture_vma_operations =
{
	mmap_vma_open, mmap_vma_close, NULL, NULL, NULL, NULL,
	mmap_vma_nopage,
};
- /*
- *
- * V I D E O F O R L I N U X I N T E R F A C I N G
- *
- */
/* Open entry point.  Multiple simultaneous opens are allowed (up to
 * MAX_OPENS) but only ONE of them may be a capturing open; the rest
 * must pass O_NONCAP.  On the first open of the device the capture
 * state and stream queues are (re)initialized.
 * Returns 0 on success, -EBUSY when no slot or a capturing open
 * already exists; *idptr receives the per-open state handle.
 */
static int
v4l2_open(struct v4l2_device *v, int flags, void **idptr)
{
	struct capture_device *dev = (struct capture_device *)v;
	int i, n;
	int cap;
	/* Scan for a free slot, and whether someone is capturing */
	for (i = 0, n = -1, cap = 0; i < MAX_OPENS; ++i)
	{
		if (!dev->open_data[i].isopen)
			n = i;/* available open_data structure */
		else if (!dev->open_data[i].noncapturing)
			cap = 1;/* another open is already capturing */
	}
	if (n == -1)/* No available open_data structures */
	{
		debug_msg("No more opens on this devicen");
		return -EBUSY;
	}
	if (flags & O_NONCAP)/* Non-capturing open */
		dev->open_data[n].noncapturing = 1;
	else if (cap)
	{
		debug_msg("No more capturing opens on this devicen");
		return -EBUSY;
	}
	else
	{
		dev->open_data[n].noncapturing = 0;
		/* Keep track of whether there is a capturing open */
		++dev->capturing_opens;
		dev->perf.frames = 0;
		dev->perf.framesdropped = 0;
		dev->perf.bytesout = 0;
	}
	//MOD_INC_USE_COUNT;
	++dev->open_count;
	dev->open_data[n].isopen = 1;
	dev->open_data[n].dev = dev;
	*idptr = &dev->open_data[n];
	if (dev->open_count == 1)
	{
		/* First open: reset capture state and queues */
		dev->ready_to_capture = 0;/* benchmark changes parameters! */
		dev->capture_completed = 0;
		dev->grabber_enabled = 0;
		v4l2_q_init(&dev->stream_q_capture);
		v4l2_q_init(&dev->stream_q_done);
	}
	debug_msg("Open succeededn");
	/* frame counter for test images only */
	if (!dev->open_data[n].noncapturing)
		dev->h=dev->m=dev->s=dev->f=0;
	return 0;
}
- static void
- v4l2_close(void *id)
- {
- struct device_open *o = (struct device_open *)id;
- struct capture_device *dev = o->dev;
- if (!o->noncapturing)
- {
- --dev->capturing_opens;
- debug_msg("Closen");
- }
- o->isopen = 0;
- --dev->open_count;
- if (dev->open_count == 0)
- {
- interrupt_disable(dev);
- capture_close(dev);
- }
- //MOD_DEC_USE_COUNT;
- }
- static long
- v4l2_write(void *id,
- const char *buf,
- unsigned long count,
- int noblock)
- {
- debug_msg("Write() not handledn");
- return -EINVAL;
- }
- /* The arguments are already copied into kernel memory, so don't use
- copy_from_user() or copy_to_user() on arg. */
- static int
- v4l2_ioctl(void *id,
- unsigned int cmd,
- void *arg)
- {
- struct device_open *o = (struct device_open *)id;
- struct capture_device *dev = o->dev;
- //debug_msg("ioctl %dn", _IOC_NR(cmd));
- switch(cmd)
- {
- case VIDIOC_QUERYCAP:
- {
- struct v4l2_capability *b = arg;
- strcpy(b->name, dev->v.name);
- b->type = V4L2_TYPE_CAPTURE;
- b->flags = V4L2_FLAG_READ |
- V4L2_FLAG_STREAMING |
- V4L2_FLAG_PREVIEW |
- V4L2_FLAG_TUNER |
- V4L2_FLAG_SELECT;
- b->inputs = KS_INPUT_COUNT;
- b->outputs = 0;
- b->audios = 0;
- b->maxwidth = MAX_WIDTH;
- b->maxheight = MAX_HEIGHT;
- b->minwidth = MIN_WIDTH;
- b->minheight = MIN_HEIGHT;
- b->maxframerate = 30;
- return 0;
- }
- case VIDIOC_ENUM_CAPFMT:
- {
- struct v4l2_fmtdesc *f = arg;
- if (f->index < 0 || f->index >= NUM_CAPFMT)
- return -EINVAL;
- *f = capfmt[f->index];
- return 0;
- }
- case VIDIOC_G_FMT:
- {
- memcpy(arg, &dev->clientfmt, sizeof(dev->clientfmt));
- return 0;
- }
- case VIDIOC_S_FMT:
- {
- struct v4l2_format *fmt = arg;
- if (o->noncapturing)
- {
- debug_msg("S_FMT illegal in non-capturing openn");
- return -EPERM;
- }
- dev->clientfmt = *fmt;
- if (!capture_new_format(dev))
- return -EINVAL;
- mmap_unrequest_buffers(dev);
- *fmt = dev->clientfmt;
- return 0;
- }
- case VIDIOC_G_COMP: return -EINVAL;
- case VIDIOC_S_COMP: return -EINVAL;
- case VIDIOC_REQBUFS:
- {
- struct v4l2_requestbuffers *req = arg;
- if (o->noncapturing)
- {
- debug_msg("REQBUFS illegal in non-capturing openn");
- return -EPERM;
- }
- if (dev->stream_buffers_mapped)
- {
- debug_msg("Can't request buffers if buffers are "
- "already mappedn");
- return -EPERM;
- }
- capture_begin(dev);
- if (!mmap_request_buffers(dev, req))
- return -EINVAL;
- return 0;
- }
- case VIDIOC_QUERYBUF:
- {
- struct v4l2_buffer *buf = arg;
- int i;
- if (o->noncapturing)
- {
- debug_msg("QUERYBUF illegal in non-capturing openn");
- return -EPERM;
- }
- i = buf->index;
- if (i < 0 || i >= MAX_CAPTURE_BUFFERS ||
- !dev->stream_buf[i].requested ||
- (buf->type & V4L2_BUF_TYPE_field) !=
- (dev->stream_buf[i].vidbuf.type & V4L2_BUF_TYPE_field))
- {
- debug_msg("QUERYBUF bad parametern");
- return -EINVAL;
- }
- *buf = dev->stream_buf[i].vidbuf;
- return 0;
- }
- case VIDIOC_QBUF:
- {
- struct v4l2_buffer *buf = arg;
- if (o->noncapturing)
- {
- debug_msg("QBUF illegal in non-capturing openn");
- return -EPERM;
- }
- if (!dev->stream_buffers_mapped)
- {
- debug_msg("QBUF no buffers are mappedn");
- return -EINVAL;
- }
- if (!capture_queuebuffer(dev, buf))
- return -EINVAL;
- return 0;
- }
- case VIDIOC_DQBUF:
- {
- struct v4l2_buffer *buf = arg;
- if (o->noncapturing)
- {
- debug_msg("DQBUF illegal in non-capturing openn");
- return -EPERM;
- }
- if (!capture_dequeuebuffer(dev, buf))
- return -EINVAL;
- return 0;
- }
- case VIDIOC_STREAMON:
- {
- __u32 type = (__u32)arg;
- if (o->noncapturing)
- {
- debug_msg("STREAMON illegal in non-capturing openn");
- return -EPERM;
- }
- if (!capture_streamon(dev, type))
- return -EINVAL;
- return 0;
- }
- case VIDIOC_STREAMOFF:
- {
- __u32 type = (__u32)arg;
- if (o->noncapturing)
- {
- debug_msg("STREAMOFF illegal in non-capturing openn");
- return -EPERM;
- }
- capture_streamoff(dev, type);
- return 0;
- }
- /* Video Preview support */
- case VIDIOC_ENUM_FBUFFMT:
- {
- struct v4l2_fmtdesc* fmt = (struct v4l2_fmtdesc*)arg;
- strcpy( fmt->description, "Video Overlay" );
- fmt->pixelformat = 0;
- fmt->flags = 0;
- fmt->depth = 0;
- return 0;
- }
- case VIDIOC_G_FBUF:
- (struct v4l2_framebuffer*)arg = &dev->fbuf;
- return 0;
- case VIDIOC_S_FBUF: return -EINVAL;
- case VIDIOC_G_WIN:
- (struct v4l2_window*)arg = &dev->window;
- return 0;
- case VIDIOC_S_WIN:
- {
- struct v4l2_window* win = (struct v4l2_window*)arg;
- int red, green, blue;
- if( ( win->clips != NULL ) || ( win->clipcount != 0 ) )
- return -EINVAL;
- memcpy( &dev->window, win, sizeof(dev->window) );
-
- /* set on hardware */
- mgavideo_set_window( dev->mga, win->x, win->y,
- win->width, win->height );
- if ((long)win->chromakey < 0) {
- mgavideo_set_overlay(dev->mga);
- } else {
- red = (win->chromakey >> 16) & 0xff;
- green = (win->chromakey >> 8) & 0xff;
- blue = (win->chromakey >> 0) & 0xff;
- mgavideo_set_colorkey( dev->mga, red, green, blue );
- }
- return 0;
- }
-
- case VIDIOC_PREVIEW:
- mgavideo_ivsync_enable( dev->mga, *(int *)arg );
- mgavideo_preview_enable( dev->mga, *(int*)arg );
- return 0;
- case VIDIOC_G_PERF:
- {
- memcpy(arg, &dev->perf, sizeof(dev->perf));
- return 0;
- }
- case VIDIOC_G_INPUT:
- {
- memcpy(arg, &dev->input, sizeof(dev->input));
- return 0;
- }
- case VIDIOC_S_INPUT:
- {
- int input = (int)arg;
- if (input < 0 || input >= KS_INPUT_COUNT)
- {
- debug_msg("Input out of range %dn", input);
- return -EINVAL;
- }
- if (input != dev->input)
- {
- dev->input = input;
- set_video_input(dev, input);
- }
- return 0;
- }
- case VIDIOC_G_PARM:
- {
- memcpy(arg, &dev->capture, sizeof(dev->capture));
- return 0;
- }
- case VIDIOC_S_PARM:
- {
- struct v4l2_captureparm *vp = arg;
- if (vp->capturemode & ~dev->capture.capability)
- {
- debug_msg("PARM unsupported capture capabilityn");
- return -EINVAL;
- }
- if ((dev->capture.capability & V4L2_CAP_TIMEPERFRAME) &&
- vp->timeperframe < 10000)
- {
- debug_msg("PARM time per frame out of range %ldn",
- vp->timeperframe);
- return -EINVAL;
- }
- if (vp->capturemode != dev->capture.capturemode &&
- !o->noncapturing && dev->streaming)
- return -EINVAL;
- if (o->noncapturing)
- return 0;
- if (vp->capturemode != dev->capture.capturemode)
- {
- dev->capture.capturemode = vp->capturemode;
- capture_new_format(dev);
- }
- if ((vp->capturemode & V4L2_CAP_TIMEPERFRAME) &&
- vp->timeperframe >= dev->frame_period)
- dev->capture.timeperframe = vp->timeperframe;
- else
- dev->capture.timeperframe = dev->frame_period;
- return 0;
- }
- case VIDIOC_G_STD:
- {
- struct v4l2_standard *std = arg;
- v4l2_video_std_construct(std, dev->standard, 0);
- return 0;
- }
- case VIDIOC_S_STD:
- {
- struct v4l2_standard *std = arg;
- int id;
- if ((o->noncapturing && dev->capturing_opens) ||
- dev->stream_buffers_mapped) {
- printk("noncapturing=%d capturing_opens=%d mapped=%dn",
- o->noncapturing, dev->capturing_opens, dev->stream_buffers_mapped);
- return -EPERM;
- }
- id = v4l2_video_std_confirm(std);
- if (!((1 << id) & dev->standards))
- {
- printk("Bad standard: %un", (unsigned)id);
- debug_msg("Bad standard: %un", (unsigned)id);
- return -EINVAL;
- }
- set_video_standard(dev, id);
- return 0;
- }
- case VIDIOC_ENUMSTD:
- {
- struct v4l2_enumstd *estd = arg;
- __u32 b, i;
- if (estd->index < 0 || estd->index > 30)
- return -EINVAL;
- for (b = 1, i = 0; b < 32; ++b)
- {
- if (((1 << b) & dev->standards) == 0)
- continue;
- if (i == estd->index)
- {
- v4l2_video_std_construct(&estd->std, b, 0);
- estd->inputs = (__u32)-1; /* all inputs */
- estd->outputs = 0;
- return 0;
- }
- ++i;
- }
- return -EINVAL;
- }
- case VIDIOC_ENUMINPUT:
- {
- struct v4l2_input *vi = arg;
- if (vi->index < 0 || vi->index >= KS_INPUT_COUNT)
- return -EINVAL;
- *vi = dev->source[vi->index].input;
- return 0;
- }
- case VIDIOC_QUERYCTRL:
- {
- struct v4l2_queryctrl *qc = arg;
- int i;
- i = find_vctrl(qc->id);
- if (i < 0)
- {
- return i;
- }
- /* V4L2 filled in category and catname, preserve them */
- capture_control[i].category = qc->category;
- memcpy(capture_control[i].catname, qc->catname,
- sizeof(qc->catname));
- *qc = capture_control[i];
- return 0;
- }
- case VIDIOC_QUERYMENU:
- {
- struct v4l2_querymenu *qm = arg;
- return vctrl_querymenu(qm);
- }
- case VIDIOC_G_CTRL:
- {
- struct v4l2_control *vc = arg;
- int i;
- i = find_vctrl(vc->id);
- if (i < 0)
- return i;
- vc->value = dev->source[dev->input].control[i];
- return 0;
- }
- case VIDIOC_S_CTRL:
- {
- struct v4l2_control *vc = arg;
- int i;
- i = find_vctrl(vc->id);
- if (i < 0)
- return i;
- dev->source[dev->input].control[i] = vc->value;
- device_tone_controls(dev);
- return 0;
- }
- case VIDIOC_G_TUNER:
- {
- struct v4l2_tuner* tuner = (struct v4l2_tuner*)arg;
- if( dev->source[tuner->input].input.type !=
- V4L2_INPUT_TYPE_TUNER ) {
- return -EINVAL;
- }
- memcpy( tuner, &dev->source[tuner->input].tuner,
- sizeof( *tuner ) );
- break;
- }
- case VIDIOC_S_TUNER: return -EINVAL;
- case VIDIOC_G_FREQ:
- memcpy(arg, &dev->source[dev->input].freq, sizeof(int));
- break;
-
- case VIDIOC_S_FREQ:
- set_video_freq( dev, arg );
- break;
- case VIDIOC_G_AUDIO: return -EINVAL;
- {
- struct v4l2_audio* audio = (struct v4l2_audio*)arg;
- if( dev->source[audio->audio].input.capability &
- V4L2_INPUT_CAP_AUDIO ) {
- return -EINVAL;
- }
- memcpy( audio, &dev->source[audio->audio].audio,
- sizeof( *audio ) );
- break;
- }
- case VIDIOC_S_AUDIO: return -EINVAL;
- default:
- return -ENOIOCTLCMD;
- }
- return 0;
- }
/* mmap() entry point.  Validates the request against the granted
 * buffers, vmalloc()s the backing memory, and installs
 * capture_vma_operations so faults are served by mmap_vma_nopage().
 * In contiguous mode one vmalloc of (length * n) backs all n buffers;
 * buffer 0 owns the allocation and buffers 1..n-1 point into it via
 * the buf[-1] chaining below.
 */
static int
v4l2_mmap(void *id,
	  struct vm_area_struct *vma)
{
	struct device_open *o = (struct device_open *)id;
	struct capture_device *dev = o->dev;
	struct stream_buffer *buf;
	int i, n = 1;
	if (o->noncapturing)
	{
		debug_msg("mmap() called on non-capturing openn");
		return -ENODEV;
	}
	buf = mmap_stream_buffer_from_offset(dev, vma->vm_offset);
	if (dev->stream_contig_map)
	{/* N buffers in one contiguous map */
		buf = &dev->stream_buf[0];
		n = dev->stream_buffers_requested;
	}
	if (buf == NULL)
	{
		debug_msg("mmap() Invalid offset parametern");
		return -EINVAL;/* no such buffer */
	}
	/* The mapping must cover the buffer(s) exactly */
	if (buf->vidbuf.length * n != vma->vm_end - vma->vm_start)
	{
		debug_msg("mmap() Wrong length parametern");
		return -EINVAL;/* wrong length */
	}
	for (i = 0; i < n; ++i)
	{
		if (!buf->requested)
		{
			debug_msg("mmap() Buffer is not available for"
				  " mappingn");
			return -EINVAL;/* not requested */
		}
		if (buf->vidbuf.flags & V4L2_BUF_FLAG_MAPPED)
		{
			debug_msg("mmap() Buffer is already mappedn");
			return -EINVAL;/* already mapped */
		}
		if (buf->vaddress != NULL)
			vfree(buf->vaddress);
		/* Buffer 0 allocates for everyone; the rest point into
		 * the previous buffer's area */
		if (i == 0)
			buf->vaddress = vmalloc(buf->vidbuf.length * n);
		else
			buf->vaddress = buf[-1].vaddress + buf->vidbuf.length;
		if (buf->vaddress == NULL)
		{
			err_msg("Could not allocate mmap() buffern");
			return -ENODEV;
		}
#if 0	/* TODO: build scatter list for buffer if using DMA */
		if ((using DMA) &&
		    !bm_build_scatter_list(dev, buf->vaddress, &buf->dma_list))
			return -ENODEV;
#endif
		buf->vidbuf.flags |= V4L2_BUF_FLAG_MAPPED;
		++dev->stream_buffers_mapped;
		++buf;
	}
	vma->vm_ops = &capture_vma_operations;
	if (vma->vm_ops->open)
		vma->vm_ops->open(vma);
	/* Note: vma->vm_file will be set up by V4L2 */
	return 0;
}
/* poll()/select() entry point.
 * Streaming mode: readable when a buffer is on the done queue;
 * POLLERR if no buffers are queued for capture (nothing will ever
 * become ready); otherwise waits on new_video_frame.
 * Read mode: readable when a frame has completed; otherwise kicks
 * off a grab and waits.
 */
static int
v4l2_poll(void *id,
	  struct file *file,
	  poll_table *table)
{
	struct device_open *o = (struct device_open *)id;
	struct capture_device *dev = o->dev;
	debug_msg("starting pool: noncapturing=%d streaming=%dn",
		  o->noncapturing, dev->streaming);
	if (o->noncapturing)
	{
		debug_msg("poll() illegal in non-capturing openn");
		return POLLERR;
	}
	if (dev->streaming)
	{
		void *node;
		node = v4l2_q_peek_head(&dev->stream_q_done);
		debug_msg("poll: check done queue: %xn", node);
		if (node != NULL)
			return (POLLIN | POLLRDNORM);/* data is ready now */
		node = v4l2_q_peek_head(&dev->stream_q_capture);
		debug_msg("poll: check capture queue: %xn", node);
		if (node == NULL)
			return POLLERR;	/* no frames queued */
		{
			struct timeval curr;
			do_gettimeofday(&curr);
			debug_msg("waiting on new_video_frame: time: %d:%dn", curr.tv_sec, curr.tv_usec);
		}
		poll_wait(file, &dev->new_video_frame, table);
		{
			struct timeval curr;
			do_gettimeofday(&curr);
			debug_msg("done waiting on new_video_frame: time: %d:%dn", curr.tv_sec, curr.tv_usec);
		}
		return 0;
	}
	/* Capture is through read() call */
	if (dev->capture_completed)/* data is ready now */
		return (POLLIN | POLLRDNORM);
	capture_grab_frame(dev);/* does nothing if capture is in progress */
	if (!dev->ready_to_capture)/* Can't grab frames! */
		return POLLERR;
	poll_wait(file, &dev->new_video_frame, table);
	return 0;
}
/* read() entry point (only valid when not streaming).
 * Starts a grab if needed and loops until capture_read() yields data,
 * the caller would block (noblock), or a ~200 ms (HZ/5) timeout
 * expires.  The cli()/sti() pair closes the race between checking
 * capture_completed and going to sleep: the wake-up from the bottom
 * half cannot slip in between the test and the sleep.
 * Returns bytes read, 0 on timeout / not-ready, or -errno.
 */
static long
v4l2_read(void *id,
	  char *buf,
	  unsigned long count,
	  int noblock)
{
	struct device_open *o = (struct device_open *)id;
	struct capture_device *dev = o->dev;
	long len = 0;
	long my_timeout;
	if (o->noncapturing)
	{
		debug_msg("read() illegal in non-capturing openn");
		return -EPERM;
	}
	if (dev->streaming)
	{
		debug_msg("Can't read() when streaming is onn");
		return -EPERM;
	}
	capture_grab_frame(dev);/* does nothing if capture is in progress */
	if (!dev->ready_to_capture)
	{
		debug_msg("Can't grab frames!n");
		return 0;
	}
	my_timeout = HZ / 5;
#if LINUX_VERSION_CODE < KERNEL_VERSION(2,1,127)
	current->timeout = jiffies + my_timeout;
#endif
	while (len == 0)
	{
		if (noblock)
		{
			if (!dev->capture_completed)
				return -EAGAIN;
		}
		else
		{
			/* watch out for race condition going to sleep! */
			cli();
			if (!dev->capture_completed)
			{
				debug_msg("read: wainting on new_video_framen");
#if LINUX_VERSION_CODE < KERNEL_VERSION(2,1,127)
				interruptible_sleep_on(&dev->new_video_frame);
#else
				my_timeout = interruptible_sleep_on_timeout(
					&dev->new_video_frame, my_timeout);
#endif
				debug_msg("readL wakeup on new_video_timeoutn");
			}
			sti();
		}
#if LINUX_VERSION_CODE < KERNEL_VERSION(2,1,127)
		if (current->timeout <= jiffies)
#else
		if (my_timeout == 0)
#endif
		{
			printk("Timeout on readn");
			debug_msg("Timeout on readn");
			break;
		}
		/* 0 => keep waiting; >0 => bytes copied; <0 => error */
		len = capture_read(dev, buf, count);
	}
#if LINUX_VERSION_CODE < KERNEL_VERSION(2,1,127)
	current->timeout = 0;
#endif
	//debug_msg("read %dn", (int)len);
	return len;
}
- /*
- * Remaining initialization of video decoder etc. This is only
- * done when the device is successfully identified and registered.
- */
/* Remaining initialization of video decoder etc.  Called by the V4L2
 * layer once the device has been successfully identified and
 * registered.  Populates the input/tuner/audio source tables, the
 * supported-standards mask, default capture parameters, default
 * client format, and the preview window/framebuffer defaults.
 * Always returns 0.
 */
static int
v4l2_init_done(struct v4l2_device *v)
{
	struct capture_device *dev = (struct capture_device *)v;
	int i;
	/* Initialize video input array */
	for (i = 0; i < KS_INPUT_COUNT; ++i)
	{
		/* Initialize video control properties */
		dev->source[i].control[VCTRL_BRIGHTNESS] =
			capture_control[VCTRL_BRIGHTNESS].default_value;
		dev->source[i].control[VCTRL_CONTRAST] =
			capture_control[VCTRL_CONTRAST].default_value;
		dev->source[i].control[VCTRL_SATURATION] =
			capture_control[VCTRL_SATURATION].default_value;
		dev->source[i].control[VCTRL_HUE] =
			capture_control[VCTRL_HUE].default_value;
	}
	/* Composite input */
	dev->source[KS_INPUT_COMPOSITE].input.index = KS_INPUT_COMPOSITE;
	strcpy(dev->source[KS_INPUT_COMPOSITE].input.name, "Composite");
	dev->source[KS_INPUT_COMPOSITE].input.type = V4L2_INPUT_TYPE_CAMERA;
	dev->source[KS_INPUT_COMPOSITE].input.capability = V4L2_INPUT_CAP_AUDIO;
	dev->source[KS_INPUT_COMPOSITE].input.assoc_audio = 0;
	/* S-Video input */
	dev->source[KS_INPUT_SVIDEO].input.index = KS_INPUT_SVIDEO;
	strcpy(dev->source[KS_INPUT_SVIDEO].input.name, "S-Video");
	dev->source[KS_INPUT_SVIDEO].input.type = V4L2_INPUT_TYPE_CAMERA;
	dev->source[KS_INPUT_SVIDEO].input.capability = 0;
	dev->source[KS_INPUT_SVIDEO].input.assoc_audio = 0;
	/* Tuner input, with its tuner and audio descriptors */
	dev->source[KS_INPUT_TUNER].input.index = KS_INPUT_TUNER;
	strcpy(dev->source[KS_INPUT_TUNER].input.name, "Tuner");
	dev->source[KS_INPUT_TUNER].input.type = V4L2_INPUT_TYPE_TUNER;
	dev->source[KS_INPUT_TUNER].input.capability = V4L2_INPUT_CAP_AUDIO;
	dev->source[KS_INPUT_TUNER].input.assoc_audio = 0;
	dev->source[KS_INPUT_TUNER].tuner.input = KS_INPUT_TUNER;
	strcpy(dev->source[KS_INPUT_TUNER].tuner.name, "Tuner");
	dev->source[KS_INPUT_TUNER].tuner.capability = 0;
	dev->source[KS_INPUT_TUNER].tuner.rangelow = 552500 / 625;
	dev->source[KS_INPUT_TUNER].tuner.rangehigh = 8012500 / 625;
	dev->source[KS_INPUT_TUNER].tuner.rxsubchans = V4L2_TUNER_SUB_MONO;
	dev->source[KS_INPUT_TUNER].tuner.audmode = V4L2_TUNER_MODE_MONO;
	dev->source[KS_INPUT_TUNER].tuner.afc = 0;
	dev->source[KS_INPUT_TUNER].audio.audio = 0;
	strcpy(dev->source[KS_INPUT_TUNER].audio.name, "Audio");
	dev->source[KS_INPUT_TUNER].audio.capability = 0;
	dev->source[KS_INPUT_TUNER].audio.mode = 0;
	/* Digital YUV656 inputs */
	dev->source[KS_INPUT_YUV656_60HZ].input.index = KS_INPUT_YUV656_60HZ;
	strcpy(dev->source[KS_INPUT_YUV656_60HZ].input.name, "YUV656 60Hz");
	dev->source[KS_INPUT_YUV656_60HZ].input.type = V4L2_INPUT_TYPE_CAMERA;
	dev->source[KS_INPUT_YUV656_60HZ].input.capability = 0;
	dev->source[KS_INPUT_YUV656_60HZ].input.assoc_audio = 0;
	dev->source[KS_INPUT_YUV656_50HZ].input.index = KS_INPUT_YUV656_50HZ;
	strcpy(dev->source[KS_INPUT_YUV656_50HZ].input.name, "YUV656 50Hz");
	dev->source[KS_INPUT_YUV656_50HZ].input.type = V4L2_INPUT_TYPE_CAMERA;
	dev->source[KS_INPUT_YUV656_50HZ].input.capability = 0;
	dev->source[KS_INPUT_YUV656_50HZ].input.assoc_audio = 0;

	dev->standards = (1<<V4L2_STD_PAL)
			 | (1<<V4L2_STD_NTSC)
			 | (1<<V4L2_STD_SECAM);
	/* BUG: get defaults from user somehow... */
	set_video_input(dev, KS_INPUT_COMPOSITE);
	set_video_standard(dev, V4L2_STD_NTSC);
	/* Capture parameters */
	dev->capture.capability = V4L2_CAP_TIMEPERFRAME;
	dev->capture.capturemode = 0;
	dev->capture.extendedmode = 0;
	dev->capture.timeperframe = dev->frame_period;
	/* Default capture dimensions */
	dev->clientfmt.width = 704;
	dev->clientfmt.height = 240;
	dev->clientfmt.depth = 16;
	dev->clientfmt.pixelformat = V4L2_PIX_FMT_RGB565;
	dev->clientfmt.flags = 0;
	dev->clientfmt.bytesperline = 0;
	dev->clientfmt.sizeimage = 0;
	capture_new_format(dev);
	/* Default preview window */
	dev->window.x = 0;
	dev->window.y = 0;
	dev->window.width = 704;
	dev->window.height = 480;
	dev->window.chromakey = 0x000800;
	dev->window.clips = NULL;
	dev->window.clipcount = 0;
	/* Default preview framebuffer */
	dev->fbuf.capability = V4L2_FBUF_CAP_EXTERNOVERLAY
			       | V4L2_FBUF_CAP_CHROMAKEY
			       | V4L2_FBUF_CAP_SCALEUP
			       | V4L2_FBUF_CAP_SCALEDOWN;
	dev->fbuf.flags = V4L2_FBUF_FLAG_PRIMARY
			  | V4L2_FBUF_FLAG_OVERLAY
			  | V4L2_FBUF_FLAG_CHROMAKEY;
	dev->fbuf.base[0] = mgavideo_get_base( dev->mga );
	/* dev->fbuf.fmt = ? */
	return 0;
}
- /* =====================================================================
- * The functions below this point are only called during loading
- * and unloading of the driver.
- */
- /*
- * D E V I C E I N I A L I Z A T I O N R O U T I N E S
- *
- * These routines locate and enable the hardware, and initialize
- * the device structure.
- */
/* Locate and enable one board, and initialize its device structure
 * and V4L2 callback table.  `dev - capture` is the board index.
 * Returns 1 on success, 0 on failure (no MGA device, or no usable
 * video decoder).
 * NOTE(review): the index is a ptrdiff_t printed with %d — fine for
 * the small NBOARDS range but technically mismatched; confirm if
 * ever built for 64-bit.
 */
static int
config_a_device(struct capture_device *dev)
{
	sprintf(dev->shortname, "capture%d", dev - capture);
	dev->mga = mgavideo_get();
	if( dev->mga == NULL )
		return 0;
	device_initialize(dev);
	sprintf(dev->v.name, "V4L2 Video Capture Driver (%d)", dev-capture);
	dev->v.type = V4L2_TYPE_CAPTURE;
	dev->v.minor = unit_video[dev-capture];
	/* Hook up the V4L2 entry points */
	dev->v.open = v4l2_open;
	dev->v.close = v4l2_close;
	dev->v.read = v4l2_read;
	dev->v.write = v4l2_write;
	dev->v.ioctl = v4l2_ioctl;
	dev->v.mmap = v4l2_mmap;
	dev->v.poll = v4l2_poll;
	dev->v.initialize = v4l2_init_done;
	dev->v.priv = NULL;
	if(!( mgavideo_querycaps( dev->mga ) & MGAVIDEO_HAS_DECODER ))
	{
		err_msg("Bad or unrecognized video decodern");
		mgavideo_release(dev->mga);
		return 0;/* failed */
	}
	return 1;
}
- static void
- unconfig_a_device(struct capture_device *dev)
- {
- interrupt_disable(dev);
- capture_close(dev);
- /* TODO: Unconfigure the device, free the I/O port, etc. */
- mgavideo_release( dev->mga );
- if (dev->is_registered)
- {
- v4l2_unregister_device((struct v4l2_device *)dev);
- info_msg("Removed device %sn", dev->shortname);
- }
- memset(dev, 0, sizeof(capture[0]));
- }
- /*
- * M O D U L E I N I T A N D C L E A N U P
- */
/* Module entry point: probe and register up to NBOARDS devices.
 * Returns 0 when at least one device registered, -ENODEV when none
 * were found (in which case cleanup_module is NOT called).
 * NOTE(review): when v4l2_register_device fails, this returns 0
 * (success) after unconfiguring only the failed board — earlier
 * boards stay registered and the module stays loaded; confirm this
 * partial-success behavior is intended.
 */
int
init_module(void)
{
	int i;
	for (i = 0; i < NBOARDS; ++i)
	{
		memset(&capture[i], 0, sizeof(capture[0]));
		if (!config_a_device(&capture[i]))
		{
			break;
		}
		if (v4l2_register_device(
			(struct v4l2_device *)&capture[i]) != 0)
		{
			err_msg("Couldn't register the driver.n");
			unconfig_a_device(&capture[i]);
			return 0;
		}
		capture[i].is_registered = 1;
	}
	if (i == 0)
	{
		err_msg("No devices found.n");
		return -ENODEV;/* cleanup will not be called */
	}
	return 0;
}
- void
- cleanup_module(void)
- {
- int i;
- for (i = 0; i < NBOARDS; ++i)
- unconfig_a_device(&capture[i]);
- }