v4l2cap.c
Uploader: aoeyumen
Upload date: 2007-01-06
Archive size: 3329k
File size: 87k
Source category:

DVD

Development platform:

Unix_Linux
  1. /*        Video for Linux Two
  2.  *        Video Capture Driver
  3.  *        -- example code --
  4.  *
  5.  *        This software is in the public domain.
  6.  *        Written by Bill Dirks
  7.  *
  8.  *        This module is an example implementation of the Video for Linux Two
  9.  *        video capture API specification. The purposes of this software are
  10.  *        1) Serve as a starting point for a new V4L2 capture driver
  11.  *        2) Serve as a dummy driver for an application developer
  12.  *        3) Supplement the V4L2 API documentation 
  13.  *
  14.  *        gcc -c -O2 -Wall v4l2cap.c
  15.  */
  16. #ifndef __KERNEL__
  17. #define __KERNEL__
  18. #endif
  19. #ifndef MODULE
  20. #define MODULE
  21. #endif
  22. #include <linux/module.h>
  23. #include <linux/errno.h>
  24. #include <linux/kernel.h>
  25. #include <linux/malloc.h>
  26. #include <linux/mm.h>
  27. #include <linux/poll.h>
  28. #include <linux/ioport.h>
  29. #include <asm/io.h>
  30. #include <linux/videodev2.h>
  31. #include <linux/version.h>
  32. #include <asm/uaccess.h>
  33. #include <asm/pgtable.h>
  34. #include <asm/page.h>
  35. #include <linux/i2c.h>
  36. #include "mgavideo.h"
  37. #include "ks0127.h"
  38. #include "tuner.h"
  39. #include "msp3400.h"
  40. #include "zr36060.h"
  41. #define PKMOD "cap: "
  42. #if 0
  43. #define debug_msg(fmt,arg...) printk(KERN_DEBUG PKMOD fmt,##arg)
  44. #else
  45. #define debug_msg(fmt,arg...)
  46. #endif
  47. #if 1
  48. #define err_msg(fmt,arg...) printk(KERN_ERR PKMOD fmt,##arg)
  49. #else
  50. #define err_msg(fmt,arg...)
  51. #endif
  52. #if 1
  53. #define info_msg(fmt,arg...) printk(KERN_INFO PKMOD fmt,##arg)
  54. #else
  55. #define info_msg(fmt,arg...)
  56. #endif
  57. /*  Video controls  */
/*  Controls reported via VIDIOC_QUERYCTRL.  Initializer order appears
 *  to be {id, name, minimum, maximum, step, default_value, type} --
 *  confirm against this API revision's videodev2.h.  */
static struct v4l2_queryctrl capture_control[] =
{
 {V4L2_CID_BRIGHTNESS, "Brightness", -126, 126, 1, 0, V4L2_CTRL_TYPE_INTEGER},
 {V4L2_CID_CONTRAST,   "Contrast",   -126, 126, 1, 0, V4L2_CTRL_TYPE_INTEGER},
 {V4L2_CID_SATURATION, "Saturation", -126, 126, 1, 0, V4L2_CTRL_TYPE_INTEGER},
 {V4L2_CID_HUE,        "Hue",        -126, 126, 1, 0, V4L2_CTRL_TYPE_INTEGER},
#if 1 /* extra stuff used for testing vidpanel */
 {V4L2_CID_EXPOSURE, "Exposure", 0,4,0,0, V4L2_CTRL_TYPE_MENU},
 {V4L2_CID_AUTOGAIN, "Auto Gain", 0, 1, 0, 1, V4L2_CTRL_TYPE_BOOLEAN},
 {V4L2_CID_DO_WHITE_BALANCE, "White Balance", 0,0,0,0, V4L2_CTRL_TYPE_BUTTON},
#endif
};
/*  Number of entries in capture_control[]  */
#define MAXCONTROLS        (sizeof(capture_control)/sizeof(capture_control[0]))
/*  Fixed indices of the four tone controls within capture_control[]  */
#define VCTRL_BRIGHTNESS   0
#define VCTRL_CONTRAST     1
#define VCTRL_SATURATION   2
#define VCTRL_HUE          3
  75. static int
  76. find_vctrl(int id)
  77. {
  78.         int i;
  79.         if (id == V4L2_CID_PRIVATE_BASE ||
  80.             id <  V4L2_CID_BASE ||
  81.             id >  V4L2_CID_LASTP1)
  82.                 return -EDOM;
  83.         for (i = MAXCONTROLS - 1; i >= 0; i--)
  84.                 if (capture_control[i].id == id)
  85.                         break;
  86.         if (i < 0)
  87.                 i = -EINVAL;
  88.         return i;
  89. }
  90. static int
  91. vctrl_querymenu(struct v4l2_querymenu *qm)
  92. {
  93.         static char *expo_menu[] = {
  94.                 "1/60",
  95.                 "1/100",
  96.                 "1/250",
  97.                 "1/1000",
  98.                 "1/5000",
  99.         };
  100.         if (qm->id == V4L2_CID_EXPOSURE)
  101.         {
  102.                 if (qm->index < 0 ||
  103.                     qm->index >= sizeof(expo_menu)/sizeof(char *))
  104.                         return -EINVAL;
  105.                 {
  106.                         memcpy(qm->name, expo_menu[qm->index],
  107.                                sizeof(qm->name));
  108.                 }
  109.                 return 0;
  110.         }
  111.         return -EINVAL;
  112. }
struct capture_device;/* forward reference */
/*  Per-input state: the V4L2 descriptions of one source plus the saved
 *  tone-control values, tuner frequency and VCR mode.  */
struct video_source
{
        struct v4l2_input        input;
        int                      control[MAXCONTROLS]; /* saved control values */
        struct v4l2_tuner        tuner;
        int                      freq;     /* last frequency set via tuner */
        struct v4l2_audio        audio;
        int                      vcrmode;
};
/*  Bus-master scatter list  */
/*  One scatter-list entry: DMA address and byte length.  */
struct scatter_node
{
        __u32                    addr;
        __u32                    len;
};
/*  Presumably OR'ed into an entry to mark the list's end -- confirm
 *  against the DMA setup code.  */
#define END_OF_SCATTER_LIST 0x80000000
/*  Image translations  */
/*  Lookup tables for the YUYV -> RGB24/RGB32 translators; filled in by
 *  translate_make_rgb24_lut().  */
struct lookup_rgb24
{
        __u32                    u_rgb[256];  /* U contribution, packed */
        __u32                    v_rgb[256];  /* V contribution, packed */
        __u32                    y_rgb[256];  /* Y contribution, packed */
        __u8                     sat[1024];   /* clamp 10-bit field to 0..255 */
        __u32  sat8[1024];   /* sat[] pre-shifted left 8 */
        __u32  sat16[1024];  /* sat[] pre-shifted left 16 */
        __u32  uv_rgb[256 * 256]; /* precombined u_rgb[u] + v_rgb[v] */
};
/*  A conversion lookup table: its kind (LUT_*) and its storage.  */
struct lookup
{
        int                      type;
        int                      size;
        union
        {
                void                *base;        /* vmalloc() */
                __u16               *rgb16;
                struct lookup_rgb24 *rgb24;
        } table;
};
#define LUT_NULL                 0
#define LUT_RGB555               1
#define LUT_RGB565               2
#define LUT_RGB24                3
/*  Parameters of one image-format translation pass: conversion type,
 *  geometry, input/output pointers and strides, and the lookup table.  */
struct translation
{
        int                      type;         /* XLAT_* code */
        int                      width;        /* in pixels */
        int                      height;
        __u8                     *in;
        int                      in_stride;    /* bytes per input row */
        __u8                     *out;
        int                      out_stride;   /* bytes per output row */
        int                      output_size;
        int                      output_is_user;
        struct lookup            lut;
};
#define XLAT_NULL                0
#define XLAT_YUYV_TO_UYVY        1
#define XLAT_YUYV_TO_YUV420      2
#define XLAT_YUYV_TO_GREY        4
#define XLAT_YUYV_TO_RGB555      6
#define XLAT_YUYV_TO_RGB565      7
#define XLAT_YUYV_TO_RGB24       8
#define XLAT_YUYV_TO_RGB32       9
/*  Per-open data for handling multiple opens on one device */
struct device_open
{
        int                      isopen;
        int                      noncapturing;
        struct capture_device    *dev;     /* back pointer to the board */
};
#define MAX_OPENS        3
/*  Streaming data buffer  */
struct stream_buffer
{
        struct v4l2_q_node       qnode;
        struct v4l2_buffer       vidbuf;
        int                      requested;
        __u8                     *vaddress;  /* vmalloc() */
        struct scatter_node      *dma_list;  /* get_free_page() */
};
#define MAX_CAPTURE_BUFFERS      30
#define MAX_LOCKED_MEMORY        2000000
/*
 *        Capture device structure
 *
 *        One for each handled device in the system.
 *        This structure holds all the global information the driver
 *        needs about each device.
 */
struct capture_device
{
        struct v4l2_device       v;        /*  Must be first -- allows the
                                            *  cast in capture_device_from_file() */
        struct mga_dev*          mga;      /*  underlying Matrox video device  */
        char                     shortname[16];
        int                      is_registered;
        int                      open_count;
        struct device_open       open_data[MAX_OPENS];
        int                      capturing_opens;
/*        Per-bus index number for each device        */
        int                      index;
/*        General type of device  */
        int                      type;
/*        Interrupts        */
        int                      ints_enabled;
        struct tq_struct         tqnode_dpc;/* for Bottom Half routine */
        struct wait_queue        *new_video_frame;
/*        Video decoder stuff                */
        __u32                    standards;
        __u32                    standard;
        __u32                    frame_period;  /* 333667 for NTSC, 400000 for
                                                 * PAL/SECAM (see set_video_standard) */
        struct video_source      source[KS_INPUT_COUNT];
        int                      source_width;
        int                      source_height;
/*        Client capture format and capture modes        */
        struct v4l2_format       clientfmt;
        struct v4l2_captureparm  capture;
        int                      input;/* which video source is selected */
/*        Hardware capture format         */
        int                      capture_bypp;
        int                      capture_size;
        struct scatter_node      *capture_dma_list;/* get_free_page() */
/*        Capture state        */
        int                      ready_to_capture;
        int                      grabber_enabled;
        int                      capture_completed;
        unsigned long            time_acquired;/* millisecond time stamp */
        int                      streaming;
        struct stream_buffer     stream_buf[MAX_CAPTURE_BUFFERS];
        int                      stream_buffers_requested;
        int                      stream_buffers_mapped;
        int                      stream_contig_map;
        struct v4l2_queue        stream_q_capture;
        struct v4l2_queue        stream_q_done;
        struct timeval           stream_begin;
        unsigned long            stream_last_frame;
/*        Image format conversions        */
        struct translation       translation;
        __u8                     *xlat_temp;/* vmalloc() */
/*        Performance statistics        */
        struct v4l2_performance  perf;
        /* frame counter for test images */
        int                      h,m,s,f;
        /* video preview stuff */
        struct v4l2_framebuffer  fbuf;
        struct v4l2_window       window;
};
/*        Values for type field        */
#define DEVICE_TYPE_0            0
/*        Extreme video dimensions        */
#define MIN_WIDTH                32
#define MIN_HEIGHT               24
#define MAX_WIDTH                704
#define MAX_HEIGHT               290
#define MAX_FRAME_AGE            200 /* ms */
/*
 *        The Capture device structure array. This is the only global
 *        variable in the module besides those used by the device probing
 *        and enumeration routines (command line overrides)
 */
#define NBOARDS                  2
static struct capture_device capture[NBOARDS];
/*  Per-board unit numbers, overridable on the module command line --
 *  presumably the desired device minor ordering; confirm with the
 *  registration code.  */
static int unit_video[NBOARDS] = { 0, 1, };
MODULE_PARM(unit_video, "1-"__MODULE_STRING(NBOARDS)"i");
  277. static inline struct capture_device *
  278. capture_device_from_file(struct file *file)
  279. {
  280.         return (struct capture_device *)v4l2_device_from_file(file);
  281. }
  282. /*  These macros can be used to make device I/O operations atomic  */
  283. /* static spinlock_t device_lock = SPIN_LOCK_UNLOCKED; */
  284. /* #define BEGIN_CRITICAL_SECTION         */
  285. /*         do{unsigned long flags;spin_lock_irqsave(&wavi_lock,flags) */
  286. /* #define END_CRITICAL_SECTION         */
  287. /*         spin_unlock_irqrestore(&wavi_lock,flags);}while(0) */
  288. /*
  289.  *        D E V I C E   F U N C T I O N S
  290.  */
  291. static void
  292. device_initialize(struct capture_device *dev)
  293. {
  294.         int in;
  295.         /*  TODO: Put hardware into a sensible state and        */
  296.         /*        do the one-time startup things                */
  297.         mgavideo_decoder( dev->mga, KS0127_RESET, 0 );
  298.         in = KS_INPUT_COMPOSITE;
  299.         mgavideo_decoder( dev->mga, KS0127_SET_INPUT, &in );
  300.         in = KS_STD_NTSC;
  301.         mgavideo_decoder( dev->mga, KS0127_SET_STANDARD, &in );
  302.         in = KS_OUTPUT_YUV656E;
  303.         mgavideo_decoder( dev->mga, KS0127_SET_OUTPUT, &in );
  304.         in = -1;
  305.         mgavideo_tuner( dev->mga, TUNER_SET_TYPE, &in );
  306. mgavideo_zr36060_reset(dev->mga);
  307. }
/*  Forward a brightness value to the KS0127 decoder.  */
static void
device_brightness(struct capture_device *dev, int x)
{
        mgavideo_decoder( dev->mga, KS0127_SET_BRIGHTNESS, &x);
}
/*  Forward a contrast value to the KS0127 decoder.  */
static void
device_contrast(struct capture_device *dev, int x)
{
        mgavideo_decoder( dev->mga, KS0127_SET_CONTRAST, &x);
}
/*  Forward a saturation value to the KS0127 decoder.  */
static void
device_saturation(struct capture_device *dev, int x)
{
        mgavideo_decoder( dev->mga, KS0127_SET_SATURATION, &x);
}
/*  Forward a hue value to the KS0127 decoder.  */
static void
device_hue(struct capture_device *dev, int x)
{
        mgavideo_decoder( dev->mga, KS0127_SET_HUE, &x);
}
  328.   
  329. static void
  330. device_tone_controls(struct capture_device *dev)
  331. {
  332.         int        *ctrl;
  333.         ctrl = dev->source[dev->input].control;
  334.         device_brightness(dev, ctrl[VCTRL_BRIGHTNESS]);
  335.         device_contrast(dev, ctrl[VCTRL_CONTRAST]);
  336.         device_saturation(dev, ctrl[VCTRL_SATURATION]);
  337.         device_hue(dev, ctrl[VCTRL_HUE]);
  338. }
/*  Start/stop the hardware grabber.  Not implemented in this example
 *  driver -- the enable flag 'x' is currently ignored.  */
static void
grabbing_enable(struct capture_device *dev, int x)
{
// XXX
}
  344. static unsigned long
  345. current_time_ms(void)
  346. {
  347.         struct timeval now;
  348.         do_gettimeofday(&now);
  349.         return now.tv_sec * 1000 + now.tv_usec / 1000;
  350. }
  351. static void
  352. set_video_input(struct capture_device *dev, int i)
  353. {
  354.         if (i < 0 || i >= KS_INPUT_COUNT)
  355.                 return;
  356.         dev->input = i;
  357.         mgavideo_decoder( dev->mga, KS0127_SET_INPUT, &i );
  358.         device_tone_controls(dev);
  359. }
  360. static void
  361. set_video_standard(struct capture_device *dev, int x)
  362. {
  363.         int in;
  364.         dev->standard = x;
  365.         switch (x)
  366.         {
  367.         case V4L2_STD_NTSC:
  368.                 dev->frame_period = 333667;
  369.                 in = KS_STD_NTSC;
  370.                 break;
  371.         case V4L2_STD_PAL:
  372.                 dev->frame_period = 400000;
  373.                 in = KS_STD_PAL;
  374.                 break;
  375.         case V4L2_STD_SECAM:
  376.                 dev->frame_period = 400000;
  377.                 in = KS_STD_SECAM;
  378.                 break;
  379.         }
  380.         mgavideo_decoder( dev->mga, KS0127_SET_STANDARD, &in );
  381. }
  382. static void
  383. set_video_freq(struct capture_device *dev, int *freq)
  384. {
  385.         int norm = 0;
  386.         mgavideo_audio( dev->mga, MSP_SWITCH_MUTE, 0 );
  387.         mgavideo_tuner( dev->mga, TUNER_SET_TVFREQ, freq );
  388.         mgavideo_audio( dev->mga, MSP_SET_TVNORM, &norm );
  389.         mgavideo_audio( dev->mga, MSP_NEWCHANNEL, 0 );
  390.         dev->source[dev->input].freq = *freq;
  391. }
  392. /*
  393.  *
  394.  *        I M A G E   F O R M A T   T R A N S L A T I O N
  395.  *
  396.  */
  397. static int
  398. translate_yuyv_grey(struct translation *xlat)
  399. {
  400.         __u8        *esi, *edi;
  401.         __u32        eax, ebx, ecx, edx;
  402.         int        row;
  403.         esi = xlat->in;
  404.         edi = xlat->out;
  405.         eax = xlat->in_stride - xlat->width * 2;
  406.         ebx = xlat->out_stride - xlat->width;
  407.         for (row = xlat->height; row; esi += eax, edi += ebx, --row)
  408.         {
  409.                 for (ecx = xlat->width >> 2; ecx; --ecx)
  410.                 {
  411.                         edx = esi[4] | (esi[6] << 8);
  412.                         edx <<= 16;
  413.                         edx |= esi[0] | (esi[2] << 8);
  414.                         esi += 8;
  415.                         *(__u32 *)edi = edx;
  416.                         edi += 4;
  417.                 }
  418.         }
  419.         return 1;
  420. }
/*  Convert packed YUYV to planar YUV 4:2:0.  The Y plane is copied at
 *  full resolution; U and V are sampled from odd input lines only,
 *  halving chroma resolution both ways.  Width is assumed to be a
 *  multiple of 4 and height a multiple of 2.  Always returns 1.
 *  NOTE(review): the register-style locals are deliberately reused
 *  across the three passes -- ebx (output row gap) is halved after the
 *  Y pass and kept for both chroma passes, and eax/edi carry over --
 *  so statement order matters here.  */
static int
translate_yuyv_yuv420(struct translation *xlat)
{
        __u8        *esi, *edi;
        __u32        eax, ebx, ecx, edx;
        __u8        dl;
        int        row;
        /* Y's */
        esi = xlat->in;
        edi = xlat->out;
        eax = xlat->in_stride - xlat->width * 2;   /* input row gap */
        ebx = xlat->out_stride - xlat->width;      /* output row gap */
        for (row = xlat->height; row; esi += eax, edi += ebx, --row)
        {
                for (ecx = xlat->width >> 2; ecx; --ecx)
                {
                        /* pack Y0..Y3 of two YUYV pairs into one word */
                        edx = esi[4] | (esi[6] << 8);
                        edx <<= 16;
                        edx |= esi[0] | (esi[2] << 8);
                        esi += 8;
                        *(__u32 *)edi = edx;
                        edi += 4;
                }
        }
        /* U's */
        esi = xlat->in + xlat->in_stride;          /* start at line 1 */
        eax = xlat->in_stride * 2 - xlat->width * 2; /* step 2 lines */
        ebx >>= 1;                                 /* chroma rows are half width */
        for (row = xlat->height >> 1; row; esi += eax, edi += ebx, --row)
        {
                for (ecx = xlat->width >> 1; ecx; ++edi, --ecx)
                {
                        dl = esi[1];               /* U sample */
                        esi += 4;
                        *edi = dl;
                }
        }
        /* V's */
        esi = xlat->in + xlat->in_stride;
        for (row = xlat->height >> 1; row; esi += eax, edi += ebx, --row)
        {
                for (ecx = xlat->width >> 1; ecx; ++edi, --ecx)
                {
                        dl = esi[3];               /* V sample */
                        esi += 4;
                        *edi = dl;
                }
        }
        return 1;
}
/*  12-bit fixed-point YUV->RGB conversion coefficients:
 *  K12_1 is 1.0 (1 << K12_S), K12_S is the shift count.  */
#define K12_1         4096
#define K12_S           12
#define K12_GU        -1409
#define K12_BU         7258
#define K12_RV         5743
#define K12_GV        -2925
  477. static int
  478. translate_expand_y(int y)
  479. {
  480.         y = (255 * y + 110) / 220;
  481.         if (y < 0) y = 0; else if (y > 255) y = 255;
  482.         return y;
  483. }
  484. static int
  485. translate_expand_c(int c)
  486. {
  487.         c = (127 * c + 56) / 112;
  488.         if (c < -128) c = -128; else if (c > 127) c = 127;
  489.         return c;
  490. }
/*  Build (or reuse) the YUV->RGB16 lookup table used by
 *  translate_yuyv_rgb16().  The 2^17-byte table holds 64K 16-bit
 *  entries indexed by 6 bits of truncated Y, 5 bits of U and 5 bits
 *  of V; entries are packed RGB555 or RGB565 depending on xlat->type.
 *  Returns 1 on success, 0 if the table could not be allocated.  */
static int
translate_make_rgb16_lut(struct translation *xlat)
{
        __u16        *lut;
        long        gu[32], bu[32], rv[32], gv[32];
        int        rscale[256], gscale[256], bscale[256];
        int        rrange, grange, brange;
        int        rshift, gshift, bshift;
        long        x;
        int        y, u, v;
        int        r, g, b;
        int        i, t;
        /*  Reuse the table if the right kind is already built  */
        if ((xlat->type == XLAT_YUYV_TO_RGB555 &&
             xlat->lut.type == LUT_RGB555) ||
            (xlat->type == XLAT_YUYV_TO_RGB565 &&
             xlat->lut.type == LUT_RGB565))
                return 1;
        if (xlat->lut.table.base)
                vfree(xlat->lut.table.base);
        xlat->lut.table.base = vmalloc(1 << 17);   /* 64K __u16 entries */
        if (xlat->lut.table.base == NULL)
        {
                err_msg("vmalloc() failed in make_rgb16_lutn");
                return 0;
        }
        lut = xlat->lut.table.rgb16;
        //  Compute all different chroma components to 8-bit precision
        for (i = 0, t = -128; t < 128; t += 8, ++i)
        {
                /* +2: rounding tweak for the truncated 5-bit chroma --
                 * see also the +2 on y below */
                x = translate_expand_c(t) + 2;
                gu[i] = (K12_GU * x + K12_1/2) >> K12_S;
                bu[i] = (K12_BU * x + K12_1/2) >> K12_S;
                rv[i] = (K12_RV * x + K12_1/2) >> K12_S;
                gv[i] = (K12_GV * x + K12_1/2) >> K12_S;
        }
        //  8-bit to ?-bit scaling tables
        if (xlat->type == XLAT_YUYV_TO_RGB555)
        {
                xlat->lut.type = LUT_RGB555;
                rrange = grange = brange = 31;
                rshift = 10; gshift = 5; bshift = 0;
        }
        else
        {
                xlat->lut.type = LUT_RGB565;
                rrange = brange = 31;
                grange = 63;
                rshift = 11; gshift = 5; bshift = 0;
        }
        for (i = 0; i < 256; ++i)
        {
                rscale[i] = ((i * rrange + 127) / 255) << rshift;
                gscale[i] = ((i * grange + 127) / 255) << gshift;
                bscale[i] = ((i * brange + 127) / 255) << bshift;
        }
        //  Fill in the RGB values for each combination of YUV
        for (i = 0; i < 256; i += 4)
        {
                y = translate_expand_y(i) + 2;
                if (y > 255) y = 255;
                for (u = 0; u < 32; ++u)
                        for (v = 0; v < 32; ++v)
                        {
                                //  Red, Green and Blue
                                r = y + rv[v];
                                g = y + gu[u] + gv[v];
                                b = y + bu[u];
                                //  Saturate
                                if (r < 0) r = 0; else if (r > 254) r = 254;
                                if (g < 0) g = 0; else if (g > 254) g = 254;
                                if (b < 0) b = 0; else if (b > 254) b = 254;
                                //  scale, shift and combine
                                *lut++ = rscale[r] + gscale[g] + bscale[b];
                        }
        }
        return 1;
}
/*  Convert YUYV to 16-bit RGB (555 or 565), two pixels per 32-bit
 *  input word, via the table built by translate_make_rgb16_lut().
 *  The 16-bit LUT index is 6 bits of Y | 5 bits of U | 5 bits of V.
 *  Returns 1 on success, 0 on failure.  */
static int
translate_yuyv_rgb16(struct translation *xlat)
{
        __u32        *src, *dst;
        __u32        uv, yuv0, yuv1, dual;
        __u16        *lut;
        int        stride;
        int        row, i;
        if (!translate_make_rgb16_lut(xlat))
                return 0;
        lut = xlat->lut.table.rgb16;
        src = (__u32 *)xlat->in;
        dst = (__u32 *)xlat->out;
        if (src == NULL || dst == NULL)
                return 0;
        /* output gap at end of row, in 32-bit words */
        stride = (xlat->out_stride - xlat->width * 2) >> 2;
        for (row = xlat->height; row; --row)
        {
                for (i = xlat->width >> 1; i; --i)
                {
                        dual = *src++;   /* bytes: Y0 U Y1 V */
                        /* 10-bit U|V index part, shared by both pixels */
                        uv   =    ((dual & 0x0000F800) >> 6)
                                + ((dual & 0xF8000000) >> 27);
                        yuv1 =           ((dual & 0x00FC0000) >> 8) + uv;
                        yuv0 =           ((dual & 0x000000FC) << 8) + uv;
                        /* two 16-bit pixels per 32-bit store */
                        *dst++ = ((__u32)lut[yuv1] << 16) | lut[yuv0];
                }
                dst += stride;
        }
        return 1;
}
/*  Build (or reuse) the component lookup tables used by the RGB24 and
 *  RGB32 translators.  Each y_rgb/u_rgb/v_rgb entry packs its R, G, B
 *  contributions into 11-bit fields (R<<22 | G<<11 | B); sums of one
 *  Y, one U and one V entry are later unpacked and clamped through the
 *  sat*[] tables.  Returns 1 on success, 0 if allocation fails.  */
static int
translate_make_rgb24_lut(struct translation *xlat)
{
        struct lookup_rgb24        *lut;
        int                        r, g, b;
        int                        i;
        int    j;
        int                        x;
        if (xlat->lut.type == LUT_RGB24)
                return 1;        /* already built */
        if (xlat->lut.table.base)
                vfree(xlat->lut.table.base);
        xlat->lut.table.base = vmalloc(sizeof(struct lookup_rgb24));
        if (xlat->lut.table.base == NULL)
        {
                err_msg("vmalloc() failed in make_rgb24_lutn");
                return 0;
        }
        xlat->lut.type = LUT_RGB24;
        lut = xlat->lut.table.rgb24;
        for (i = 0; i < 256; ++i)
        {
                x = i;                // Value is in excess-128 format
                if (x < 128)
                        ++x;        // Add 1 to negative values for noise rejection
                x -= 128;        // Convert to two's complement format
                x = translate_expand_c(x);
                g = (K12_GU * x + K12_1/2) >> K12_S;
                b = (K12_BU * x + K12_1/2) >> K12_S;
                lut->u_rgb[i] = ((g & 0x3FF) << 11) | (b & 0x3FF);
                r = (K12_RV * x + K12_1/2) >> K12_S;
                g = (K12_GV * x + K12_1/2) >> K12_S;
                lut->v_rgb[i] = (r << 22) | ((g & 0x3FF) << 11);

                x = translate_expand_y(i);
                lut->y_rgb[i] = (x << 22) | (x << 11) | x;
        }
        /*  sat*[]: clamp a 10-bit field to 0..255 -- 256..511 saturate
         *  to 255, 512..1023 (wrapped negatives) clamp to 0.  sat8/sat16
         *  are the same value pre-shifted for faster repacking.  */
        for (i = 0; i < 1024; ++i)
        {
                x = (i > 511) ? 0 : ((i > 255) ? 255 : i);
                lut->sat[i] = x;
                lut->sat8[i] = x << 8;
                lut->sat16[i] = x << 16;
        }
        /*  uv_rgb[]: all 64K precombined U+V contributions  */
        for(i=0; i < 256; i++) {
                for(j = 0; j < 256; j++) {
                        lut->uv_rgb[i*256 + j] = lut->u_rgb[i]  + lut->v_rgb[j];
                }
        }
        return 1;
}
/*  Convert YUYV to packed 24-bit RGB, 4 pixels (two input words) per
 *  iteration, emitting 12 output bytes as three 32-bit stores.  Each
 *  pel* value holds packed 11-bit R/G/B sums from the rgb24 lookup
 *  tables, unpacked and clamped through lut->sat[].  Width is assumed
 *  to be a multiple of 4.  Returns 1 on success, 0 on failure.  */
static int
translate_yuyv_rgb24(struct translation *xlat)
{
        struct lookup_rgb24        *lut;
        __u32                        *src;
        __u32                        *dst;
        /* view of one input word as its Y0 U Y1 V bytes */
        union pixel {
                __u32 yuyv;
                __u8 part[4];
        } pixel_data;
        int                        i;
        int                        row;
        int                        stride;
        __u32                        pela, pelb, pelc, peld;
        if (!translate_make_rgb24_lut(xlat))
                return 0;
        lut = xlat->lut.table.rgb24;
        src = (__u32 *)xlat->in;
        dst = (__u32 *)xlat->out;
        if (src == NULL || dst == NULL)
                return 0;
        /* output gap at end of row, in 32-bit words */
        stride = (xlat->out_stride - 3 * xlat->width) >> 2;
        for (row = xlat->height; row; --row)        {
                for (i = xlat->width >> 2; i; --i)
                {
                        pixel_data.yuyv = *src++;
                        /* chroma contribution shared by the pixel pair */
                        pelb  = lut->u_rgb[pixel_data.part[1]]
                              + lut->v_rgb[pixel_data.part[3]];
                        pela  = lut->y_rgb[pixel_data.part[0]] + pelb;
                        pelb += lut->y_rgb[pixel_data.part[2]];
                        pixel_data.yuyv = *src++;
                        peld  = lut->u_rgb[pixel_data.part[1]]
                              + lut->v_rgb[pixel_data.part[3]];
                        pelc  = lut->y_rgb[pixel_data.part[0]] + peld;
                        peld += lut->y_rgb[pixel_data.part[2]];
                        /* unpack, clamp, and repack 4 pixels into 3 words */
                        dst[0] = ((u32)lut->sat[pela & 0x3FF])
                               + ((u32)lut->sat[(pela >> 11) & 0x3FF] << 8)
                               + ((u32)lut->sat[pela >> 22] << 16)
                               + ((u32)lut->sat[pelb & 0x3FF] << 24);
                        dst[1] = ((u32)lut->sat[(pelb >> 11) & 0x3FF])
                               + ((u32)lut->sat[pelb >> 22] << 8)
                               + ((u32)lut->sat[pelc & 0x3FF] << 16)
                               + ((u32)lut->sat[(pelc >> 11) & 0x3FF] << 24);
                        dst[2] = ((u32)lut->sat[pelc >> 22])
                               + ((u32)lut->sat[peld & 0x3FF] << 8)
                               + ((u32)lut->sat[(peld >> 11) & 0x3FF] << 16)
                               + ((u32)lut->sat[peld >> 22] << 24);
                        dst += 3;
                }
                dst += stride;
        }
        return 1;
}
/*  Convert YUYV to 32-bit RGB (one pixel per output word), 2 pixels
 *  per 32-bit input word.  Uses the same packed 11-bit field tables as
 *  the RGB24 path, with the pre-shifted sat8/sat16 clamping tables to
 *  avoid shifts on repack.  Width is assumed to be a multiple of 2.
 *  Returns 1 on success, 0 on failure.  */
static int
translate_yuyv_rgb32(struct translation *xlat)
{
        struct lookup_rgb24        *lut;
        __u32                        *src;
        __u32                        *dst;
        /* view of one input word as its Y0 U Y1 V bytes */
        union pixel {
                __u32 yuyv;
                __u8 part[4];
        } pixel_data;
        int                        i;
        int                        row;
        int                        stride;
        __u32                        pela, pelb;
        if (!translate_make_rgb24_lut(xlat))
                return 0;
        lut = xlat->lut.table.rgb24;
        src = (__u32 *)xlat->in;
        dst = (__u32 *)xlat->out;
        if (src == NULL || dst == NULL)
                return 0;
        /* output gap at end of row, in 32-bit words */
        stride = (xlat->out_stride - 4 * xlat->width) >> 2;
        for (row = xlat->height; row; --row)
        {
                for (i = xlat->width >> 1; i--;)
                {
                        pixel_data.yuyv = *src++;
                        /* chroma contribution shared by the pixel pair */
                        pelb  = lut->u_rgb[pixel_data.part[1]] + 
                                lut->v_rgb[pixel_data.part[3]];
                        pela  = lut->y_rgb[pixel_data.part[0]] + pelb;
                        pelb += lut->y_rgb[pixel_data.part[2]];
                        dst[0] =  lut->sat[pela & 0x3FF]
                               + (lut->sat8[(pela >> 11) & 0x3FF])
                               + (lut->sat16[pela >> 22]);
                        dst[1] =  lut->sat[pelb & 0x3FF]
                               + (lut->sat8[(pelb >> 11) & 0x3FF])
                               + (lut->sat16[pelb >> 22]);
                        dst += 2;
                }
                dst += stride;
        }
        return 1;
}
  746. static void
  747. translate_close(struct capture_device *dev)
  748. {
  749.         dev->translation.type = XLAT_NULL;
  750.         dev->translation.in = NULL;
  751.         dev->translation.out = NULL;
  752.         dev->translation.lut.type = LUT_NULL;
  753.         if (dev->translation.lut.table.base)
  754.                 vfree(dev->translation.lut.table.base);
  755.         dev->translation.lut.table.base = NULL;
  756.         if (dev->xlat_temp)
  757.                 vfree(dev->xlat_temp);
  758.         dev->xlat_temp = NULL;
  759. }
  760. static int
  761. translate_setup(struct capture_device *dev)
  762. {
  763.         int        npix2;
  764.         translate_close(dev);
  765.         /*  Translation: YUYV to client format */
  766.         dev->translation.width = dev->clientfmt.width;
  767.         dev->translation.height = dev->clientfmt.height;
  768.         dev->translation.in_stride = dev->translation.width * 2;
  769.         dev->translation.output_size = dev->clientfmt.sizeimage;
  770.         npix2 = dev->translation.width * dev->translation.height;
  771.         switch (dev->clientfmt.pixelformat)
  772.         {
  773.         case V4L2_PIX_FMT_YUYV:
  774.                 dev->translation.type = XLAT_NULL;
  775.                 break;
  776.         case V4L2_PIX_FMT_GREY:
  777.                 dev->translation.type = XLAT_YUYV_TO_GREY;
  778.                 dev->translation.out_stride = dev->translation.width;
  779.                 break;
  780.         case V4L2_PIX_FMT_YUV420:
  781.                 dev->translation.type = XLAT_YUYV_TO_YUV420;
  782.                 dev->translation.out_stride = dev->translation.width;
  783.                 break;
  784.         case V4L2_PIX_FMT_RGB555:
  785.         case V4L2_PIX_FMT_RGB565:
  786.                 dev->translation.type = (dev->clientfmt.pixelformat == 
  787.                         V4L2_PIX_FMT_RGB555) ? XLAT_YUYV_TO_RGB555 
  788.                         : XLAT_YUYV_TO_RGB565;
  789.                 dev->translation.out_stride = dev->translation.width * 2;
  790.                 if (!translate_make_rgb16_lut(&dev->translation))
  791.                         return 0;
  792.                 break;
  793.         case V4L2_PIX_FMT_BGR24:
  794.                 dev->translation.type = XLAT_YUYV_TO_RGB24;
  795.                 dev->translation.out_stride = dev->translation.width * 3;
  796.                 if (!translate_make_rgb24_lut(&dev->translation))
  797.                         return 0;
  798.                 break;
  799. #ifdef NEVER
  800.         case V4L2_PIX_FMT_BGR24:
  801.                 dev->translation.type = XLAT_NULL;
  802.                 break;
  803. #endif
  804.         case V4L2_PIX_FMT_BGR32:
  805.                 dev->translation.type = XLAT_YUYV_TO_RGB32;
  806.                 dev->translation.out_stride = dev->translation.width * 4;
  807.                 if (!translate_make_rgb24_lut(&dev->translation))
  808.                         return 0;
  809.                 break;
  810.         }
  811.         return 1;
  812. }
  813. static void
  814. translate_inandout(struct capture_device *dev,
  815.                    __u8 *input_buffer,
  816.                    __u8 *output_buffer,
  817.                    __u8 output_is_user_space)
  818. {
  819.         /*  Translation: YUYV to client format */
  820.         dev->translation.in = input_buffer;
  821.         dev->translation.out = output_buffer;
  822.         dev->translation.output_is_user = output_is_user_space;
  823. }
  824. static int /* length of output image or negative error */
  825. translate_image(struct capture_device *dev,
  826.                 __u8        *input_buffer,
  827.                 __u8        *output_buffer,
  828.                 int        len,
  829.                 int        output_is_user)
  830. {
  831.         int        err;
  832.         /* The buffer must be large enough for the whole image */
  833.         if (len < dev->translation.output_size)
  834.         {
  835.                 debug_msg("Read buffer too small, %d < %dn",
  836.                           len, dev->translation.output_size);
  837.                 return -EFAULT;
  838.         }
  839.         if (len > dev->translation.output_size)
  840.                 len = dev->translation.output_size;
  841.         translate_inandout(dev, input_buffer, output_buffer, output_is_user);
  842.         /*  Translation: YUYV to client format */
  843.         if (dev->translation.type == XLAT_NULL)
  844.         {
  845.                 if (dev->translation.in == dev->translation.out)
  846.                         return len;
  847.                 if (!output_is_user) 
  848. {
  849.                         memcpy(output_buffer, dev->translation.in, len);
  850. }
  851.                 else
  852.                 {
  853.                         err = copy_to_user(output_buffer, 
  854.                                            dev->translation.in, len);
  855.                         len = (err) ? -EFAULT : len;
  856.                 }
  857.                 return len;
  858.         }
  859.         if (output_is_user && !access_ok(VERIFY_WRITE, output_buffer, len))
  860.         {
  861.                 debug_msg("Buffer verify failed in translate_imagen");
  862.                 return -EFAULT;
  863.         }
  864.         switch (dev->translation.type)
  865.         {
  866.         case XLAT_YUYV_TO_GREY:
  867.                 translate_yuyv_grey(&dev->translation);
  868.                 break;
  869.         case XLAT_YUYV_TO_YUV420:
  870.                 translate_yuyv_yuv420(&dev->translation);
  871.                 break;
  872.         case XLAT_YUYV_TO_RGB555:
  873.         case XLAT_YUYV_TO_RGB565:
  874.                 translate_yuyv_rgb16(&dev->translation);
  875.                 break;
  876.         case XLAT_YUYV_TO_RGB24:
  877.                 translate_yuyv_rgb24(&dev->translation);
  878.                 break;
  879.         case XLAT_YUYV_TO_RGB32:
  880.                 translate_yuyv_rgb32(&dev->translation);
  881.                 break;
  882.         default:
  883.                 debug_msg("Unknown image translationn");
  884.                 break;
  885.         }
  886.         dev->translation.out = NULL;
  887.         return len;
  888. }
  889. /*
  890.  *
  891.  *        V I D E O   C A P T U R E   F U N C T I O N S
  892.  *
  893.  */
  894. /*
  895.  *  Supported capture formats (for VIDIOC_ENUM_CAPFMT)
  896.  */
/*  One entry per pixel format the driver can deliver.  Formats marked
 *  SWCONVERSION are produced in software from the raw YUYV capture by
 *  translate_image(); YUYV itself is passed through untranslated.
 *  Fields: index, description, fourcc, flags, bits per pixel, reserved.
 */
static struct v4l2_fmtdesc capfmt[] =
{
        {        0, {"RGB-16 (5-5-5)"},
                V4L2_PIX_FMT_RGB555,  V4L2_FMT_FLAG_SWCONVERSION, 16, {0, 0},
        },
        {        1, {"RGB-16 (5-6-5)"},
                V4L2_PIX_FMT_RGB565,  V4L2_FMT_FLAG_SWCONVERSION, 16, {0, 0},
        },
        {        2, {"RGB-24 (B-G-R)"},
                V4L2_PIX_FMT_BGR24,   V4L2_FMT_FLAG_SWCONVERSION, 24, {0, 0},
        },
        {        3, {"RGB-32 (B-G-R-?)"},
                V4L2_PIX_FMT_BGR32,   V4L2_FMT_FLAG_SWCONVERSION, 32, {0, 0},
        },
        {        4, {"Greyscale-8"},
                V4L2_PIX_FMT_GREY,    V4L2_FMT_CS_601YUV | V4L2_FMT_FLAG_SWCONVERSION, 8, {0, 0},
        },
        {        5, {"YUV 4:2:2 (Y-U-Y-V)"},
                V4L2_PIX_FMT_YUYV,    V4L2_FMT_CS_601YUV, 16, {0, 0},
        },
        {        6, {"YUV 4:2:0 (planar)"},
                V4L2_PIX_FMT_YUV420,  V4L2_FMT_CS_601YUV | V4L2_FMT_FLAG_SWCONVERSION, 12, {0, 0},
        },
};
/*  Number of entries in the capfmt[] table  */
#define NUM_CAPFMT (sizeof(capfmt)/sizeof(capfmt[0]))
  922. static void interrupt_enable(struct capture_device *dev);
  923. /*  The image format has changed, width, height, pixel format.
  924.  *  Decide if the format is ok or take the closest valid format.
  925.  */
static int
capture_new_format(struct capture_device *dev)
{
        /*  The image format has changed (width, height or pixel format).
         *  Clamp the request to the closest format this driver can deliver
         *  and recompute all derived sizes.  Always returns 1; an unknown
         *  pixel format is silently replaced by YUYV.  */
        dev->ready_to_capture = 0;
        /*  Source dimensions are fixed per video standard (height is per
         *  field -- NOTE(review): no default case, so an unrecognized
         *  standard leaves the previous source dimensions in place  */
        switch (dev->standard) {
                case V4L2_STD_NTSC:
                        dev->source_width = 704;
                        dev->source_height = 240;
                        break;
                case V4L2_STD_PAL:
                        dev->source_width = 704;
                        dev->source_height = 290;
                        break;
                case V4L2_STD_SECAM:
                        dev->source_width = 704;
                        dev->source_height = 290;
                        break;
        }
        dev->clientfmt.flags &= ~V4L2_FMT_CS_field;
        dev->clientfmt.flags |= V4L2_FMT_CS_601YUV;
        /*  Fix up depth and colorspace flags for the requested format;
         *  RGB formats clear the YUV colorspace flag just set above  */
        switch (dev->clientfmt.pixelformat)
        {
        case V4L2_PIX_FMT_GREY:
                dev->clientfmt.depth = 8;
                break;
        case V4L2_PIX_FMT_YUV420:
                dev->clientfmt.depth = 12;
                break;
        case V4L2_PIX_FMT_RGB555:
        case V4L2_PIX_FMT_RGB565:
                dev->clientfmt.flags = 0;
                /* fall thru */
        case V4L2_PIX_FMT_YUYV:
        case V4L2_PIX_FMT_UYVY:
                dev->clientfmt.depth = 16;
                break;
        case V4L2_PIX_FMT_BGR24:
                dev->clientfmt.depth = 24;
                dev->clientfmt.flags = 0;
                break;
        case V4L2_PIX_FMT_BGR32:
                dev->clientfmt.depth = 32;
                dev->clientfmt.flags = 0;
                break;
        default:
                /*  Unknown format: fall back to YUYV at 16 bpp  */
                debug_msg("unknown format %4.4sn",
                          (char *)&dev->clientfmt.pixelformat);
                dev->clientfmt.depth = 16;
                dev->clientfmt.pixelformat = V4L2_PIX_FMT_YUYV;
                dev->clientfmt.flags = 0;
                break;
        }
        /*  Raw capture is always YUYV: 2 bytes per pixel  */
        dev->capture_bypp = 2;
        /*  Interlacing only makes sense for heights above one field  */
        if (dev->clientfmt.height <= dev->source_height)
                dev->clientfmt.flags &= ~V4L2_FMT_FLAG_INTERLACED;
        if (dev->clientfmt.flags & V4L2_FMT_FLAG_INTERLACED) {
                /*  Interlaced: both fields, clamp height to [32, 2*field]  */
                dev->clientfmt.flags |= V4L2_FMT_FLAG_TOPFIELD |
                        V4L2_FMT_FLAG_BOTFIELD;
                if (dev->clientfmt.height > dev->source_height * 2)
                        dev->clientfmt.height = dev->source_height * 2;
                if (dev->clientfmt.height < 32)
                        dev->clientfmt.height = 32;
        } else {
                /* Make sure that at least 1 field is requested */
                if ((dev->clientfmt.flags &
                     (V4L2_FMT_FLAG_TOPFIELD | V4L2_FMT_FLAG_BOTFIELD)) == 0)
                        dev->clientfmt.flags |= V4L2_FMT_FLAG_TOPFIELD;
                if (dev->clientfmt.height > dev->source_height)
                        dev->clientfmt.height = dev->source_height;
                if (dev->clientfmt.height < 32)
                        dev->clientfmt.height = 32;
        }
        /*  Clamp width to [32, source] and round down to a multiple of 4  */
        if (dev->clientfmt.width > dev->source_width)
                dev->clientfmt.width = dev->source_width;
        if (dev->clientfmt.width < 32)
                dev->clientfmt.width = 32;
        dev->clientfmt.width  &= ~3;
        mgavideo_set_dims(dev->mga, dev->clientfmt.width, dev->clientfmt.height);
        /*  Derived sizes: client image, line stride, raw capture buffer  */
        dev->clientfmt.sizeimage = (dev->clientfmt.width
                * dev->clientfmt.height
                * dev->clientfmt.depth)
                / 8;
        dev->clientfmt.flags |= V4L2_FMT_FLAG_BYTESPERLINE;
        dev->clientfmt.bytesperline = (dev->clientfmt.width *
                dev->clientfmt.depth) / 8;
        dev->capture_size = dev->clientfmt.width
                * dev->clientfmt.height
                * dev->capture_bypp;
        /*  TODO: Any other driver state related to the image format  */
        return 1;
}
  1017. /*  Stop the music!
  1018.  */
  1019. static void
  1020. capture_abort(struct capture_device *dev)
  1021. {
  1022.         dev->grabber_enabled = 0;
  1023.         /*  Turn off the capture hardware  */
  1024.         grabbing_enable(dev, 0);
  1025. }
  1026. /*  Allocate buffers, and get everything ready to capture
  1027.  *  an image, but don't start capturing yet.
  1028.  */
  1029. static int
  1030. capture_begin(struct capture_device *dev)
  1031. {
  1032.         capture_abort(dev);
  1033.         if (dev->ready_to_capture)
  1034.                 return dev->ready_to_capture;
  1035.         if (!translate_setup(dev))
  1036.                 return dev->ready_to_capture;
  1037.         interrupt_enable(dev);
  1038.         return (dev->ready_to_capture = 1);
  1039. }
  1040. /*  Start an image capture
  1041.  */
static void
capture_grab_frame(struct capture_device *dev)
{
        /*  Start capturing the next frame, preparing the converter and
         *  interrupts first if necessary.  No-op if a capture is already
         *  armed and in flight; silently returns if setup fails.  */
        if (dev->ready_to_capture && dev->grabber_enabled) {
                return;
        }
        capture_begin(dev);
        if (!dev->ready_to_capture)
                return;
        /*  TODO: Prepare the hardware for the next capture  */
        /*  Set up stream_capture_buffer to point to the buffer to  */
        /*  capture the next frame into  */
        if (dev->streaming)
        {
                struct stream_buffer        *buf;
                /* Go straight into streaming buffer? */
                if (dev->translation.type == XLAT_NULL)
                {
                        buf = v4l2_q_peek_head(&dev->stream_q_capture);
                        if (buf != NULL)
                        {
                                /*  Direct-to-buffer DMA not implemented  */
                                //XXX no streaming yet dev->stream_capture_buffer = buf->vaddress;
                                //list = buf->dma_list;
                        }
                }
        }
        /*  Start the hardware  */
        grabbing_enable(dev, 1);
        dev->grabber_enabled = 1;
        dev->capture_completed = 0;
}
  1073. /*
  1074.  *        STREAMING CAPTURE
  1075.  */
  1076. static int/* 1 = success; 0 = failed */
  1077. capture_queuebuffer(struct capture_device *dev,
  1078.                     struct v4l2_buffer          *vidbuf)
  1079. {
  1080.         int                        i        = vidbuf->index;
  1081.         struct stream_buffer        *buf        = NULL;
  1082.         if (!dev->stream_buffers_mapped)
  1083.         {
  1084.                 debug_msg("QBUF no buffers mappedn");
  1085.                 return 0;
  1086.         }
  1087.         if (vidbuf->type != V4L2_BUF_TYPE_CAPTURE)
  1088.         {
  1089.                 debug_msg("QBUF wrong typen");
  1090.                 return 0;
  1091.         }
  1092.         if (i < 0 || i >= MAX_CAPTURE_BUFFERS || !dev->stream_buf[i].requested)
  1093.         {
  1094.                 debug_msg("QBUF buffer index %d is out of rangen", i);
  1095.                 return 0;
  1096.         }
  1097.         buf = &dev->stream_buf[i];
  1098.         if (!(buf->vidbuf.flags & V4L2_BUF_FLAG_MAPPED))
  1099.         {
  1100.                 debug_msg("QBUF buffer %d is not mappedn", i);
  1101.                 return 0;
  1102.         }
  1103.         if ((buf->vidbuf.flags & V4L2_BUF_FLAG_QUEUED))
  1104.         {
  1105.                 debug_msg("QBUF buffer %d is already queuedn", i);
  1106.                 return 0;
  1107.         }
  1108.         buf->vidbuf.flags &= ~V4L2_BUF_FLAG_DONE;
  1109.         v4l2_q_add_tail(&dev->stream_q_capture, &buf->qnode);
  1110.         buf->vidbuf.flags |= V4L2_BUF_FLAG_QUEUED;
  1111.         return 1;
  1112. }
  1113. static int/* 1 = got a buffer; 0 = no buffers */
  1114. capture_dequeuebuffer(struct capture_device *dev,
  1115.                       struct v4l2_buffer *buf)
  1116. {
  1117.         struct stream_buffer *newbuf;
  1118.         if (!dev->streaming || buf->type != V4L2_BUF_TYPE_CAPTURE)
  1119.         {
  1120.                 debug_msg("DQBUF not streaming or wrong buffer typen");
  1121.                 return 0;
  1122.         }
  1123.         newbuf = v4l2_q_del_head(&dev->stream_q_done);
  1124.         if (newbuf == NULL)
  1125.         {
  1126.                 debug_msg("DQBUF nothing on done queuen");
  1127.                 return 0;
  1128.         }
  1129.         newbuf->vidbuf.flags &= ~V4L2_BUF_FLAG_QUEUED;
  1130.         *buf = newbuf->vidbuf;
  1131.         return 1;
  1132. }
static int
capture_streamon(struct capture_device        *dev,
                 __u32                        type)
{
        /*  VIDIOC_STREAMON: reset stream state and performance counters,
         *  recycle leftover done buffers, and kick off the first capture.
         *  Returns 1 on success (or if already streaming), 0 on a bad
         *  buffer type.  */
        struct stream_buffer *buf;
        if (dev->streaming)
                return 1;
        if (type != V4L2_BUF_TYPE_CAPTURE)
        {
                debug_msg("STREAMON wrong buffer typen");
                return 0;
        }
        capture_abort(dev);/* cancel any capture that might be in progress */
        /*  -2 is a magic number that triggers start-of-stream logic in */
        /*    capture_interrupt()  */
        dev->stream_last_frame = -2;
        dev->perf.frames = 0;
        dev->perf.framesdropped = 0;
        dev->perf.bytesout = 0;
        /*  Can't capture frames faster than the video input  */
        if (dev->capture.timeperframe < dev->frame_period)
                dev->capture.timeperframe = dev->frame_period;
        /*  Move any leftover DONE buffers to the free pool */
        while ((buf = v4l2_q_del_head(&dev->stream_q_done)))
                buf->vidbuf.flags &= ~V4L2_BUF_FLAG_QUEUED;
        /*  Kick off the machine */
        dev->streaming = 1;
        capture_grab_frame(dev);
        return 1;
}
  1163. static void
  1164. capture_streamoff(struct capture_device        *dev,
  1165.                  __u32                        type)
  1166. {
  1167.         if (!dev->streaming)
  1168.                 return;
  1169.         if (type != V4L2_BUF_TYPE_CAPTURE)
  1170.         {
  1171.                 debug_msg("STREAMOFF wrong buffer typen");
  1172.                 return;
  1173.         }
  1174.         capture_abort(dev);
  1175.         dev->streaming = 0;
  1176.         /* Note: should really delay this till next capture */
  1177.         dev->perf.frames = 0;
  1178.         dev->perf.framesdropped = 0;
  1179.         dev->perf.bytesout = 0;
  1180. }
  1181. /*        Read out and convert the next frame
  1182.  */
  1183. static int /* returns length of data or negative for error */
  1184. capture_imagereadout(struct capture_device        *dev,
  1185.                      __u8                        *output_buffer,
  1186.                      int                        output_size,
  1187.                      int                        output_is_user)
  1188. {
  1189.         int        len;
  1190.         __u8*      input_buffer;
  1191.         dev->grabber_enabled = 0;
  1192.         grabbing_enable(dev, 0);
  1193. // XXX
  1194.         input_buffer = mgavideo_lock_video( dev->mga );
  1195.         len = translate_image(dev, input_buffer, output_buffer,
  1196.                               output_size, output_is_user);
  1197.         mgavideo_unlock_video( dev->mga );
  1198.         if (len < 0)
  1199.                 return len;
  1200.         ++dev->perf.frames;
  1201.         dev->perf.bytesout += len;
  1202.         return len;
  1203. }
  1204. /*  The hardware has issued the interrupt signal, do any post-capture
  1205.  *  processing that may be necessary.
  1206.  *  [This function is called indirectly through the immediate task queue;
  1207.  *  it executes at elevated IRQL, but it is interruptible. (It's a b.h.)]
  1208.  */
  1209. static void
  1210. capture_interrupt(void *v)
  1211. {
  1212.         struct capture_device        *dev = (struct capture_device *)v;
  1213.         struct stream_buffer        *buf;
  1214.         int                        len;
  1215.         struct timeval                timestamp_rough;
  1216.         unsigned long                raw_frame_num;
  1217.         unsigned long                next_raw_frame_to_keep;
  1218.         unsigned long                stream_frame_num;
  1219.         u64                        temp64;
  1220.         /*  TODO: Check for an interrupt pending on the device, and  */
  1221.         /*        return if there is no interrupt pending  */
  1222.         /*  (In this hardware-less demo I'll just check the completed flag) */
  1223.         if (!dev->grabber_enabled ||
  1224.             dev->capture_completed)
  1225.                 return;
  1226.         if (!dev->ints_enabled)
  1227.         {
  1228.                 err_msg("Can't process the interruptn");
  1229.                 return;
  1230.         }
  1231.         dev->capture_completed = 1;
  1232. {
  1233. struct timeval curr;
  1234. do_gettimeofday(&curr);
  1235. debug_msg("cap interrupt: time: %d:%dn", curr.tv_sec, curr.tv_usec);
  1236. }
  1237.         if (!dev->streaming)
  1238.         {
  1239.                 dev->time_acquired = current_time_ms();
  1240.                 /* DMA might not have finished, but we'll check in read() */
  1241.                 //debug_msg("New frame readyn");
  1242. debug_msg("interupt_cap: non_streaming wakup new_video_framen");
  1243.                 wake_up_interruptible(&dev->new_video_frame);
  1244.                 return;
  1245.         }
  1246.         /*  Only get here in streaming mode  */
  1247.         if (dev->stream_last_frame == -2)
  1248.         {/*        First frame of the stream  */
  1249.                 v4l2_masterclock_gettime(&dev->stream_begin);
  1250.                 dev->stream_last_frame = -1;
  1251.         }
  1252.         buf = v4l2_q_peek_head(&dev->stream_q_capture);
  1253.         if (buf == NULL)
  1254.         {/*        No available buffers. Skip this frame. This is not an  */
  1255.          /*        error, it's a normal way to throttle the capture rate  */
  1256.                 dev->grabber_enabled = 0;
  1257.                 grabbing_enable(dev, 0);
  1258.                 capture_grab_frame(dev);
  1259.                 return;
  1260.         }
  1261.         /*  Compute current stream time  */
  1262.         v4l2_masterclock_gettime(&timestamp_rough);
  1263.         v4l2_timeval_delta(&timestamp_rough,
  1264.                            &dev->stream_begin, &timestamp_rough);
  1265.         /*  Capture rate control  */
  1266.         raw_frame_num = v4l2_timeval_divide(
  1267.                 &timestamp_rough, dev->frame_period);
  1268.         temp64 = (u64)dev->capture.timeperframe
  1269.                 * (dev->stream_last_frame + 1)
  1270.                 + (dev->frame_period >> 1);
  1271.         next_raw_frame_to_keep = 
  1272.                 v4l2_math_div6432(temp64, dev->frame_period, NULL);
  1273. #if 0
  1274.         if (raw_frame_num < next_raw_frame_to_keep)
  1275.         {/*        Not time yet, don't keep this frame */
  1276.                 dev->grabber_enabled = 0;
  1277.                 grabbing_enable(dev, 0);
  1278.                 capture_grab_frame(dev);
  1279.                 return;
  1280.         }
  1281. #endif
  1282. {
  1283. struct timeval curr;
  1284. do_gettimeofday(&curr);
  1285. debug_msg("cap start: time: %d:%dn", curr.tv_sec, curr.tv_usec);
  1286. }
  1287.         /*  Want this frame  */
  1288.         len = capture_imagereadout(dev, buf->vaddress, buf->vidbuf.length, 0);
  1289. {
  1290. struct timeval curr;
  1291. do_gettimeofday(&curr);
  1292. debug_msg("cap end  : time: %d:%dn", curr.tv_sec, curr.tv_usec);
  1293. }
  1294.         if (len <= 0)
  1295.         {/*        Frame no good, DMA did not finish, etc. */
  1296.                 /*  Begin capturing the next frame now  */
  1297.                 capture_grab_frame(dev);
  1298.                 return;
  1299.         }
  1300.         /*  Fill in the buffer information fields  */
  1301.         buf->vidbuf.bytesused = len;
  1302. /* only mark as done if both frames for interlace have been
  1303.  * seen
  1304.  */
  1305.         buf->vidbuf.flags |= V4L2_BUF_FLAG_DONE | V4L2_BUF_FLAG_KEYFRAME;
  1306.         buf->vidbuf.timestamp = timestamp_rough;
  1307.         stream_frame_num = v4l2_timeval_correct(&buf->vidbuf.timestamp,
  1308.                                                 dev->capture.timeperframe);
  1309.         //debug_msg("Stream frame %4lu T= %lu.%06lun", stream_frame_num,
  1310.         //          buf->vidbuf.timestamp.tv_sec,buf->vidbuf.timestamp.tv_usec);
  1311.         if (stream_frame_num > dev->stream_last_frame + 1)
  1312.         {/*        We have missed one or more frames  */
  1313.                 dev->perf.framesdropped += stream_frame_num
  1314.                         - dev->stream_last_frame + 1;
  1315.         }
  1316.         dev->stream_last_frame = stream_frame_num;
  1317.         /*  Move buffer to done queue  */
  1318.         buf = v4l2_q_del_head(&dev->stream_q_capture);
  1319.         v4l2_q_add_tail(&dev->stream_q_done, &buf->qnode);
  1320.         /*  Begin capturing the next frame now  */
  1321.         capture_grab_frame(dev);
  1322.         /*  A new frame is ready!  */
  1323. debug_msg("interrupt_cap: waking up new_video_framen");
  1324.         wake_up_interruptible(&dev->new_video_frame);
  1325. }
  1326. /*  Read captured data into a user buffer.
  1327.  *  Return: negative = error
  1328.  *            0        = keep waiting
  1329.  *            positive = count of bytes read successfully
  1330.  */
  1331. static long
  1332. capture_read(struct capture_device *dev,
  1333.              __u8        *user_buffer,
  1334.              int        user_buffer_size)
  1335. {
  1336.         int                len = user_buffer_size;
  1337.         unsigned long        now;
  1338.         if (!dev->ints_enabled)
  1339.                 return -EIO;
  1340.         if (!dev->capture_completed)
  1341.         {/* No interrupt has occurred yet, or DMA didn't finish.  */
  1342.                 //debug_msg("No data ready.n");
  1343.                 if (!dev->grabber_enabled)
  1344.                         capture_grab_frame(dev);
  1345.                 return 0;/* caller should keep waiting */
  1346.         }
  1347.         
  1348.         now = current_time_ms();
  1349.         if (now - dev->time_acquired > MAX_FRAME_AGE)
  1350.         {/* Frame in buffer is stale, get a new one */
  1351.                 debug_msg("Stale frame, re-acquiring.n");
  1352.                 dev->grabber_enabled = 0;
  1353.                 grabbing_enable(dev, 0);
  1354.                 capture_grab_frame(dev);
  1355.                 return 0;/* caller should keep waiting */
  1356.         }
  1357.         len = capture_imagereadout(dev, user_buffer, user_buffer_size, 1);
  1358.         capture_grab_frame(dev);
  1359.         return len;
  1360. }
  1361. /*  Stop capturing and free all resources used for capture.
  1362.  */
static void
capture_close(struct capture_device *dev)
{
        /*  Stop capturing and free all resources used for capture:
         *  streaming state, converter tables, and the DMA list page.  */
  //        int        i;
        if (dev->streaming)
                capture_streamoff(dev, V4L2_BUF_TYPE_CAPTURE);
        capture_abort(dev);
        dev->ready_to_capture = 0;
        translate_close(dev);
        if (dev->capture_dma_list)
                free_page((unsigned long)dev->capture_dma_list);
        dev->capture_dma_list = 0;
#if 0
        /*  Disabled: stream buffer teardown -- apparently superseded by
         *  mmap_vma_close(), which frees vaddress/dma_list per buffer  */
        for (i = 0; i < MAX_CAPTURE_BUFFERS; ++i)
        {
                dev->stream_buf[i].requested = 0;
                if (dev->stream_buf[i].vaddress)
                        vfree(dev->stream_buf[i].vaddress);
                dev->stream_buf[i].vaddress = NULL;
                if (dev->stream_buf[i].dma_list)
                        free_page((unsigned long)dev->stream_buf[i].dma_list);
                dev->stream_buf[i].dma_list = NULL;
        }
#endif
}
  1388. /*
  1389.  *
  1390.  *        I N T E R R U P T   R O U T I N E S
  1391.  *
  1392.  */
  1393. static void
  1394. interrupt_disable(struct capture_device *dev)
  1395. {
  1396.         if (!dev->ints_enabled)
  1397.                 return;
  1398.         dev->ints_enabled = 0;
  1399.         /*  TODO: Disable interrupts on the device  */
  1400.         mgavideo_ivsync_enable( dev->mga, 0 );
  1401.         /*  Wake up any processes that might be waiting for a frame  */
  1402.         /*  and let them return an error  */
  1403. debug_msg("interrupt_disable: waking up new_video_framen");
  1404.         wake_up_interruptible(&dev->new_video_frame);
  1405. }
  1406. static void
  1407. interrupt_enable(struct capture_device *dev)
  1408. {
  1409. int even_fields;
  1410. int odd_fields;
  1411.         if (dev->ints_enabled)
  1412.                 interrupt_disable(dev);
  1413.         dev->ints_enabled = 1;
  1414.         /*  TODO: Enable interrupts on the device  */
  1415. even_fields= (dev->clientfmt.flags & V4L2_FMT_FLAG_TOPFIELD) ? 1:0;
  1416. odd_fields = (dev->clientfmt.flags & V4L2_FMT_FLAG_BOTFIELD) ? 1:0;
  1417.         mgavideo_register_bh( dev->mga, even_fields, odd_fields,
  1418. capture_interrupt, dev );
  1419.         mgavideo_ivsync_enable( dev->mga, 1 );
  1420. }
  1421. /*
  1422.  *
  1423.  *        M E M O R Y   M A P P I N G
  1424.  *
  1425.  */
  1426. static struct stream_buffer *
  1427. mmap_stream_buffer_from_offset(struct capture_device *dev,
  1428.                                unsigned long offset)
  1429. {
  1430.         int        i;
  1431.         for (i = 0; i < MAX_CAPTURE_BUFFERS; ++i)
  1432.                 if (offset == dev->stream_buf[i].vidbuf.offset)
  1433.                         return &dev->stream_buf[i];
  1434.         return NULL;
  1435. }
static int
mmap_request_buffers(struct capture_device *dev,
                     struct v4l2_requestbuffers *req)
{
        /*  VIDIOC_REQBUFS: grant between 1 and MAX_CAPTURE_BUFFERS buffer
         *  slots (clamping the caller's request) and initialize their
         *  descriptors.  Memory is only allocated later, at mmap() time.
         *  Returns 1 on success, 0 if buffers are currently mapped.  */
        int        i;
        u32        buflen;
        u32        type;
        if (dev->stream_buffers_mapped)
                return 0;/* can't make requests if buffers are mapped */
        if (req->count < 1)
                req->count = 1;
        if (req->count > MAX_CAPTURE_BUFFERS)
                req->count = MAX_CAPTURE_BUFFERS;
        type = V4L2_BUF_TYPE_CAPTURE;
        dev->stream_contig_map = 0;
        if (req->type & V4L2_BUF_REQ_CONTIG)
        {
                dev->stream_contig_map = 1;
                req->type = type | V4L2_BUF_REQ_CONTIG;
                /* note: _REQ_CONTIG is only used in v4l2_requestbuffers */
        }
        /*  The buffer length needs to be a multiple of the page size  */
        buflen = (dev->clientfmt.sizeimage + PAGE_SIZE - 1)
                & ~(PAGE_SIZE - 1);
        debug_msg("Granting %d buffersn",req->count);
        /*  Now initialize the buffer structures. Don't allocate the */
        /*  buffers until they're mapped. */
        for (i = 0; i < req->count; ++i)
        {
                dev->stream_buf[i].requested = 1;
                dev->stream_buf[i].vidbuf.index = i;
                dev->stream_buf[i].vidbuf.type = type;
                dev->stream_buf[i].vidbuf.offset = 4*i;/* anything unique */
                dev->stream_buf[i].vidbuf.length = buflen;
                dev->stream_buf[i].vidbuf.bytesused = 0;
                dev->stream_buf[i].vidbuf.timestamp.tv_sec = 0;
                dev->stream_buf[i].vidbuf.timestamp.tv_usec = 0;
                dev->stream_buf[i].vidbuf.flags = 0;
        }
        /*  Any slots beyond the granted count are explicitly revoked  */
        for (i = req->count; i < MAX_CAPTURE_BUFFERS; ++i)
                dev->stream_buf[i].requested = 0;
        dev->stream_buffers_requested = req->count;
        return 1;
}
  1480. static void
  1481. mmap_unrequest_buffers(struct capture_device *dev)
  1482. {
  1483.         int        i;
  1484.         if (dev->stream_buffers_requested == 0 ||
  1485.             dev->stream_buffers_mapped)
  1486.                 return;
  1487.         for (i = 0; i < MAX_CAPTURE_BUFFERS; ++i)
  1488.                 dev->stream_buf[i].requested = 0;
  1489.         dev->stream_buffers_requested = 0;
  1490. }
  1491. static void
  1492. mmap_vma_open(struct vm_area_struct *vma)
  1493. {
  1494.         struct capture_device *dev =
  1495.                 capture_device_from_file(vma->vm_file);
  1496.         if (dev == NULL)
  1497.                 return;
  1498.         //debug_msg("vma_open calledn");
  1499.         //MOD_INC_USE_COUNT;
  1500. }
  1501. static void
  1502. mmap_vma_close(struct vm_area_struct *vma)
  1503. {
  1504.         struct capture_device *dev =
  1505.                 capture_device_from_file(vma->vm_file);
  1506.         struct stream_buffer *buf =
  1507.                 mmap_stream_buffer_from_offset(dev, vma->vm_offset);
  1508.         int        i, n = 1;
  1509.         if (dev->stream_contig_map)
  1510.         {/*        Unmap all the buffers in one stroke  */
  1511.                 n = dev->stream_buffers_mapped;
  1512.                 buf = &dev->stream_buf[0];
  1513.         }
  1514.         for (i = 0; i < n; ++i)
  1515.         {
  1516.                 if (dev->streaming)
  1517.                 {
  1518.                         info_msg("Warning- munmap() called while streamingn");
  1519.                         capture_streamoff(dev, buf->vidbuf.type);
  1520.                 }
  1521.                 v4l2_q_yank_node(&dev->stream_q_capture, &buf->qnode);
  1522.                 v4l2_q_yank_node(&dev->stream_q_done, &buf->qnode);
  1523.                 if (buf->vaddress != NULL && i == 0)
  1524.                         vfree(buf->vaddress);
  1525.                 buf->vaddress = NULL;
  1526.                 if (buf->dma_list)
  1527.                         free_page((unsigned long)buf->dma_list);
  1528.                 buf->dma_list = NULL;
  1529.                 buf->vidbuf.flags = 0;
  1530.                 //debug_msg("Buffer %d deallocatedn",(int)vma->vm_offset/4);
  1531.                 ++buf;
  1532.                 if (dev->stream_buffers_mapped > 0)
  1533.                         --dev->stream_buffers_mapped;
  1534.         }
  1535.         //MOD_DEC_USE_COUNT;
  1536. }
  1537. static unsigned long
  1538. mmap_vma_nopage(struct vm_area_struct *vma,
  1539.                 unsigned long address, int write)
  1540. {
  1541.         struct capture_device        *dev;
  1542.         struct stream_buffer        *buf;
  1543.         unsigned long                offset_into_buffer;
  1544.         unsigned long                page;
  1545.         int                        n        = 1;
  1546.         dev = capture_device_from_file(vma->vm_file);
  1547.         if (dev == NULL)
  1548.                 return 0;
  1549.         if (dev->stream_contig_map)
  1550.         {
  1551.                 buf = &dev->stream_buf[0];
  1552.                 n = dev->stream_buffers_requested;
  1553.         }        
  1554.         else
  1555.                 buf = mmap_stream_buffer_from_offset(dev, vma->vm_offset);
  1556.         if (buf == NULL)
  1557.                 return 0;
  1558.         offset_into_buffer = address - vma->vm_start;
  1559.         if (offset_into_buffer >= buf->vidbuf.length * n)
  1560.         {
  1561.                 err_msg("Attempt to read past end of mmap() buffern");
  1562.                 return 0;
  1563.         }
  1564.         page = v4l2_vmalloc_to_page(buf->vaddress + offset_into_buffer);
  1565.         if (page == 0)
  1566.                 return 0;
  1567.         atomic_inc(&mem_map[MAP_NR(page)].count);
  1568.         return page;
  1569. }
  1570. static struct vm_operations_struct capture_vma_operations =
  1571. {
  1572.         mmap_vma_open, mmap_vma_close, NULL, NULL, NULL, NULL,
  1573.         mmap_vma_nopage,
  1574. };
  1575. /*
  1576.  *
  1577.  *        V I D E O   F O R   L I N U X   I N T E R F A C I N G
  1578.  *
  1579.  */
  1580. static int
  1581. v4l2_open(struct v4l2_device *v, int flags, void **idptr)
  1582. {
  1583.         struct capture_device *dev = (struct capture_device *)v;
  1584.         int        i, n;
  1585.         int        cap;
  1586.         for (i = 0, n = -1, cap = 0; i < MAX_OPENS; ++i)
  1587.         {
  1588.                 if (!dev->open_data[i].isopen)
  1589.                         n = i;/* available open_data structure */
  1590.                 else if (!dev->open_data[i].noncapturing)
  1591.                         cap = 1;/* another open is already capturing */
  1592.         }
  1593.         if (n == -1)/* No available open_data structures */
  1594.         {
  1595.                 debug_msg("No more opens on this devicen");
  1596.                 return -EBUSY;
  1597.         }
  1598.         if (flags & O_NONCAP)/*  Non-capturing open */
  1599.                 dev->open_data[n].noncapturing = 1;
  1600.         else if (cap)
  1601.         {
  1602.                 debug_msg("No more capturing opens on this devicen");
  1603.                 return -EBUSY;
  1604.         }
  1605.         else
  1606.         {
  1607.                 dev->open_data[n].noncapturing = 0;
  1608.                 /*  Keep track of whether there is a capturing open  */
  1609.                 ++dev->capturing_opens;
  1610.                 dev->perf.frames = 0;
  1611.                 dev->perf.framesdropped = 0;
  1612.                 dev->perf.bytesout = 0;
  1613.         }
  1614.         //MOD_INC_USE_COUNT;
  1615.         ++dev->open_count;
  1616.         dev->open_data[n].isopen = 1;
  1617.         dev->open_data[n].dev = dev;
  1618.         *idptr = &dev->open_data[n];
  1619.         if (dev->open_count == 1)
  1620.         {
  1621.                 dev->ready_to_capture = 0;/* benchmark changes parameters! */
  1622.                 dev->capture_completed = 0;
  1623.                 dev->grabber_enabled = 0;
  1624.                 v4l2_q_init(&dev->stream_q_capture);
  1625.                 v4l2_q_init(&dev->stream_q_done);
  1626.         }
  1627.         debug_msg("Open succeededn");
  1628.         /* frame counter for test images only */
  1629.         if (!dev->open_data[n].noncapturing)
  1630.                 dev->h=dev->m=dev->s=dev->f=0;
  1631.         return 0;
  1632. }
  1633. static void
  1634. v4l2_close(void *id)
  1635. {
  1636.         struct device_open *o = (struct device_open *)id;
  1637.         struct capture_device *dev = o->dev;
  1638.         if (!o->noncapturing)
  1639.         {
  1640.                 --dev->capturing_opens;
  1641.                 debug_msg("Closen");
  1642.         }
  1643.         o->isopen = 0;
  1644.         --dev->open_count;
  1645.         if (dev->open_count == 0)
  1646.         {
  1647.                 interrupt_disable(dev);
  1648.                 capture_close(dev);
  1649.         }
  1650.         //MOD_DEC_USE_COUNT;
  1651. }
  1652. static long
  1653. v4l2_write(void                *id, 
  1654.              const char                *buf, 
  1655.              unsigned long        count, 
  1656.              int                noblock)
  1657. {
  1658.         debug_msg("Write() not handledn");
  1659.         return -EINVAL;
  1660. }
  1661. /*  The arguments are already copied into kernel memory, so don't use
  1662.     copy_from_user() or copy_to_user() on arg.  */
  1663. static int
  1664. v4l2_ioctl(void                *id,
  1665.            unsigned int        cmd,
  1666.            void                *arg)
  1667. {
  1668.         struct device_open *o = (struct device_open *)id;
  1669.         struct capture_device *dev = o->dev;
  1670. //debug_msg("ioctl %dn", _IOC_NR(cmd));
  1671.         switch(cmd)
  1672.         {
  1673.         case VIDIOC_QUERYCAP:
  1674.         {
  1675.                 struct v4l2_capability *b = arg;
  1676.                 strcpy(b->name, dev->v.name);
  1677.                 b->type = V4L2_TYPE_CAPTURE;
  1678.                 b->flags = V4L2_FLAG_READ |
  1679.                            V4L2_FLAG_STREAMING |
  1680.                            V4L2_FLAG_PREVIEW |
  1681.                            V4L2_FLAG_TUNER |
  1682.                            V4L2_FLAG_SELECT;
  1683.                 b->inputs = KS_INPUT_COUNT;
  1684.                 b->outputs = 0;
  1685.                 b->audios = 0;
  1686.                 b->maxwidth = MAX_WIDTH;
  1687.                 b->maxheight = MAX_HEIGHT;
  1688.                 b->minwidth = MIN_WIDTH;
  1689.                 b->minheight = MIN_HEIGHT;
  1690.                 b->maxframerate = 30;
  1691.                 return 0;
  1692.         }
  1693.         case VIDIOC_ENUM_CAPFMT:
  1694.         {
  1695.                 struct v4l2_fmtdesc *f = arg;
  1696.                 if (f->index < 0 || f->index >= NUM_CAPFMT)
  1697.                         return -EINVAL;
  1698.                 *f = capfmt[f->index];
  1699.                 return 0;
  1700.         }
  1701.         case VIDIOC_G_FMT:
  1702.         {
  1703.                 memcpy(arg, &dev->clientfmt, sizeof(dev->clientfmt));
  1704.                 return 0;
  1705.         }
  1706.         case VIDIOC_S_FMT:
  1707.         {
  1708.                 struct v4l2_format *fmt = arg;
  1709.                 if (o->noncapturing)
  1710.                 {
  1711.                         debug_msg("S_FMT illegal in non-capturing openn");
  1712.                         return -EPERM;
  1713.                 }
  1714.                 dev->clientfmt = *fmt;
  1715.                 if (!capture_new_format(dev))
  1716.                         return -EINVAL;
  1717.                 mmap_unrequest_buffers(dev);
  1718.                 *fmt = dev->clientfmt;
  1719.                 return 0;
  1720.         }
  1721.         case VIDIOC_G_COMP:        return -EINVAL;
  1722.         case VIDIOC_S_COMP:        return -EINVAL;
  1723.         case VIDIOC_REQBUFS:
  1724.         {
  1725.                 struct v4l2_requestbuffers *req = arg;
  1726.                 if (o->noncapturing)
  1727.                 {
  1728.                         debug_msg("REQBUFS illegal in non-capturing openn");
  1729.                         return -EPERM;
  1730.                 }
  1731.                 if (dev->stream_buffers_mapped)
  1732.                 {
  1733.                         debug_msg("Can't request buffers if buffers are "
  1734.                                   "already mappedn");
  1735.                         return -EPERM;
  1736.                 }
  1737.                 capture_begin(dev);
  1738.                 if (!mmap_request_buffers(dev, req))
  1739.                         return -EINVAL;
  1740.                 return 0;
  1741.         }
  1742.         case VIDIOC_QUERYBUF:
  1743.         {
  1744.                 struct v4l2_buffer *buf = arg;
  1745.                 int        i;
  1746.                 if (o->noncapturing)
  1747.                 {
  1748.                         debug_msg("QUERYBUF illegal in non-capturing openn");
  1749.                         return -EPERM;
  1750.                 }
  1751.                 i = buf->index;
  1752.                 if (i < 0 || i >= MAX_CAPTURE_BUFFERS ||
  1753.                     !dev->stream_buf[i].requested ||
  1754.                     (buf->type & V4L2_BUF_TYPE_field) != 
  1755.                      (dev->stream_buf[i].vidbuf.type & V4L2_BUF_TYPE_field))
  1756.                 {
  1757.                         debug_msg("QUERYBUF bad parametern");
  1758.                         return -EINVAL;
  1759.                 }
  1760.                 *buf = dev->stream_buf[i].vidbuf;
  1761.                 return 0;
  1762.         }
  1763.         case VIDIOC_QBUF:
  1764.         {
  1765.                 struct v4l2_buffer *buf = arg;
  1766.                 if (o->noncapturing)
  1767.                 {
  1768.                         debug_msg("QBUF illegal in non-capturing openn");
  1769.                         return -EPERM;
  1770.                 }
  1771.                 if (!dev->stream_buffers_mapped)
  1772.                 {
  1773.                         debug_msg("QBUF no buffers are mappedn");
  1774.                         return -EINVAL;
  1775.                 }
  1776.                 if (!capture_queuebuffer(dev, buf))
  1777.                         return -EINVAL;
  1778.                 return 0;
  1779.         }
  1780.         case VIDIOC_DQBUF:
  1781.         {
  1782.                 struct v4l2_buffer *buf = arg;
  1783.                 if (o->noncapturing)
  1784.                 {
  1785.                         debug_msg("DQBUF illegal in non-capturing openn");
  1786.                         return -EPERM;
  1787.                 }
  1788.                 if (!capture_dequeuebuffer(dev, buf))
  1789.                         return -EINVAL;
  1790.                 return 0;
  1791.         }
  1792.         case VIDIOC_STREAMON:
  1793.         {
  1794.                 __u32        type = (__u32)arg;
  1795.                 if (o->noncapturing)
  1796.                 {
  1797.                         debug_msg("STREAMON illegal in non-capturing openn");
  1798.                         return -EPERM;
  1799.                 }
  1800.                 if (!capture_streamon(dev, type))
  1801.                         return -EINVAL;
  1802.                 return 0;
  1803.         }
  1804.         case VIDIOC_STREAMOFF:
  1805.         {
  1806.                 __u32        type = (__u32)arg;
  1807.                 if (o->noncapturing)
  1808.                 {
  1809.                         debug_msg("STREAMOFF illegal in non-capturing openn");
  1810.                         return -EPERM;
  1811.                 }
  1812.                 capture_streamoff(dev, type);
  1813.                 return 0;
  1814.         }
  1815.         /* Video Preview support */
  1816.         case VIDIOC_ENUM_FBUFFMT:
  1817.         {
  1818.                 struct v4l2_fmtdesc* fmt = (struct v4l2_fmtdesc*)arg;
  1819.                 strcpy( fmt->description, "Video Overlay" );
  1820.                 fmt->pixelformat = 0;
  1821.                 fmt->flags = 0;
  1822.                 fmt->depth = 0;
  1823.                 return 0;
  1824.         }
  1825.         case VIDIOC_G_FBUF:
  1826.                 (struct v4l2_framebuffer*)arg = &dev->fbuf;
  1827.                 return 0;
  1828.         case VIDIOC_S_FBUF:                return -EINVAL;
  1829.         case VIDIOC_G_WIN:
  1830.                 (struct v4l2_window*)arg = &dev->window;
  1831.                 return 0;
  1832.         case VIDIOC_S_WIN:
  1833.         {
  1834.                 struct v4l2_window* win = (struct v4l2_window*)arg;
  1835.                 int red, green, blue;
  1836.                 if( ( win->clips != NULL ) || ( win->clipcount != 0 ) ) 
  1837.                         return -EINVAL;
  1838.                 memcpy( &dev->window, win, sizeof(dev->window) );
  1839.                 
  1840.                 /* set on hardware */
  1841.                 mgavideo_set_window( dev->mga, win->x, win->y,
  1842.                                      win->width, win->height );
  1843. if ((long)win->chromakey < 0) {
  1844. mgavideo_set_overlay(dev->mga);
  1845. } else {
  1846. red = (win->chromakey >> 16) & 0xff;
  1847. green = (win->chromakey >> 8) & 0xff;
  1848. blue = (win->chromakey >> 0) & 0xff;
  1849. mgavideo_set_colorkey( dev->mga, red, green, blue );
  1850. }
  1851.                 return 0;
  1852.         }
  1853.         
  1854.         case VIDIOC_PREVIEW:
  1855. mgavideo_ivsync_enable( dev->mga, *(int *)arg );
  1856.                 mgavideo_preview_enable( dev->mga, *(int*)arg );
  1857.                 return 0;
  1858.         case VIDIOC_G_PERF:
  1859.         {
  1860.                 memcpy(arg, &dev->perf, sizeof(dev->perf));
  1861.                 return 0;
  1862.         }
  1863.         case VIDIOC_G_INPUT:
  1864.         {
  1865.                 memcpy(arg, &dev->input, sizeof(dev->input));
  1866.                 return 0;
  1867.         }
  1868.         case VIDIOC_S_INPUT:
  1869.         {
  1870.                 int        input = (int)arg;
  1871.                 if (input < 0 || input >= KS_INPUT_COUNT)
  1872.                 {
  1873.                         debug_msg("Input out of range %dn", input);
  1874.                         return -EINVAL;
  1875.                 }
  1876.                 if (input != dev->input)
  1877.                 {
  1878.                         dev->input = input;
  1879.                         set_video_input(dev, input);
  1880.                 }
  1881.                 return 0;
  1882.         }
  1883.         case VIDIOC_G_PARM:
  1884.         {
  1885.                 memcpy(arg, &dev->capture, sizeof(dev->capture));
  1886.                 return 0;
  1887.         }
  1888.         case VIDIOC_S_PARM:
  1889.         {
  1890.                 struct v4l2_captureparm *vp = arg;
  1891.                 if (vp->capturemode & ~dev->capture.capability)
  1892.                 {
  1893.                         debug_msg("PARM unsupported capture capabilityn");
  1894.                         return -EINVAL;
  1895.                 }
  1896.                 if ((dev->capture.capability & V4L2_CAP_TIMEPERFRAME) &&
  1897.                     vp->timeperframe < 10000)
  1898.                 {
  1899.                         debug_msg("PARM time per frame out of range %ldn",
  1900.                                   vp->timeperframe);
  1901.                         return -EINVAL;
  1902.                 }
  1903.                 if (vp->capturemode != dev->capture.capturemode &&
  1904.                     !o->noncapturing && dev->streaming)
  1905.                         return -EINVAL;
  1906.                 if (o->noncapturing)
  1907.                         return 0;
  1908.                 if (vp->capturemode != dev->capture.capturemode)
  1909.                 {
  1910.                         dev->capture.capturemode = vp->capturemode;
  1911.                         capture_new_format(dev);
  1912.                 }
  1913.                 if ((vp->capturemode & V4L2_CAP_TIMEPERFRAME) &&
  1914.                     vp->timeperframe >= dev->frame_period)
  1915.                         dev->capture.timeperframe = vp->timeperframe;
  1916.                 else
  1917.                         dev->capture.timeperframe = dev->frame_period;
  1918.                 return 0;
  1919.         }
  1920.         case VIDIOC_G_STD:
  1921.         {
  1922.                 struct v4l2_standard *std = arg;
  1923.                 v4l2_video_std_construct(std, dev->standard, 0);
  1924.                 return 0;
  1925.         }
  1926.         case VIDIOC_S_STD:
  1927.         {
  1928.                 struct v4l2_standard        *std = arg;
  1929.                 int                        id;
  1930.                 if ((o->noncapturing && dev->capturing_opens) ||
  1931.                     dev->stream_buffers_mapped) {
  1932. printk("noncapturing=%d capturing_opens=%d mapped=%dn",
  1933. o->noncapturing, dev->capturing_opens, dev->stream_buffers_mapped);
  1934.                         return -EPERM;
  1935. }
  1936.                 id = v4l2_video_std_confirm(std);
  1937.                 if (!((1 << id) & dev->standards))
  1938.                 {
  1939. printk("Bad standard: %un", (unsigned)id);
  1940.                         debug_msg("Bad standard: %un", (unsigned)id);
  1941.                         return -EINVAL;
  1942.                 }
  1943.                 set_video_standard(dev, id);
  1944.                 return 0;
  1945.         }
  1946.         case VIDIOC_ENUMSTD:
  1947.         {
  1948.                 struct v4l2_enumstd *estd = arg;
  1949.                 __u32        b, i;
  1950.                 if (estd->index < 0 || estd->index > 30)
  1951.                         return -EINVAL;
  1952.                 for (b = 1, i = 0; b < 32; ++b)
  1953.                 {
  1954.                         if (((1 << b) & dev->standards) == 0)
  1955.                                 continue;
  1956.                         if (i == estd->index)
  1957.                         {
  1958.                                 v4l2_video_std_construct(&estd->std, b, 0);
  1959.                                 estd->inputs = (__u32)-1; /* all inputs */
  1960.                                 estd->outputs = 0;
  1961.                                 return 0;
  1962.                         }
  1963.                         ++i;
  1964.                 }
  1965.                 return -EINVAL;
  1966.         }
  1967.         case VIDIOC_ENUMINPUT:
  1968.         {
  1969.                 struct v4l2_input *vi = arg;
  1970.                 if (vi->index < 0 || vi->index >= KS_INPUT_COUNT)
  1971.                         return -EINVAL;
  1972.                 *vi = dev->source[vi->index].input;
  1973.                 return 0;
  1974.         }
  1975.         case VIDIOC_QUERYCTRL:
  1976.         {
  1977.                 struct v4l2_queryctrl        *qc = arg;
  1978.                 int                        i;
  1979.                 i = find_vctrl(qc->id);
  1980.                 if (i < 0)
  1981.                 {
  1982.                         return i;
  1983.                 }
  1984.                 /*  V4L2 filled in category and catname, preserve them */
  1985.                 capture_control[i].category = qc->category;
  1986.                 memcpy(capture_control[i].catname, qc->catname, 
  1987.                        sizeof(qc->catname));
  1988.                 *qc = capture_control[i];
  1989.                 return 0;
  1990.         }
  1991.         case VIDIOC_QUERYMENU:
  1992.         {
  1993.                 struct v4l2_querymenu        *qm = arg;
  1994.                 return vctrl_querymenu(qm);
  1995.         }
  1996.         case VIDIOC_G_CTRL:
  1997.         {
  1998.                 struct v4l2_control        *vc = arg;
  1999.                 int                        i;
  2000.                 i = find_vctrl(vc->id);
  2001.                 if (i < 0)
  2002.                         return i;
  2003.                 vc->value = dev->source[dev->input].control[i];
  2004.                 return 0;
  2005.         }
  2006.         case VIDIOC_S_CTRL:
  2007.         {
  2008.                 struct v4l2_control        *vc = arg;
  2009.                 int                        i;
  2010.                 i = find_vctrl(vc->id);
  2011.                 if (i < 0)
  2012.                         return i;
  2013.                 dev->source[dev->input].control[i] = vc->value;
  2014.                 device_tone_controls(dev);
  2015.                 return 0;
  2016.         }
  2017.         case VIDIOC_G_TUNER:
  2018.         {
  2019.                 struct v4l2_tuner* tuner = (struct v4l2_tuner*)arg;
  2020.                 if( dev->source[tuner->input].input.type !=
  2021.                                         V4L2_INPUT_TYPE_TUNER ) {
  2022.                         return -EINVAL;
  2023.                 }
  2024.                 memcpy( tuner, &dev->source[tuner->input].tuner,
  2025.                                         sizeof( *tuner ) );
  2026.                 break;
  2027.         }
  2028.         case VIDIOC_S_TUNER:        return -EINVAL;
  2029.         case VIDIOC_G_FREQ:
  2030.                 memcpy(arg, &dev->source[dev->input].freq, sizeof(int));
  2031.                 break;
  2032.         
  2033.         case VIDIOC_S_FREQ:
  2034.                 set_video_freq( dev, arg );
  2035.                 break;
  2036.         case VIDIOC_G_AUDIO:        return -EINVAL;
  2037.         {
  2038.                 struct v4l2_audio* audio = (struct v4l2_audio*)arg;
  2039.                 if( dev->source[audio->audio].input.capability &
  2040.                                         V4L2_INPUT_CAP_AUDIO ) {
  2041.                         return -EINVAL;
  2042.                 }
  2043.                 memcpy( audio, &dev->source[audio->audio].audio,
  2044.                                         sizeof( *audio ) );
  2045.                 break;
  2046.         }
  2047.         case VIDIOC_S_AUDIO:        return -EINVAL;
  2048.         default:
  2049.                 return -ENOIOCTLCMD;
  2050.         }
  2051.         return 0;
  2052. }
  2053. static int
  2054. v4l2_mmap(void                        *id,
  2055.           struct vm_area_struct *vma)
  2056. {
  2057.         struct device_open        *o   = (struct device_open *)id;
  2058.         struct capture_device        *dev = o->dev;
  2059.         struct stream_buffer        *buf;
  2060.         int                        i, n = 1;
  2061.         if (o->noncapturing)
  2062.         {
  2063.                 debug_msg("mmap() called on non-capturing openn");
  2064.                 return -ENODEV;
  2065.         }
  2066.         buf = mmap_stream_buffer_from_offset(dev, vma->vm_offset);
  2067.         if (dev->stream_contig_map)
  2068.         {/*        N buffers in one contiguous map  */
  2069.                 buf = &dev->stream_buf[0];
  2070.                 n = dev->stream_buffers_requested;
  2071.         }
  2072.         if (buf == NULL)
  2073.         {
  2074.                 debug_msg("mmap() Invalid offset parametern");
  2075.                 return -EINVAL;/* no such buffer */
  2076.         }
  2077.         if (buf->vidbuf.length * n != vma->vm_end - vma->vm_start)
  2078.         {
  2079.                 debug_msg("mmap() Wrong length parametern");
  2080.                 return -EINVAL;/* wrong length */
  2081.         }
  2082.         for (i = 0; i < n; ++i)
  2083.         {
  2084.                 if (!buf->requested)
  2085.                 {
  2086.                         debug_msg("mmap() Buffer is not available for"
  2087.                                   " mappingn");
  2088.                         return -EINVAL;/* not requested */
  2089.                 }
  2090.                 if (buf->vidbuf.flags & V4L2_BUF_FLAG_MAPPED)
  2091.                 {
  2092.                         debug_msg("mmap() Buffer is already mappedn");
  2093.                         return -EINVAL;/* already mapped */
  2094.                 }
  2095.                 if (buf->vaddress != NULL)
  2096.                         vfree(buf->vaddress);
  2097.                 if (i == 0)
  2098.                         buf->vaddress = vmalloc(buf->vidbuf.length * n);
  2099.                 else
  2100.                         buf->vaddress = buf[-1].vaddress + buf->vidbuf.length;
  2101.                 if (buf->vaddress == NULL)
  2102.                 {
  2103.                         err_msg("Could not allocate mmap() buffern");
  2104.                         return -ENODEV;
  2105.                 }
  2106. #if 0                /*  TODO: build scatter list for buffer if using DMA  */
  2107.                 if ((using DMA) &&
  2108.                     !bm_build_scatter_list(dev, buf->vaddress, &buf->dma_list))
  2109.                         return -ENODEV;
  2110. #endif
  2111.                 buf->vidbuf.flags |= V4L2_BUF_FLAG_MAPPED;
  2112.                 ++dev->stream_buffers_mapped;
  2113.                 ++buf;
  2114.         }
  2115.         vma->vm_ops = &capture_vma_operations;
  2116.         if (vma->vm_ops->open)
  2117.                 vma->vm_ops->open(vma);
  2118.         /*  Note: vma->vm_file will be set up by V4L2  */
  2119.         return 0;
  2120. }
  2121. static int
  2122. v4l2_poll(void        *id,
  2123.             struct file        *file,
  2124.             poll_table        *table)
  2125. {
  2126.         struct device_open *o = (struct device_open *)id;
  2127.         struct capture_device *dev = o->dev;
  2128. debug_msg("starting pool: noncapturing=%d streaming=%dn", 
  2129. o->noncapturing, dev->streaming);
  2130.         if (o->noncapturing)
  2131.         {
  2132.                 debug_msg("poll() illegal in non-capturing openn");
  2133.                 return POLLERR;
  2134.         }
  2135.         if (dev->streaming)
  2136.         {
  2137.                 void        *node;
  2138.                 node = v4l2_q_peek_head(&dev->stream_q_done);
  2139. debug_msg("poll: check done queue: %xn", node);
  2140.                 if (node != NULL)
  2141.                         return (POLLIN | POLLRDNORM);/* data is ready now */
  2142.                 node = v4l2_q_peek_head(&dev->stream_q_capture);
  2143. debug_msg("poll: check capture queue: %xn", node);
  2144.                 if (node == NULL)
  2145.                         return POLLERR;  /* no frames queued */
  2146. {
  2147. struct timeval curr;
  2148. do_gettimeofday(&curr);
  2149. debug_msg("waiting on new_video_frame: time: %d:%dn", curr.tv_sec, curr.tv_usec);
  2150. }
  2151.                 poll_wait(file, &dev->new_video_frame, table);
  2152. {
  2153. struct timeval curr;
  2154. do_gettimeofday(&curr);
  2155. debug_msg("done waiting on new_video_frame: time: %d:%dn", curr.tv_sec, curr.tv_usec);
  2156. }
  2157.                 return 0;
  2158.         }
  2159.         /*  Capture is through read() call */
  2160.         if (dev->capture_completed)/* data is ready now */
  2161.                 return (POLLIN | POLLRDNORM);
  2162.         capture_grab_frame(dev);/* does nothing if capture is in progress */
  2163.         if (!dev->ready_to_capture)/* Can't grab frames! */
  2164.                 return POLLERR;
  2165.         poll_wait(file, &dev->new_video_frame, table);
  2166.         return 0;
  2167. }
  2168. static long
  2169. v4l2_read(void                *id,
  2170.             char                *buf,
  2171.             unsigned long        count,
  2172.             int                        noblock)
  2173. {
  2174.         struct device_open *o = (struct device_open *)id;
  2175.         struct capture_device *dev = o->dev;
  2176.         long        len = 0;
  2177.         long        my_timeout;
  2178.         if (o->noncapturing)
  2179.         {
  2180.                 debug_msg("read() illegal in non-capturing openn");
  2181.                 return -EPERM;
  2182.         }
  2183.         if (dev->streaming)
  2184.         {
  2185.                 debug_msg("Can't read() when streaming is onn");
  2186.                 return -EPERM;
  2187.         }
  2188.         capture_grab_frame(dev);/* does nothing if capture is in progress */
  2189.         if (!dev->ready_to_capture)
  2190.         {
  2191.                 debug_msg("Can't grab frames!n");
  2192.                 return 0;
  2193.         }
  2194.         my_timeout = HZ / 5;
  2195. #if LINUX_VERSION_CODE < KERNEL_VERSION(2,1,127)
  2196.         current->timeout = jiffies + my_timeout;
  2197. #endif
  2198.         while (len == 0)
  2199.         {
  2200.                 if (noblock)
  2201.                 {
  2202.                         if (!dev->capture_completed)
  2203.                                 return -EAGAIN;
  2204.                 }
  2205.                 else
  2206.                 {
  2207.                         /* watch out for race condition going to sleep! */
  2208.                         cli();
  2209.                         if (!dev->capture_completed)
  2210.                         {
  2211. debug_msg("read: wainting on new_video_framen");
  2212. #if LINUX_VERSION_CODE < KERNEL_VERSION(2,1,127)
  2213.                                 interruptible_sleep_on(&dev->new_video_frame);
  2214. #else
  2215.                                 my_timeout = interruptible_sleep_on_timeout(
  2216.                                         &dev->new_video_frame, my_timeout);
  2217. #endif
  2218. debug_msg("readL wakeup on new_video_timeoutn");
  2219.                         }
  2220.                         sti();
  2221.                 }
  2222. #if LINUX_VERSION_CODE < KERNEL_VERSION(2,1,127)
  2223.                 if (current->timeout <= jiffies)
  2224. #else
  2225.                 if (my_timeout == 0)
  2226. #endif
  2227.                 {
  2228. printk("Timeout on readn");
  2229.                         debug_msg("Timeout on readn");
  2230.                         break;
  2231.                 }
  2232.                 len = capture_read(dev, buf, count);
  2233.         }
  2234. #if LINUX_VERSION_CODE < KERNEL_VERSION(2,1,127)
  2235.         current->timeout = 0;
  2236. #endif
  2237.         //debug_msg("read %dn", (int)len);
  2238.         return len;
  2239. }
  2240. /*
  2241.  *        Remaining initialization of video decoder etc. This is only
  2242.  *        done when the device is successfully identified and registered.
  2243.  */
/*
 *      Finish initializing the device after it has been successfully
 *      identified and registered: fill in the video input table, the
 *      tuner and audio descriptors, pick default input/standard, and
 *      set default capture, preview-window and framebuffer parameters.
 *      Installed as dev->v.initialize; returns 0 (never fails).
 */
static int
v4l2_init_done(struct v4l2_device *v)
{
        struct capture_device *dev = (struct capture_device *)v;
        int        i;
        /*  Initialize video input array: every input starts with the
         *  mid-scale/default picture control values from the global
         *  capture_control[] table.  */
        for (i = 0; i < KS_INPUT_COUNT; ++i)
        {
                /*  Initialize video control properties        */
                dev->source[i].control[VCTRL_BRIGHTNESS] =
                        capture_control[VCTRL_BRIGHTNESS].default_value;
                dev->source[i].control[VCTRL_CONTRAST] =
                        capture_control[VCTRL_CONTRAST].default_value;
                dev->source[i].control[VCTRL_SATURATION] =
                        capture_control[VCTRL_SATURATION].default_value;
                dev->source[i].control[VCTRL_HUE] =
                        capture_control[VCTRL_HUE].default_value;
        }
        /*  Composite input: camera-type, has an associated audio line  */
        dev->source[KS_INPUT_COMPOSITE].input.index = KS_INPUT_COMPOSITE;
        strcpy(dev->source[KS_INPUT_COMPOSITE].input.name, "Composite");
        dev->source[KS_INPUT_COMPOSITE].input.type = V4L2_INPUT_TYPE_CAMERA;
        dev->source[KS_INPUT_COMPOSITE].input.capability = V4L2_INPUT_CAP_AUDIO;
        dev->source[KS_INPUT_COMPOSITE].input.assoc_audio = 0;
        /*  S-Video input: camera-type, no audio  */
        dev->source[KS_INPUT_SVIDEO].input.index = KS_INPUT_SVIDEO;
        strcpy(dev->source[KS_INPUT_SVIDEO].input.name, "S-Video");
        dev->source[KS_INPUT_SVIDEO].input.type = V4L2_INPUT_TYPE_CAMERA;
        dev->source[KS_INPUT_SVIDEO].input.capability = 0;
        dev->source[KS_INPUT_SVIDEO].input.assoc_audio = 0;
        /*  Tuner input plus its tuner and audio descriptors  */
        dev->source[KS_INPUT_TUNER].input.index = KS_INPUT_TUNER;
        strcpy(dev->source[KS_INPUT_TUNER].input.name, "Tuner");
        dev->source[KS_INPUT_TUNER].input.type = V4L2_INPUT_TYPE_TUNER;
        dev->source[KS_INPUT_TUNER].input.capability = V4L2_INPUT_CAP_AUDIO;
        dev->source[KS_INPUT_TUNER].input.assoc_audio = 0;
        dev->source[KS_INPUT_TUNER].tuner.input = KS_INPUT_TUNER;
        strcpy(dev->source[KS_INPUT_TUNER].tuner.name, "Tuner");
        dev->source[KS_INPUT_TUNER].tuner.capability = 0;
        /*  Tuner range in V4L2 tuner units (1/16 MHz steps, apparently
         *  derived as kHz/62.5 -> /625 with a x10 numerator) — covers
         *  roughly 55.25 MHz to 801.25 MHz  */
        dev->source[KS_INPUT_TUNER].tuner.rangelow = 552500 / 625;
        dev->source[KS_INPUT_TUNER].tuner.rangehigh = 8012500 / 625;
        dev->source[KS_INPUT_TUNER].tuner.rxsubchans = V4L2_TUNER_SUB_MONO;
        dev->source[KS_INPUT_TUNER].tuner.audmode = V4L2_TUNER_MODE_MONO;
        dev->source[KS_INPUT_TUNER].tuner.afc = 0;
        dev->source[KS_INPUT_TUNER].audio.audio = 0;
        strcpy(dev->source[KS_INPUT_TUNER].audio.name, "Audio");
        dev->source[KS_INPUT_TUNER].audio.capability = 0;
        dev->source[KS_INPUT_TUNER].audio.mode = 0;
        /*  Digital YUV656 inputs (60 Hz and 50 Hz variants), no audio  */
        dev->source[KS_INPUT_YUV656_60HZ].input.index = KS_INPUT_YUV656_60HZ;
        strcpy(dev->source[KS_INPUT_YUV656_60HZ].input.name, "YUV656 60Hz");
        dev->source[KS_INPUT_YUV656_60HZ].input.type = V4L2_INPUT_TYPE_CAMERA;
        dev->source[KS_INPUT_YUV656_60HZ].input.capability = 0;
        dev->source[KS_INPUT_YUV656_60HZ].input.assoc_audio = 0;
        dev->source[KS_INPUT_YUV656_50HZ].input.index = KS_INPUT_YUV656_50HZ;
        strcpy(dev->source[KS_INPUT_YUV656_50HZ].input.name, "YUV656 50Hz");
        dev->source[KS_INPUT_YUV656_50HZ].input.type = V4L2_INPUT_TYPE_CAMERA;
        dev->source[KS_INPUT_YUV656_50HZ].input.capability = 0;
        dev->source[KS_INPUT_YUV656_50HZ].input.assoc_audio = 0;
        
        /*  Supported standards as a bitmask indexed by V4L2_STD_* value  */
        dev->standards = (1<<V4L2_STD_PAL)
                       | (1<<V4L2_STD_NTSC)
                       | (1<<V4L2_STD_SECAM);
        /*  BUG: get defaults from user somehow...  */
        set_video_input(dev, KS_INPUT_COMPOSITE);
        set_video_standard(dev, V4L2_STD_NTSC);
        /*  Capture parameters  */
        dev->capture.capability = V4L2_CAP_TIMEPERFRAME;
        dev->capture.capturemode = 0;
        dev->capture.extendedmode = 0;
        dev->capture.timeperframe = dev->frame_period;
        /*  Default capture dimensions: one 704x240 field in RGB565;
         *  bytesperline/sizeimage left 0 for capture_new_format() to
         *  derive from width/depth  */
        dev->clientfmt.width = 704;
        dev->clientfmt.height = 240;
        dev->clientfmt.depth = 16;
        dev->clientfmt.pixelformat = V4L2_PIX_FMT_RGB565;
        dev->clientfmt.flags = 0;
        dev->clientfmt.bytesperline = 0;
        dev->clientfmt.sizeimage = 0;
        capture_new_format(dev);
        /* Default preview window: full 704x480 frame, no clip list */
        dev->window.x = 0;
        dev->window.y = 0;
        dev->window.width = 704;
        dev->window.height = 480;
        dev->window.chromakey = 0x000800;
        dev->window.clips = NULL;
        dev->window.clipcount = 0;
        /* Default preview framebuffer: primary display, overlay with
         * chromakeying, scaling in both directions supported */
        dev->fbuf.capability = V4L2_FBUF_CAP_EXTERNOVERLAY
                             | V4L2_FBUF_CAP_CHROMAKEY
                             | V4L2_FBUF_CAP_SCALEUP
                             | V4L2_FBUF_CAP_SCALEDOWN;
        dev->fbuf.flags = V4L2_FBUF_FLAG_PRIMARY
                        | V4L2_FBUF_FLAG_OVERLAY
                        | V4L2_FBUF_FLAG_CHROMAKEY;
        dev->fbuf.base[0] = mgavideo_get_base( dev->mga );
        /* dev->fbuf.fmt = ? */
        return 0;
}
  2340. /*  =====================================================================
  2341.  *        The functions below this point are only called during loading
  2342.  *        and unloading of the driver.
  2343.  */
  2344. /*
 *        D E V I C E   I N I T I A L I Z A T I O N   R O U T I N E S
  2346.  *
  2347.  *        These routines locate and enable the hardware, and initialize
  2348.  *        the device structure. 
  2349.  */
  2350. static int
  2351. config_a_device(struct capture_device *dev)
  2352. {
  2353.         sprintf(dev->shortname, "capture%d", dev - capture);
  2354.         dev->mga = mgavideo_get();
  2355.         if( dev->mga == NULL )
  2356.                 return 0;
  2357.         device_initialize(dev);
  2358.         sprintf(dev->v.name, "V4L2 Video Capture Driver (%d)", dev-capture);
  2359.         dev->v.type = V4L2_TYPE_CAPTURE;
  2360.         dev->v.minor = unit_video[dev-capture];
  2361.         dev->v.open = v4l2_open;
  2362.         dev->v.close = v4l2_close;
  2363.         dev->v.read = v4l2_read;
  2364.         dev->v.write = v4l2_write;
  2365.         dev->v.ioctl = v4l2_ioctl;
  2366.         dev->v.mmap = v4l2_mmap;
  2367.         dev->v.poll = v4l2_poll;
  2368.         dev->v.initialize = v4l2_init_done;
  2369.         dev->v.priv = NULL;
  2370.         if(!( mgavideo_querycaps( dev->mga ) & MGAVIDEO_HAS_DECODER ))
  2371.         {
  2372.                 err_msg("Bad or unrecognized video decodern");
  2373. mgavideo_release(dev->mga);
  2374.                 return 0;/* failed */
  2375.         }
  2376.         return 1;
  2377. }
  2378. static void
  2379. unconfig_a_device(struct capture_device *dev)
  2380. {
  2381.         interrupt_disable(dev);
  2382.         capture_close(dev);
  2383.         /*  TODO: Unconfigure the device, free the I/O port, etc.  */
  2384.         mgavideo_release( dev->mga );
  2385.         if (dev->is_registered)
  2386.         {
  2387.                 v4l2_unregister_device((struct v4l2_device *)dev);
  2388.                 info_msg("Removed device %sn", dev->shortname);
  2389.         }
  2390.         memset(dev, 0, sizeof(capture[0]));
  2391. }
  2392. /*
  2393.  *        M O D U L E   I N I T   A N D   C L E A N U P
  2394.  */
  2395. int
  2396. init_module(void)
  2397. {
  2398.         int        i;
  2399.         for (i = 0; i < NBOARDS; ++i)
  2400.         {
  2401.                 memset(&capture[i], 0, sizeof(capture[0]));
  2402.                 if (!config_a_device(&capture[i]))
  2403.                 {
  2404.                         break;
  2405.                 }
  2406.                 if (v4l2_register_device(
  2407.                         (struct v4l2_device *)&capture[i]) != 0)
  2408.                 {
  2409.                         err_msg("Couldn't register the driver.n");
  2410.                         unconfig_a_device(&capture[i]);
  2411.                         return 0;
  2412.                 }
  2413.                 capture[i].is_registered = 1;
  2414.         }
  2415.         if (i == 0)
  2416.         {
  2417.                 err_msg("No devices found.n");
  2418.                 return -ENODEV;/* cleanup will not be called */
  2419.         }
  2420.         return 0;
  2421. }
  2422. void
  2423. cleanup_module(void)
  2424. {
  2425.         int        i;
  2426.         for (i = 0; i < NBOARDS; ++i)
  2427.                 unconfig_a_device(&capture[i]);
  2428. }