was read the article
array:23 [ "pii" => "S1665642314716086" "issn" => "16656423" "doi" => "10.1016/S1665-6423(14)71608-6" "estado" => "S300" "fechaPublicacion" => "2014-02-01" "aid" => "71608" "copyright" => "Universidad Nacional Autónoma de México" "copyrightAnyo" => "2014" "documento" => "article" "licencia" => "http://creativecommons.org/licenses/by-nc-nd/4.0/" "subdocumento" => "fla" "cita" => "Journal of Applied Research and Technology. 2014;12:80-6" "abierto" => array:3 [ "ES" => true "ES2" => true "LATM" => true ] "gratuito" => true "lecturas" => array:2 [ "total" => 1886 "formatos" => array:3 [ "EPUB" => 37 "HTML" => 1392 "PDF" => 457 ] ] "itemSiguiente" => array:18 [ "pii" => "S1665642314716098" "issn" => "16656423" "doi" => "10.1016/S1665-6423(14)71609-8" "estado" => "S300" "fechaPublicacion" => "2014-02-01" "aid" => "71609" "copyright" => "Universidad Nacional Autónoma de México" "documento" => "article" "licencia" => "http://creativecommons.org/licenses/by-nc-nd/4.0/" "subdocumento" => "fla" "cita" => "Journal of Applied Research and Technology. 
2014;12:87-103" "abierto" => array:3 [ "ES" => true "ES2" => true "LATM" => true ] "gratuito" => true "lecturas" => array:2 [ "total" => 1237 "formatos" => array:3 [ "EPUB" => 33 "HTML" => 832 "PDF" => 372 ] ] "en" => array:11 [ "idiomaDefecto" => true "titulo" => "Intelligent Image Retrieval Techniques: A Survey" "tienePdf" => "en" "tieneTextoCompleto" => "en" "tieneResumen" => "en" "paginas" => array:1 [ 0 => array:2 [ "paginaInicial" => "87" "paginaFinal" => "103" ] ] "contieneResumen" => array:1 [ "en" => true ] "contieneTextoCompleto" => array:1 [ "en" => true ] "contienePdf" => array:1 [ "en" => true ] "resumenGrafico" => array:2 [ "original" => 0 "multimedia" => array:7 [ "identificador" => "fig0005" "etiqueta" => "Figure 1" "tipo" => "MULTIMEDIAFIGURA" "mostrarFloat" => true "mostrarDisplay" => false "figura" => array:1 [ 0 => array:4 [ "imagen" => "gr1.jpeg" "Alto" => 423 "Ancho" => 933 "Tamanyo" => 61839 ] ] "descripcion" => array:1 [ "en" => "<p id="spar0005" class="elsevierStyleSimplePara elsevierViewall">General image retrieval process</p>" ] ] ] "autores" => array:1 [ 0 => array:2 [ "autoresLista" => "Mussarat Yasmin, Sajjad Mohsin, Muhammad Sharif" "autores" => array:3 [ 0 => array:2 [ "nombre" => "Mussarat" "apellidos" => "Yasmin" ] 1 => array:2 [ "nombre" => "Sajjad" "apellidos" => "Mohsin" ] 2 => array:2 [ "nombre" => "Muhammad" "apellidos" => "Sharif" ] ] ] ] ] "idiomaDefecto" => "en" "EPUB" => "https://multimedia.elsevier.es/PublicationsMultimediaV1/item/epub/S1665642314716098?idApp=UINPBA00004N" "url" => "/16656423/0000001200000001/v2_201505081640/S1665642314716098/v2_201505081640/en/main.assets" ] "itemAnterior" => array:18 [ "pii" => "S1665642314716074" "issn" => "16656423" "doi" => "10.1016/S1665-6423(14)71607-4" "estado" => "S300" "fechaPublicacion" => "2014-02-01" "aid" => "71607" "copyright" => "Universidad Nacional Autónoma de México" "documento" => "article" "licencia" => "http://creativecommons.org/licenses/by-nc-nd/4.0/" 
"subdocumento" => "fla" "cita" => "Journal of Applied Research and Technology. 2014;12:72-9" "abierto" => array:3 [ "ES" => true "ES2" => true "LATM" => true ] "gratuito" => true "lecturas" => array:2 [ "total" => 970 "formatos" => array:3 [ "EPUB" => 34 "HTML" => 454 "PDF" => 482 ] ] "en" => array:11 [ "idiomaDefecto" => true "titulo" => "Prefeasibility Study of a Solar Power Plant Project and Optimization of a Meteorological Station Performance" "tienePdf" => "en" "tieneTextoCompleto" => "en" "tieneResumen" => array:2 [ 0 => "en" 1 => "es" ] "paginas" => array:1 [ 0 => array:2 [ "paginaInicial" => "72" "paginaFinal" => "79" ] ] "contieneResumen" => array:2 [ "en" => true "es" => true ] "contieneTextoCompleto" => array:1 [ "en" => true ] "contienePdf" => array:1 [ "en" => true ] "resumenGrafico" => array:2 [ "original" => 0 "multimedia" => array:7 [ "identificador" => "fig0010" "etiqueta" => "Figure 2" "tipo" => "MULTIMEDIAFIGURA" "mostrarFloat" => true "mostrarDisplay" => false "figura" => array:1 [ 0 => array:4 [ "imagen" => "gr2.jpeg" "Alto" => 665 "Ancho" => 807 "Tamanyo" => 58693 ] ] "descripcion" => array:1 [ "en" => "<p id="spar0010" class="elsevierStyleSimplePara elsevierViewall">Difference between measured DNI and satellite imagery</p>" ] ] ] "autores" => array:1 [ 0 => array:2 [ "autoresLista" => "W. Derouich, M. Besbes, J.D. Olivencia" "autores" => array:3 [ 0 => array:2 [ "nombre" => "W." "apellidos" => "Derouich" ] 1 => array:2 [ "nombre" => "M." "apellidos" => "Besbes" ] 2 => array:2 [ "nombre" => "J.D." 
"apellidos" => "Olivencia" ] ] ] ] ] "idiomaDefecto" => "en" "EPUB" => "https://multimedia.elsevier.es/PublicationsMultimediaV1/item/epub/S1665642314716074?idApp=UINPBA00004N" "url" => "/16656423/0000001200000001/v2_201505081640/S1665642314716074/v2_201505081640/en/main.assets" ] "en" => array:15 [ "idiomaDefecto" => true "titulo" => "Projector Calibration for Pattern Projection Systems" "tieneTextoCompleto" => true "paginas" => array:1 [ 0 => array:2 [ "paginaInicial" => "80" "paginaFinal" => "86" ] ] "autores" => array:1 [ 0 => array:3 [ "autoresLista" => "I. Din, H. Anwar, I. Syed, H. Zafar, L. Hasan" "autores" => array:5 [ 0 => array:4 [ "nombre" => "I." "apellidos" => "Din" "email" => array:1 [ 0 => "irfan@incheon.ac.kr" ] "referencia" => array:1 [ 0 => array:2 [ "etiqueta" => "<span class="elsevierStyleSup">1</span>" "identificador" => "aff0005" ] ] ] 1 => array:3 [ "nombre" => "H." "apellidos" => "Anwar" "referencia" => array:1 [ 0 => array:2 [ "etiqueta" => "<span class="elsevierStyleSup">2</span>" "identificador" => "aff0010" ] ] ] 2 => array:3 [ "nombre" => "I." "apellidos" => "Syed" "referencia" => array:1 [ 0 => array:2 [ "etiqueta" => "<span class="elsevierStyleSup">1</span>" "identificador" => "aff0005" ] ] ] 3 => array:3 [ "nombre" => "H." "apellidos" => "Zafar" "referencia" => array:1 [ 0 => array:2 [ "etiqueta" => "<span class="elsevierStyleSup">3</span>" "identificador" => "aff0015" ] ] ] 4 => array:3 [ "nombre" => "L." "apellidos" => "Hasan" "referencia" => array:1 [ 0 => array:2 [ "etiqueta" => "<span class="elsevierStyleSup">3</span>" "identificador" => "aff0015" ] ] ] ] "afiliaciones" => array:3 [ 0 => array:3 [ "entidad" => "Department of Electronics Engineering, Incheon National University, Incheon, South Korea." 
"etiqueta" => "1" "identificador" => "aff0005" ] 1 => array:3 [ "entidad" => "PhD School of Informatics, Vienna University of Technology (TU Vienna), Vienna, Austria," "etiqueta" => "2" "identificador" => "aff0010" ] 2 => array:3 [ "entidad" => "Department of Computer Systems Engineering, UET Peshawar, Peshawar, Pakistan." "etiqueta" => "3" "identificador" => "aff0015" ] ] ] ] "resumenGrafico" => array:2 [ "original" => 0 "multimedia" => array:7 [ "identificador" => "fig0005" "etiqueta" => "Figure 1" "tipo" => "MULTIMEDIAFIGURA" "mostrarFloat" => true "mostrarDisplay" => false "figura" => array:1 [ 0 => array:4 [ "imagen" => "gr1.jpeg" "Alto" => 372 "Ancho" => 929 "Tamanyo" => 57008 ] ] "descripcion" => array:1 [ "en" => "<p id="spar0005" class="elsevierStyleSimplePara elsevierViewall">Special chessboard setup</p>" ] ] ] "textoCompleto" => "<span class="elsevierStyleSections"><span id="sec0005" class="elsevierStyleSection elsevierViewall"><span class="elsevierStyleLabel">1</span><span class="elsevierStyleSectionTitle" id="sect0015">Introduction</span><p id="par0005" class="elsevierStylePara elsevierViewall">The 3D shape measurement and reconstruction has become one of the hottest fields in computer vision and robotics during the past few years. Researchers from various fields like computer vision, robotics, mechatronics, intelligent manufacturing systems and applied optics have worked enormously to find more robust, less complex and faster techniques [<a class="elsevierStyleCrossRef" href="#bib0005">1</a>, <a class="elsevierStyleCrossRef" href="#bib0010">2</a>]. These techniques are being adapted for medical, rapid prototyping, defense and other numerous industries. Based on their characteristics, these techniques are divided into two subgroups. The first one includes the use of stereovision system. This system makes use of two cameras to measure and recover the 3D geometry. 
The images of the objects are taken by both cameras from different positions and orientations simultaneously. Triangulation is then used to measure the 3D geometry. The bottleneck in the stereovision system is the correspondence. That is, to find the corresponding points in the projection of the scene in one camera to the points in the other camera. To cope with the correspondence problem, various image processing techniques are used. The correspondence problem is not involved in the second method. In this method the projector projects a structured light on a 3D geometry which is captured by a single camera. During the past few years a lot of work has been done on this technique and many people have come up with some very diverse ideas. This technique of reconstructing and measuring the 3D geometry is faster, robust and inexpensive; especially these days the decreasing prices of projectors and CCD cameras have made it easy to have a 3D measurement system. But before doing any reconstruction and measurement process the projector and the camera system must be calibrated. Researchers have investigated camera calibration deeply; thus, there are different algorithms for this purpose.</p><p id="par0010" class="elsevierStylePara elsevierViewall">For projectors there are two kinds of calibrations: The photometric calibration and the geometric calibration. The photometric calibration deals with the intensity values correspondence of the projected images and the images captured by the camera. This research focuses on the geometric calibration of the projector which deals with the calculation of the intrinsic and extrinsic parameters of the projector. Many researchers have worked on the geometric calibration of the projector.<a name="p81"></a></p><p id="par0015" class="elsevierStylePara elsevierViewall">Zhang and Huang <a class="elsevierStyleCrossRef" href="#bib0015">[3]</a> came up with the idea of capturing images with a projector. 
The projector is used to capture images like a camera; in this way the projector can be calibrated like a camera. The main difficulty lies in making the special setup of white and red light illumination. Apart from this, the detailed calculations needed to find the absolute phase map make it a math-heavy and time-consuming method. Li and Shi <a class="elsevierStyleCrossRef" href="#bib0020">[4]</a> also proposed the calculation of the DMD image i.e., the image taken by the projector, and made use of vertical and horizontal fringe patterns to recover the points seen by the projector, thus making it a time-consuming method too. Gao and Wang <a class="elsevierStyleCrossRef" href="#bib0025">[5]</a> have done the projector calibration using homographies. That is a nice idea too but the problem is with the red and blue pattern they used like Zhang and Huang. They also use a very big chessboard pattern for which a camera with wide FOV is needed. A wide FOV results in image distortion.</p><p id="par0020" class="elsevierStylePara elsevierViewall">Because much work has been done by researchers on camera calibration, in this work the projector calibration for the 3D measurement system is done based on the principles of the camera calibration.</p><p id="par0025" class="elsevierStylePara elsevierViewall">The rest of the paper is arranged as follows: <a class="elsevierStyleCrossRef" href="#sec0010">Section 2</a> describes the basic concept of camera calibration and how it is done in OpenCV. <a class="elsevierStyleCrossRef" href="#sec0025">Section 3</a> sheds some light on projector calibration. <a class="elsevierStyleCrossRef" href="#sec0030">Section 4</a> gives the 3D shape measurement system setup. 
<a class="elsevierStyleCrossRef" href="#sec0035">Section 5</a> gives the results of experiments and their verification in OpenCV and finally the conclusion and future work are given.</p></span><span id="sec0010" class="elsevierStyleSection elsevierViewall"><span class="elsevierStyleLabel">2</span><span class="elsevierStyleSectionTitle" id="sect0020">System model</span><span id="sec0015" class="elsevierStyleSection elsevierViewall"><span class="elsevierStyleLabel">2.1</span><span class="elsevierStyleSectionTitle" id="sect0025">Problem statement</span><p id="par0030" class="elsevierStylePara elsevierViewall">Let us consider a point (<span class="elsevierStyleItalic">m</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">pro</span></span>, <span class="elsevierStyleItalic">n</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">pro</span></span>)<span class="elsevierStyleSup"><span class="elsevierStyleItalic">T</span></span> in the projector's plane. This point is projected on an unknown 3D point <span class="elsevierStyleItalic">(X</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">w</span></span>, <span class="elsevierStyleItalic">Y</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">w</span></span>, <span class="elsevierStyleItalic">Z</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">w</span></span><span class="elsevierStyleItalic">)</span><span class="elsevierStyleSup"><span class="elsevierStyleItalic">T</span></span> in the world plane. 
The camera then takes the image of this point and as a result the point (<span class="elsevierStyleItalic">m</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">cam</span></span>, <span class="elsevierStyleItalic">n</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">cam</span></span>)<span class="elsevierStyleSup"><span class="elsevierStyleItalic">T</span></span> is obtained in the camera's image plane. It can be observed that the projector acts as a reverse camera. The camera takes an image of the unknown point on the screen while the projector projects the known point onto an unknown point. Here the divide and conquer rule can be applied to solve the problem. The whole process can be divided into two parts.<ul class="elsevierStyleList" id="lis0005"><li class="elsevierStyleListItem" id="lsti0005"><span class="elsevierStyleLabel">1)</span><p id="par0035" class="elsevierStylePara elsevierViewall">The projector to the screen</p></li><li class="elsevierStyleListItem" id="lsti0010"><span class="elsevierStyleLabel">2)</span><p id="par0040" class="elsevierStylePara elsevierViewall">The screen to the camera</p></li></ul>The medium between the camera and the projector will be a chessboard that is attached to a flat and light sheet of plastic. Here we use a chessboard of 8<span class="elsevierStyleHsp" style=""></span>×<span class="elsevierStyleHsp" style=""></span>8 black and white squares. The size of each black or white square of chessboard is 20<span class="elsevierStyleHsp" style=""></span>mm<span class="elsevierStyleHsp" style=""></span>×<span class="elsevierStyleHsp" style=""></span>20<span class="elsevierStyleHsp" style=""></span>mm. For calibration, we used OpenCV, which is an open source computer vision library. The camera calibration process in OpenCV follows Zhang's method <a class="elsevierStyleCrossRef" href="#bib0020">[4]</a>. 
A brief description of this method is given below.</p></span><span id="sec0020" class="elsevierStyleSection elsevierViewall"><span class="elsevierStyleLabel">2.2</span><span class="elsevierStyleSectionTitle" id="sect0030">Zhang's method</span><p id="par0045" class="elsevierStylePara elsevierViewall">A lot of work has been done on camera calibration during the past decades. The latest method that is used by most of the researchers is Zhang's. This method uses pinhole camera model, which has focal length, pixel size, and skews factor as intrinsic parameters and the translation and rotation of the camera reference frame with respect to the world reference frame as extrinsic parameters. The calibration is simply a process that finds the intrinsic and extrinsic parameters of the camera. A brief description of Zhang's method follows.</p><p id="par0050" class="elsevierStylePara elsevierViewall">This method uses a regular shaped object e.g., a chessboard pattern. Let <span class="elsevierStyleItalic">q</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">cam</span></span> = <span class="elsevierStyleItalic">(m</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">cam</span></span>, <span class="elsevierStyleItalic">n</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">cam</span></span>, <span class="elsevierStyleItalic">1)</span><span class="elsevierStyleSup"><span class="elsevierStyleItalic">T</span></span> be the 2D point in the image plane and <span class="elsevierStyleItalic">Q</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">cam</span></span><span class="elsevierStyleItalic">(X</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">cam</span></span>,<span class="elsevierStyleItalic">Y</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">cam</span></span>,<span class="elsevierStyleItalic">Z</span><span class="elsevierStyleInf"><span 
class="elsevierStyleItalic">cam</span></span>,<span class="elsevierStyleItalic">1)</span><span class="elsevierStyleSup"><span class="elsevierStyleItalic">T</span></span> be the corresponding 3D point in the screen frame of reference. According to the pinhole camera model<elsevierMultimedia ident="eq0005"></elsevierMultimedia>In <a class="elsevierStyleCrossRef" href="#eq0005">Equation 1</a>, <span class="elsevierStyleItalic">M</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">cam</span></span> is the set of intrinsic parameters and <span class="elsevierStyleItalic">(R</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">cam</span></span>/<span class="elsevierStyleItalic">t</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">cam</span></span><span class="elsevierStyleItalic">)</span> is the set of extrinsic parameters. ‘<span class="elsevierStyleItalic">s</span>’ is an arbitrary scaling factor. The set of intrinsic parameters is given as:<a name="p82"></a><elsevierMultimedia ident="eq0010"></elsevierMultimedia>where <span class="elsevierStyleItalic">f</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">x</span></span> and <span class="elsevierStyleItalic">f</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">y</span></span> are the components of focal length in <span class="elsevierStyleItalic">x</span> and <span class="elsevierStyleItalic">y</span> co-ordinates. (<span class="elsevierStyleItalic">c</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">x</span></span>,<span class="elsevierStyleItalic">c</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">y</span></span>) are the co-ordinates of principal focus. 
It is assumed that the model plane is at <span class="elsevierStyleItalic">Z</span><span class="elsevierStyleHsp" style=""></span>=<span class="elsevierStyleHsp" style=""></span>0, hence, <a class="elsevierStyleCrossRef" href="#eq0005">Equation 1</a> becomes:<elsevierMultimedia ident="eq0015"></elsevierMultimedia>where [r<span class="elsevierStyleInf">1c</span> r<span class="elsevierStyleInf">2c</span> t<span class="elsevierStyleInf">c</span>] is the extrinsic parameters matrix. More details of the method can be found in <a class="elsevierStyleCrossRef" href="#bib0020">[4]</a>.</p><p id="par0055" class="elsevierStylePara elsevierViewall">The summary of the method is as follows:<ul class="elsevierStyleList" id="lis0010"><li class="elsevierStyleListItem" id="lsti0015"><span class="elsevierStyleLabel">1.</span><p id="par0060" class="elsevierStylePara elsevierViewall">A regular-shaped object like a chessboard is attached to a flat and smooth sheet of plastic.</p></li><li class="elsevierStyleListItem" id="lsti0020"><span class="elsevierStyleLabel">2.</span><p id="par0065" class="elsevierStylePara elsevierViewall">Images of the object are taken at different positions and orientations.</p></li><li class="elsevierStyleListItem" id="lsti0025"><span class="elsevierStyleLabel">3.</span><p id="par0070" class="elsevierStylePara elsevierViewall">The feature points in the image are detected by a special function in OpenCV and stored in a matrix called the ‘image points’.</p></li><li class="elsevierStyleListItem" id="lsti0030"><span class="elsevierStyleLabel">4.</span><p id="par0075" class="elsevierStylePara elsevierViewall">The object points are also stored in another matrix called the ‘object points’.</p></li><li class="elsevierStyleListItem" id="lsti0035"><span class="elsevierStyleLabel">5.</span><p id="par0080" class="elsevierStylePara elsevierViewall">Both matrices are provided to the main calibration function in OpenCV to find the intrinsic and distortion parameters of the 
camera.</p></li><li class="elsevierStyleListItem" id="lsti0040"><span class="elsevierStyleLabel">6.</span><p id="par0085" class="elsevierStylePara elsevierViewall">The set of extrinsic parameters of the camera is then determined with the help of the intrinsic parameters.</p></li></ul></p></span></span><span id="sec0025" class="elsevierStyleSection elsevierViewall"><span class="elsevierStyleLabel">3</span><span class="elsevierStyleSectionTitle" id="sect0035">Projector calibration</span><p id="par0090" class="elsevierStylePara elsevierViewall">Given that the projector can be considered as the inverse of a camera, the pinhole model also applies to the projector. The difference is that the camera captures the image of the screen while the projector projects the image to the screen. The projector can also be calibrated as the camera with the help of Zhang's method. If the object points of the pattern that is projected by the projector on the screen and the image points of the same pattern are known, then the calibration function in OpenCV can calculate the intrinsic parameters of the projector. The main problem here is that the object points are unknown. To find the object points the calibrated camera can be used. The camera can calculate the 2D object points (as we considered <span class="elsevierStyleItalic">Z</span><span class="elsevierStyleHsp" style=""></span>=<span class="elsevierStyleHsp" style=""></span>0) on the screen and the image points can be directly read from the image that the projector is projecting. According to pinhole camera model, <elsevierMultimedia ident="eq0020"></elsevierMultimedia>In <a class="elsevierStyleCrossRef" href="#eq0020">Equation 4</a><span class="elsevierStyleItalic">q</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">pro</span></span> can be directly read from the image that the projector is projecting. 
For the measurement of the object points <span class="elsevierStyleItalic">Q</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">pro</span></span> the calibrated camera can be used in the following manner.<a name="p83"></a></p><p id="par0095" class="elsevierStylePara elsevierViewall">A special set up of chessboard is used. The printed chessboard is attached to a flat sheet of plastic and then covered with a thin sheet of paper as shown in <a class="elsevierStyleCrossRef" href="#fig0005">Figure 1</a>. The printed sheet is attached to the plastic sheet from the upper side so that it can be moved freely.</p><elsevierMultimedia ident="fig0005"></elsevierMultimedia><p id="par0100" class="elsevierStylePara elsevierViewall">For each position two images are taken. The first image is that of the printed chessboard. For the second image the printed chessboard is covered with the sheet and the chessboard image from computer, is projected by the projector. The image of the projected chessboard is then taken with a camera. 
The whole process is summarized in the following steps:<ul class="elsevierStyleList" id="lis0015"><li class="elsevierStyleListItem" id="lsti0045"><span class="elsevierStyleLabel">1.</span><p id="par0105" class="elsevierStylePara elsevierViewall">Take the image of the printed chessboard.</p></li><li class="elsevierStyleListItem" id="lsti0050"><span class="elsevierStyleLabel">2.</span><p id="par0110" class="elsevierStylePara elsevierViewall">Cover the printed chessboard with the sheet and project the chessboard from the PC to take the second image.</p></li><li class="elsevierStyleListItem" id="lsti0055"><span class="elsevierStyleLabel">3.</span><p id="par0115" class="elsevierStylePara elsevierViewall">Calculate the extrinsic parameters using the printed chessboard with the help of the calibrated camera.</p></li><li class="elsevierStyleListItem" id="lsti0060"><span class="elsevierStyleLabel">4.</span><p id="par0120" class="elsevierStylePara elsevierViewall">Calculate the corners of the projected chessboard with the help of the extrinsic and intrinsic parameters of the camera and store in the ‘object points’ matrix.</p></li><li class="elsevierStyleListItem" id="lsti0065"><span class="elsevierStyleLabel">5.</span><p id="par0125" class="elsevierStylePara elsevierViewall">Detect the corners in the image of the chessboard that is projected by the projector and store in the ‘image points’ matrix.</p></li><li class="elsevierStyleListItem" id="lsti0070"><span class="elsevierStyleLabel">6.</span><p id="par0130" class="elsevierStylePara elsevierViewall">Move to the next position and repeat steps 1 to 5.</p></li><li class="elsevierStyleListItem" id="lsti0075"><span class="elsevierStyleLabel">7.</span><p id="par0135" class="elsevierStylePara elsevierViewall">Having enough image and object points, feed both matrices to the calibration function of OpenCV to calculate the intrinsic parameters and distortion parameters.</p></li></ul>When the chessboard is projected, the camera takes 
the image of that projected chessboard. Assuming that the camera is calibrated and the extrinsic parameters are also known in Step 3, according to <a class="elsevierStyleCrossRef" href="#eq0005">Equation 1</a>:<elsevierMultimedia ident="eq0025"></elsevierMultimedia>and according to <a class="elsevierStyleCrossRef" href="#eq0020">(4)</a><elsevierMultimedia ident="eq0030"></elsevierMultimedia>when the projector projects and the camera takes the image of the same projected chessboard then:<elsevierMultimedia ident="eq0035"></elsevierMultimedia>hence, <a class="elsevierStyleCrossRef" href="#eq0005">Equation 1</a> can be written as<elsevierMultimedia ident="eq0040"></elsevierMultimedia>Because the intrinsic and extrinsic parameters of the camera are known and also the corresponding image points can be detected from the captured image of the projected chessboard, <a class="elsevierStyleCrossRef" href="#eq0040">Equation 5</a> can be rearranged<elsevierMultimedia ident="eq0045"></elsevierMultimedia>or <a class="elsevierStyleCrossRef" href="#eq0045">Equation 6</a> can be rewritten as [8]<a name="p84"></a><elsevierMultimedia ident="eq0050"></elsevierMultimedia>Once the object points of the projected chessboard are known, the image points are directly read from the pixel location of the image to be projected and then both the image points and the object points are provided to the calibration function of OpenCV to calculate the intrinsic and distortion parameters of the projector.</p></span><span id="sec0030" class="elsevierStyleSection elsevierViewall"><span class="elsevierStyleLabel">4</span><span class="elsevierStyleSectionTitle" id="sect0040">Arrangement of 3D shape measurement system</span><p id="par0140" class="elsevierStylePara elsevierViewall">At this stage the intrinsic and distortion parameters of both the camera and the projector are known, this is the exact stage of setting up the 3D shape measurement system. 
To do this, the relative position of camera and projector needs to be set with respect to a world coordinate system. This is done with the help of the same chessboard pattern. The projector is placed perpendicular to the screen and the chessboard is projected on the screen. The upper left corner of the projected chessboard is set as the origin of the screen reference plane. The <span class="elsevierStyleItalic">x-y</span> axes are on the plane and z-axis is perpendicular to the plane. The origin is selected due to the fact that OpenCV starts detecting the corners from that corner. The setup of the system is shown in <a class="elsevierStyleCrossRef" href="#fig0010">Figure 2</a> along with the <span class="elsevierStyleItalic">x,y</span> and z axes of the camera, projector and screen reference planes and transformations between these planes. <a class="elsevierStyleCrossRef" href="#fig0020">Figure 4</a> shows the setup used in this research.</p><elsevierMultimedia ident="fig0010"></elsevierMultimedia></span><span id="sec0035" class="elsevierStyleSection elsevierViewall"><span class="elsevierStyleLabel">5</span><span class="elsevierStyleSectionTitle" id="sect0045">Experiments, results and verifications</span><p id="par0145" class="elsevierStylePara elsevierViewall">In this work the IDS uEye ® CCD camera was used. The resolution of this camera is 1024<span class="elsevierStyleHsp" style=""></span>×<span class="elsevierStyleHsp" style=""></span>768. The LCD projector used is EPSON EB-1735W with a resolution of 1024<span class="elsevierStyleHsp" style=""></span>×<span class="elsevierStyleHsp" style=""></span>768. 
The set of intrinsic and extrinsic parameters of the camera and projector are given.</p><p id="par0150" class="elsevierStylePara elsevierViewall">Camera intrinsic and extrinsic parameters</p><p id="par0155" class="elsevierStylePara elsevierViewall">Intrinsic parameters:<elsevierMultimedia ident="eq0055"></elsevierMultimedia>Extrinsic parameters:<elsevierMultimedia ident="eq0060"></elsevierMultimedia></p><p id="par0160" class="elsevierStylePara elsevierViewall">Projector intrinsic and extrinsic parameters</p><p id="par0165" class="elsevierStylePara elsevierViewall">Intrinsic parameters:<elsevierMultimedia ident="eq0065"></elsevierMultimedia>Extrinsic parameters:<elsevierMultimedia ident="eq0070"></elsevierMultimedia>Transformation matrix from camera to screen:<elsevierMultimedia ident="eq0075"></elsevierMultimedia><a name="p85"></a></p><p id="par0170" class="elsevierStylePara elsevierViewall">Transformation matrix from projector to screen:<elsevierMultimedia ident="eq0080"></elsevierMultimedia></p><p id="par0175" class="elsevierStylePara elsevierViewall">A 3D to 2D point projection method is used for the error analysis of the proposed method. This method of error analysis is also known as the re-projection error analysis. In this method known-object points such as the corners of a chessboard are projected onto the image plane by applying the known intrinsic and extrinsic parameters of the projector. These are called the “calculated image points”. Also the image points are read directly from the image to be projected by the projector. These points are called the “measured image points”. The corners of the projected chessboard are measured in two different ways. They are measured with calibrated camera and with a graph paper. These measured corners are the “measured object points”. 
The measured object points and the calibrated projector are then used to calculate the image points according to the following equation:<elsevierMultimedia ident="eq0085"></elsevierMultimedia>The whole scenario is shown in <a class="elsevierStyleCrossRef" href="#fig0015">Figure 3</a>. Let <span class="elsevierStyleItalic">q</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">cal</span></span><span class="elsevierStyleItalic">(m</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">cal</span></span>,<span class="elsevierStyleItalic">n</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">cal</span></span><span class="elsevierStyleItalic">)</span> be the calculated image point according to <a class="elsevierStyleCrossRef" href="#eq0085">Equation 8</a> and <span class="elsevierStyleItalic">q</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">measured</span></span><span class="elsevierStyleItalic">(m</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">measured</span></span>,<span class="elsevierStyleItalic">n</span><span class="elsevierStyleInf"><span class="elsevierStyleItalic">measured</span></span><span class="elsevierStyleItalic">)</span> be the point measured i.e., directly read from the chessboard image to be projected by the projector.</p><elsevierMultimedia ident="fig0015"></elsevierMultimedia><elsevierMultimedia ident="fig0020"></elsevierMultimedia><p id="par0180" class="elsevierStylePara elsevierViewall">Then:<elsevierMultimedia ident="eq0090"></elsevierMultimedia></p><p id="par0185" class="elsevierStylePara elsevierViewall">The steps for error analysis are shown below.<ul class="elsevierStyleList" id="lis0020"><li class="elsevierStyleListItem" id="lsti0080"><span class="elsevierStyleLabel">1.</span><p id="par0190" class="elsevierStylePara elsevierViewall">Project an image of a regular chessboard pattern.</p></li><li class="elsevierStyleListItem" id="lsti0085"><span 
class="elsevierStyleLabel">2.</span><p id="par0195" class="elsevierStylePara elsevierViewall">Read the corners of the image to be projected. These are the measured image points.</p></li><li class="elsevierStyleListItem" id="lsti0090"><span class="elsevierStyleLabel">3.</span><p id="par0200" class="elsevierStylePara elsevierViewall">Take the image of the projected chessboard with a calibrated camera.</p></li><li class="elsevierStyleListItem" id="lsti0095"><span class="elsevierStyleLabel">4.</span><p id="par0205" class="elsevierStylePara elsevierViewall">Detect and measure the corners of the projected chessboard. These are the object points measured with camera.</p></li><li class="elsevierStyleListItem" id="lsti0100"><span class="elsevierStyleLabel">5.</span><p id="par0210" class="elsevierStylePara elsevierViewall">Now detect and measure the corners of the same projected chessboard with a graph paper.</p></li><li class="elsevierStyleListItem" id="lsti0105"><span class="elsevierStyleLabel">6.</span><p id="par0215" class="elsevierStylePara elsevierViewall">Use <a class="elsevierStyleCrossRef" href="#eq0085">Equation 8</a> to calculate the image points using object points from Step 4 and then use <a class="elsevierStyleCrossRef" href="#eq0090">Equation 9</a> to calculate <span class="elsevierStyleItalic">Δm</span> and <span class="elsevierStyleItalic">Δn</span>.</p></li><li class="elsevierStyleListItem" id="lsti0110"><span class="elsevierStyleLabel">7.</span><p id="par0220" class="elsevierStylePara elsevierViewall">Use <a class="elsevierStyleCrossRef" href="#eq0085">Equation 8</a> to calculate the image points using object points from Step 5 and then use <a class="elsevierStyleCrossRef" href="#eq0090">Equation 9</a> to calculate <span class="elsevierStyleItalic">Δm</span> and <span class="elsevierStyleItalic">Δn</span>.</p></li></ul></p><p id="par0225" class="elsevierStylePara elsevierViewall">The error analysis graphs are shown below. 
<a class="elsevierStyleCrossRef" href="#fig0025">Figure 5</a> shows the error analysis graph from Step 6. In <a class="elsevierStyleCrossRef" href="#fig0030">Figure 6</a> the error analysis graph shows the result from Step 7.<a name="p86"></a></p><elsevierMultimedia ident="fig0025"></elsevierMultimedia><elsevierMultimedia ident="fig0030"></elsevierMultimedia><p id="par0230" class="elsevierStylePara elsevierViewall">For comparison, the re-projection error of a typical CCD camera calibration is shown in <a class="elsevierStyleCrossRef" href="#fig0035">Figure 7</a>, which shows that the re-projection error of the projector calibration is at an acceptable level. The object points used for projector calibration are measured with a calibrated camera. This measurement most likely contains the calibration errors of the camera. The re-projection error of a typical CCD camera calibration is shown in <a class="elsevierStyleCrossRef" href="#fig0035">Figure 7</a>. If an object point is measured with this calibrated camera then the errors, which occurred during the calibration of the camera, will be propagated to this measurement resulting in less accuracy. Therefore, to avoid this situation, an un-calibrated camera must assist the projector calibration process.</p><elsevierMultimedia ident="fig0035"></elsevierMultimedia></span><span id="sec0040" class="elsevierStyleSection elsevierViewall"><span class="elsevierStyleLabel">6</span><span class="elsevierStyleSectionTitle" id="sect0050">Conclusion</span><p id="par0235" class="elsevierStylePara elsevierViewall">In this work a 3D shape measurement system was developed with the help of a calibrated camera and a projector. The projector is considered as an inverse camera and is calibrated in a similar way as the camera. This calibration of the projector is done with the help of a specially designed setup of chessboard and a calibrated camera. 
The intrinsic and extrinsic parameters of the projector are calculated and then error analysis is done with OpenCV. For comparison, the re-projection error of a typical CCD camera was shown too. At this stage the 3D shape measurement system has been set up and the verified parameters of the camera and the projector are known.</p><p id="par0240" class="elsevierStylePara elsevierViewall">A possible future contribution will be to use an un-calibrated camera for projector calibration so that the errors from the camera calibration are prevented from being induced in the projector calibration process.</p></span></span>" "textoCompletoSecciones" => array:1 [ "secciones" => array:9 [ 0 => array:3 [ "identificador" => "xres498801" "titulo" => "Abstract" "secciones" => array:1 [ 0 => array:1 [ "identificador" => "abst0005" ] ] ] 1 => array:2 [ "identificador" => "xpalclavsec520321" "titulo" => "Keywords" ] 2 => array:2 [ "identificador" => "sec0005" "titulo" => "Introduction" ] 3 => array:3 [ "identificador" => "sec0010" "titulo" => "System model" "secciones" => array:2 [ 0 => array:2 [ "identificador" => "sec0015" "titulo" => "Problem statement" ] 1 => array:2 [ "identificador" => "sec0020" "titulo" => "Zhang's method" ] ] ] 4 => array:2 [ "identificador" => "sec0025" "titulo" => "Projector calibration" ] 5 => array:2 [ "identificador" => "sec0030" "titulo" => "Arrangement of 3D shape measurement system" ] 6 => array:2 [ "identificador" => "sec0035" "titulo" => "Experiments, results and verifications" ] 7 => array:2 [ "identificador" => "sec0040" "titulo" => "Conclusion" ] 8 => array:1 [ "titulo" => "References" ] ] ] "pdfFichero" => "main.pdf" "tienePdf" => true "PalabrasClave" => array:1 [ "en" => array:1 [ 0 => array:4 [ "clase" => "keyword" "titulo" => "Keywords" "identificador" => "xpalclavsec520321" "palabras" => array:3 [ 0 => "calibration" 1 => "extrinsic parameters" 2 => "intrinsic parameters" ] ] ] ] "tieneResumen" => true "resumen" => array:1 [ "en" => array:2 [ 
"titulo" => "Abstract" "resumen" => "<span id="abst0005" class="elsevierStyleSection elsevierViewall"><p id="spar0040" class="elsevierStyleSimplePara elsevierViewall">In this paper we proposed a method for geometric calibration of a projector. This method makes use of a calibrated camera to calibrate the projector. Since the projector works inversely with a camera i.e., it projects the image instead of capturing it, so it can be considered as a reverse camera. The projector is calibrated with the help of a calibrated camera using two types of chessboard, a printed chessboard and a projected chessboard by the projector. The object points of the projected chessboard pattern are measured with the help of calibrated camera and the image points are directly acquired from the chessboard pattern to be projected. Then using these object points and image points the projector is calibrated. Once the projector calibration is done, the transformation matrices (from projector to screen, from camera to screen and from camera to projector) are determined which are used for the reconstruction of the 3D geometry.</p></span>" ] ] "multimedia" => array:25 [ 0 => array:7 [ "identificador" => "fig0005" "etiqueta" => "Figure 1" "tipo" => "MULTIMEDIAFIGURA" "mostrarFloat" => true "mostrarDisplay" => false "figura" => array:1 [ 0 => array:4 [ "imagen" => "gr1.jpeg" "Alto" => 372 "Ancho" => 929 "Tamanyo" => 57008 ] ] "descripcion" => array:1 [ "en" => "<p id="spar0005" class="elsevierStyleSimplePara elsevierViewall">Special chessboard setup</p>" ] ] 1 => array:7 [ "identificador" => "fig0010" "etiqueta" => "Figure 2" "tipo" => "MULTIMEDIAFIGURA" "mostrarFloat" => true "mostrarDisplay" => false "figura" => array:1 [ 0 => array:4 [ "imagen" => "gr2.jpeg" "Alto" => 627 "Ancho" => 515 "Tamanyo" => 42418 ] ] "descripcion" => array:1 [ "en" => "<p id="spar0010" class="elsevierStyleSimplePara elsevierViewall">Reference planes and transformations of camera, projector and screen</p>" ] ] 2 => 
array:7 [ "identificador" => "fig0015" "etiqueta" => "Figure 3" "tipo" => "MULTIMEDIAFIGURA" "mostrarFloat" => true "mostrarDisplay" => false "figura" => array:1 [ 0 => array:4 [ "imagen" => "gr3.jpeg" "Alto" => 499 "Ancho" => 717 "Tamanyo" => 60687 ] ] "descripcion" => array:1 [ "en" => "<p id="spar0015" class="elsevierStyleSimplePara elsevierViewall">Known chessboard projected by calibrated projector <a class="elsevierStyleCrossRef" href="#bib0035">[7]</a></p>" ] ] 3 => array:7 [ "identificador" => "fig0020" "etiqueta" => "Figure 4" "tipo" => "MULTIMEDIAFIGURA" "mostrarFloat" => true "mostrarDisplay" => false "figura" => array:1 [ 0 => array:4 [ "imagen" => "gr4.jpeg" "Alto" => 499 "Ancho" => 809 "Tamanyo" => 46593 ] ] "descripcion" => array:1 [ "en" => "<p id="spar0020" class="elsevierStyleSimplePara elsevierViewall">3D Shape measurement system</p>" ] ] 4 => array:7 [ "identificador" => "fig0025" "etiqueta" => "Figure 5" "tipo" => "MULTIMEDIAFIGURA" "mostrarFloat" => true "mostrarDisplay" => false "figura" => array:1 [ 0 => array:4 [ "imagen" => "gr5.jpeg" "Alto" => 635 "Ancho" => 768 "Tamanyo" => 74413 ] ] "descripcion" => array:1 [ "en" => "<p id="spar0025" class="elsevierStyleSimplePara elsevierViewall">Error analysis graph (camera measurement method)</p>" ] ] 5 => array:7 [ "identificador" => "fig0030" "etiqueta" => "Figure 6" "tipo" => "MULTIMEDIAFIGURA" "mostrarFloat" => true "mostrarDisplay" => false "figura" => array:1 [ 0 => array:4 [ "imagen" => "gr6.jpeg" "Alto" => 614 "Ancho" => 766 "Tamanyo" => 71081 ] ] "descripcion" => array:1 [ "en" => "<p id="spar0030" class="elsevierStyleSimplePara elsevierViewall">Analysis graph (graph paper measurement method)</p>" ] ] 6 => array:7 [ "identificador" => "fig0035" "etiqueta" => "Figure 7" "tipo" => "MULTIMEDIAFIGURA" "mostrarFloat" => true "mostrarDisplay" => false "figura" => array:1 [ 0 => array:4 [ "imagen" => "gr7.jpeg" "Alto" => 672 "Ancho" => 752 "Tamanyo" => 104891 ] ] "descripcion" => array:1 [ "en" => 
"<p id="spar0035" class="elsevierStyleSimplePara elsevierViewall">Re-projection error analysis (camera calibration)</p>" ] ] 7 => array:6 [ "identificador" => "eq0005" "etiqueta" => "(1)" "tipo" => "MULTIMEDIAFORMULA" "mostrarFloat" => false "mostrarDisplay" => true "Formula" => array:5 [ "Matematica" => "qcam=sMcam(Rcam|tcam)Qcam" "Fichero" => "si1.jpeg" "Tamanyo" => 1570 "Alto" => 16 "Ancho" => 206 ] ] 8 => array:6 [ "identificador" => "eq0010" "etiqueta" => "(2)" "tipo" => "MULTIMEDIAFORMULA" "mostrarFloat" => false "mostrarDisplay" => true "Formula" => array:5 [ "Matematica" => "Mcam=fx0cx0fycy001" "Fichero" => "si2.jpeg" "Tamanyo" => 2562 "Alto" => 68 "Ancho" => 166 ] ] 9 => array:6 [ "identificador" => "eq0015" "etiqueta" => "(3)" "tipo" => "MULTIMEDIAFORMULA" "mostrarFloat" => false "mostrarDisplay" => true "Formula" => array:5 [ "Matematica" => "mcamncam1=sMcam(r1cr2ctc)XcamYcam1" "Fichero" => "si3.jpeg" "Tamanyo" => 3823 "Alto" => 68 "Ancho" => 290 ] ] 10 => array:6 [ "identificador" => "eq0020" "etiqueta" => "(4)" "tipo" => "MULTIMEDIAFORMULA" "mostrarFloat" => false "mostrarDisplay" => true "Formula" => array:5 [ "Matematica" => "qpro=sMpro(Rpro|tpro)Qpro" "Fichero" => "si4.jpeg" "Tamanyo" => 1633 "Alto" => 17 "Ancho" => 193 ] ] 11 => array:5 [ "identificador" => "eq0025" "tipo" => "MULTIMEDIAFORMULA" "mostrarFloat" => false "mostrarDisplay" => true "Formula" => array:5 [ "Matematica" => "qcam=sMcam(Rcam|tcam)Qcam" "Fichero" => "si5.jpeg" "Tamanyo" => 1570 "Alto" => 16 "Ancho" => 206 ] ] 12 => array:5 [ "identificador" => "eq0030" "tipo" => "MULTIMEDIAFORMULA" "mostrarFloat" => false "mostrarDisplay" => true "Formula" => array:5 [ "Matematica" => "qpro=sMpro(Rpro|tpro)Qpro" "Fichero" => "si6.jpeg" "Tamanyo" => 1633 "Alto" => 17 "Ancho" => 193 ] ] 13 => array:5 [ "identificador" => "eq0035" "tipo" => "MULTIMEDIAFORMULA" "mostrarFloat" => false "mostrarDisplay" => true "Formula" => array:5 [ "Matematica" => "Qcam=Qpro" "Fichero" => "si7.jpeg" "Tamanyo" => 
812 "Alto" => 16 "Ancho" => 89 ] ] 14 => array:6 [ "identificador" => "eq0040" "etiqueta" => "(5)" "tipo" => "MULTIMEDIAFORMULA" "mostrarFloat" => false "mostrarDisplay" => true "Formula" => array:5 [ "Matematica" => "qcam=sMcam(Rcam|tcam)Qpro" "Fichero" => "si8.jpeg" "Tamanyo" => 1666 "Alto" => 17 "Ancho" => 203 ] ] 15 => array:6 [ "identificador" => "eq0045" "etiqueta" => "(6)" "tipo" => "MULTIMEDIAFORMULA" "mostrarFloat" => false "mostrarDisplay" => true "Formula" => array:5 [ "Matematica" => "Qpro=(1/s)Mcam−1(Rcam|tcam)−1qcam" "Fichero" => "si9.jpeg" "Tamanyo" => 2463 "Alto" => 20 "Ancho" => 245 ] ] 16 => array:6 [ "identificador" => "eq0050" "etiqueta" => "(7)" "tipo" => "MULTIMEDIAFORMULA" "mostrarFloat" => false "mostrarDisplay" => true "Formula" => array:5 [ "Matematica" => "XproYpro1=(1/s)Mcam−1(Rcam|tcam)−1mcamncam1" "Fichero" => "si10.jpeg" "Tamanyo" => 3936 "Alto" => 68 "Ancho" => 303 ] ] 17 => array:5 [ "identificador" => "eq0055" "tipo" => "MULTIMEDIAFORMULA" "mostrarFloat" => false "mostrarDisplay" => true "Formula" => array:5 [ "Matematica" => "1278.270435.2701266.59342.377001" "Fichero" => "si11.jpeg" "Tamanyo" => 3466 "Alto" => 68 "Ancho" => 218 ] ] 18 => array:5 [ "identificador" => "eq0060" "tipo" => "MULTIMEDIAFORMULA" "mostrarFloat" => false "mostrarDisplay" => true "Formula" => array:5 [ "Matematica" => "−0.00110.9460.322−129.290.9990.010−0.028−123.32−0.0300.321−0.946827.080001" "Fichero" => "si12.jpeg" "Tamanyo" => 6363 "Alto" => 87 "Ancho" => 277 ] ] 19 => array:5 [ "identificador" => "eq0065" "tipo" => "MULTIMEDIAFORMULA" "mostrarFloat" => false "mostrarDisplay" => true "Formula" => array:5 [ "Matematica" => "1931.480423.3401985.84667.67001" "Fichero" => "si13.jpeg" "Tamanyo" => 3354 "Alto" => 68 "Ancho" => 209 ] ] 20 => array:5 [ "identificador" => "eq0070" "tipo" => "MULTIMEDIAFORMULA" "mostrarFloat" => false "mostrarDisplay" => true "Formula" => array:5 [ "Matematica" => 
"−0.0001.000−0.000957−119.680.9990.0000020.0022−325.480.0022−0.000957−0.999801.560001" "Fichero" => "si14.jpeg" "Tamanyo" => 6983 "Alto" => 87 "Ancho" => 330 ] ] 21 => array:5 [ "identificador" => "eq0075" "tipo" => "MULTIMEDIAFORMULA" "mostrarFloat" => false "mostrarDisplay" => true "Formula" => array:5 [ "Matematica" => "TSC=−0.00110.9460.322−129.290.9990.010−0.028−123.32−0.0300.321−0.946827.080001" "Fichero" => "si15.jpeg" "Tamanyo" => 6822 "Alto" => 87 "Ancho" => 320 ] ] 22 => array:5 [ "identificador" => "eq0080" "tipo" => "MULTIMEDIAFORMULA" "mostrarFloat" => false "mostrarDisplay" => true "Formula" => array:5 [ "Matematica" => "TSP=−0.0001.000−0.000957−119.680.9990.0000020.0022−325.480.0022−0.000957−0.999801.560001" "Fichero" => "si16.jpeg" "Tamanyo" => 7627 "Alto" => 87 "Ancho" => 372 ] ] 23 => array:6 [ "identificador" => "eq0085" "etiqueta" => "(8)" "tipo" => "MULTIMEDIAFORMULA" "mostrarFloat" => false "mostrarDisplay" => true "Formula" => array:5 [ "Matematica" => "qpro=sMpro(Rpro|tpro)Qpro" "Fichero" => "si17.jpeg" "Tamanyo" => 1633 "Alto" => 17 "Ancho" => 193 ] ] 24 => array:6 [ "identificador" => "eq0090" "etiqueta" => "(9)" "tipo" => "MULTIMEDIAFORMULA" "mostrarFloat" => false "mostrarDisplay" => true "Formula" => array:5 [ "Matematica" => "Δm=mcal−mmeasuredΔn=ncal−nmeasured" "Fichero" => "si18.jpeg" "Tamanyo" => 2456 "Alto" => 37 "Ancho" => 159 ] ] ] "bibliografia" => array:2 [ "titulo" => "References" "seccion" => array:1 [ 0 => array:2 [ "identificador" => "bibs0005" "bibliografiaReferencia" => array:8 [ 0 => array:3 [ "identificador" => "bib0005" "etiqueta" => "[1]" "referencia" => array:1 [ 0 => array:2 [ "contribucion" => array:1 [ 0 => array:2 [ "titulo" => "Binocular Visual tracking and grasping of a moving object with a 3D trajectory predictor" "autores" => array:1 [ 0 => array:2 [ "etal" => true "autores" => array:1 [ 0 => "F. 
Pacheco" ] ] ] ] ] "host" => array:1 [ 0 => array:1 [ "Revista" => array:6 [ "tituloSerie" => "Journal of applied research and technology" "fecha" => "2009" "volumen" => "7" "numero" => "3" "paginaInicial" => "259" "paginaFinal" => "1174" ] ] ] ] ] ] 1 => array:3 [ "identificador" => "bib0010" "etiqueta" => "[2]" "referencia" => array:1 [ 0 => array:2 [ "contribucion" => array:1 [ 0 => array:2 [ "titulo" => "3D-faical expression synthesis and its application to face recognition systems" "autores" => array:1 [ 0 => array:2 [ "etal" => true "autores" => array:1 [ 0 => "L.R. Valdez" ] ] ] ] ] "host" => array:1 [ 0 => array:1 [ "Revista" => array:6 [ "tituloSerie" => "Journal of applied research and technology" "fecha" => "2009" "volumen" => "7" "numero" => "3" "paginaInicial" => "323" "paginaFinal" => "339" ] ] ] ] ] ] 2 => array:3 [ "identificador" => "bib0015" "etiqueta" => "[3]" "referencia" => array:1 [ 0 => array:2 [ "contribucion" => array:1 [ 0 => array:2 [ "titulo" => "Novel method for structured light system calibration" "autores" => array:1 [ 0 => array:2 [ "etal" => false "autores" => array:2 [ 0 => "S. Zhang" 1 => "P.S. Huang" ] ] ] ] ] "host" => array:1 [ 0 => array:1 [ "Revista" => array:6 [ "tituloSerie" => "Optical Engineering" "fecha" => "2006" "volumen" => "45" "numero" => "8" "paginaInicial" => "83601" "paginaFinal" => "83608" ] ] ] ] ] ] 3 => array:3 [ "identificador" => "bib0020" "etiqueta" => "[4]" "referencia" => array:1 [ 0 => array:2 [ "contribucion" => array:1 [ 0 => array:2 [ "titulo" => "Accurate calibration for structured light system" "autores" => array:1 [ 0 => array:2 [ "etal" => false "autores" => array:2 [ 0 => "Z. Li" 1 => "Y. 
shi" ] ] ] ] ] "host" => array:1 [ 0 => array:1 [ "Revista" => array:4 [ "tituloSerie" => "Optical Engineering" "fecha" => "2008" "volumen" => "47" "numero" => "05" ] ] ] ] ] ] 4 => array:3 [ "identificador" => "bib0025" "etiqueta" => "[5]" "referencia" => array:1 [ 0 => array:2 [ "contribucion" => array:1 [ 0 => array:2 [ "titulo" => "Flexible method for structured light system calibration" "autores" => array:1 [ 0 => array:2 [ "etal" => true "autores" => array:1 [ 0 => "W. Gao" ] ] ] ] ] "host" => array:1 [ 0 => array:1 [ "Revista" => array:4 [ "tituloSerie" => "Optical Engineering" "fecha" => "2008" "volumen" => "47" "numero" => "08" ] ] ] ] ] ] 5 => array:3 [ "identificador" => "bib0030" "etiqueta" => "[6]" "referencia" => array:1 [ 0 => array:2 [ "contribucion" => array:1 [ 0 => array:2 [ "titulo" => "Flexible camera calibration by viewing a plane from unknown orientation" "autores" => array:1 [ 0 => array:2 [ "etal" => false "autores" => array:1 [ 0 => "Z. Zhang" ] ] ] ] ] "host" => array:1 [ 0 => array:1 [ "LibroEditado" => array:4 [ "paginaInicial" => "666" "paginaFinal" => "673" "conferencia" => "International conference on Computer Vision" "serieFecha" => "1999" ] ] ] ] ] ] 6 => array:3 [ "identificador" => "bib0035" "etiqueta" => "[7]" "referencia" => array:1 [ 0 => array:2 [ "contribucion" => array:1 [ 0 => array:1 [ "autores" => array:1 [ 0 => array:2 [ "etal" => false "autores" => array:2 [ 0 => "G. Bradski" 1 => "A. Kaehler" ] ] ] ] ] "host" => array:1 [ 0 => array:1 [ "Libro" => array:3 [ "titulo" => "Learning OpenCV" "fecha" => "2008" "editorial" => "O’Reilly Media" ] ] ] ] ] ] 7 => array:3 [ "identificador" => "bib0040" "etiqueta" => "[8]" "referencia" => array:1 [ 0 => array:2 [ "contribucion" => array:1 [ 0 => array:1 [ "autores" => array:1 [ 0 => array:2 [ "etal" => false "autores" => array:2 [ 0 => "E. Trucco" 1 => "A. 
Verri" ] ] ] ] ] "host" => array:1 [ 0 => array:1 [ "Libro" => array:3 [ "titulo" => "Introductory techniques for 3D computer vision" "fecha" => "1998" "editorial" => "Prentice Hall" ] ] ] ] ] ] ] ] ] ] ] "idiomaDefecto" => "en" "url" => "/16656423/0000001200000001/v2_201505081640/S1665642314716086/v2_201505081640/en/main.assets" "Apartado" => null "PDF" => "https://static.elsevier.es/multimedia/16656423/0000001200000001/v2_201505081640/S1665642314716086/v2_201505081640/en/main.pdf?idApp=UINPBA00004N&text.app=https://www.elsevier.es/" "EPUB" => "https://multimedia.elsevier.es/PublicationsMultimediaV1/item/epub/S1665642314716086?idApp=UINPBA00004N" ]
Year/Month | Html | Total | |
---|---|---|---|
2024 October | 33 | 2 | 35 |
2024 September | 76 | 12 | 88 |
2024 August | 68 | 17 | 85 |
2024 July | 82 | 10 | 92 |
2024 June | 62 | 16 | 78 |
2024 May | 68 | 6 | 74 |
2024 April | 58 | 8 | 66 |
2024 March | 32 | 13 | 45 |
2024 February | 48 | 10 | 58 |
2024 January | 95 | 11 | 106 |
2023 December | 56 | 15 | 71 |
2023 November | 81 | 13 | 94 |
2023 October | 86 | 29 | 115 |
2023 September | 54 | 4 | 58 |
2023 August | 82 | 17 | 99 |
2023 July | 66 | 19 | 85 |
2023 June | 92 | 5 | 97 |
2023 May | 86 | 4 | 90 |
2023 April | 48 | 14 | 62 |
2023 March | 71 | 10 | 81 |
2023 February | 59 | 11 | 70 |
2023 January | 51 | 1 | 52 |
2022 December | 51 | 12 | 63 |
2022 November | 77 | 17 | 94 |
2022 October | 82 | 11 | 93 |
2022 September | 77 | 8 | 85 |
2022 August | 99 | 11 | 110 |
2022 July | 75 | 19 | 94 |
2022 June | 70 | 13 | 83 |
2022 May | 55 | 11 | 66 |
2022 April | 116 | 12 | 128 |
2022 March | 69 | 20 | 89 |
2022 February | 57 | 5 | 62 |
2022 January | 64 | 7 | 71 |
2021 December | 51 | 8 | 59 |
2021 November | 138 | 11 | 149 |
2021 October | 171 | 10 | 181 |
2021 September | 85 | 23 | 108 |
2021 August | 314 | 12 | 326 |
2021 July | 110 | 14 | 124 |
2021 June | 68 | 12 | 80 |
2021 May | 855 | 4 | 859 |
2021 April | 248 | 17 | 265 |
2021 March | 799 | 10 | 809 |
2021 February | 50 | 10 | 60 |
2021 January | 47 | 9 | 56 |
2020 December | 46 | 16 | 62 |
2020 November | 49 | 6 | 55 |
2020 October | 30 | 7 | 37 |
2020 September | 33 | 17 | 50 |
2020 August | 35 | 6 | 41 |
2020 July | 28 | 6 | 34 |
2020 June | 39 | 8 | 47 |
2020 May | 42 | 4 | 46 |
2020 April | 36 | 6 | 42 |
2020 March | 72 | 18 | 90 |
2020 February | 47 | 3 | 50 |
2020 January | 50 | 5 | 55 |
2019 December | 42 | 8 | 50 |
2019 November | 43 | 9 | 52 |
2019 October | 54 | 2 | 56 |
2019 September | 41 | 5 | 46 |
2019 August | 22 | 9 | 31 |
2019 July | 66 | 11 | 77 |
2019 June | 98 | 17 | 115 |
2019 May | 171 | 24 | 195 |
2019 April | 86 | 19 | 105 |
2019 March | 19 | 6 | 25 |
2019 February | 25 | 4 | 29 |
2019 January | 17 | 4 | 21 |
2018 December | 17 | 6 | 23 |
2018 November | 19 | 1 | 20 |
2018 October | 31 | 4 | 35 |
2018 September | 22 | 2 | 24 |
2018 August | 7 | 4 | 11 |
2018 July | 3 | 5 | 8 |
2018 June | 8 | 6 | 14 |
2018 May | 7 | 18 | 25 |
2018 April | 18 | 2 | 20 |
2018 March | 11 | 1 | 12 |
2018 February | 10 | 1 | 11 |
2018 January | 14 | 1 | 15 |
2017 December | 7 | 3 | 10 |
2017 November | 17 | 8 | 25 |
2017 October | 15 | 12 | 27 |
2017 September | 7 | 18 | 25 |
2017 August | 9 | 6 | 15 |
2017 July | 10 | 13 | 23 |
2017 June | 28 | 45 | 73 |
2017 May | 26 | 16 | 42 |
2017 April | 18 | 46 | 64 |
2017 March | 13 | 12 | 25 |
2017 February | 7 | 4 | 11 |
2017 January | 13 | 5 | 18 |
2016 December | 19 | 3 | 22 |
2016 November | 24 | 6 | 30 |
2016 October | 13 | 2 | 15 |
2016 September | 24 | 4 | 28 |
2016 August | 16 | 3 | 19 |
2016 July | 19 | 1 | 20 |
2016 June | 12 | 1 | 13 |
2016 May | 11 | 5 | 16 |
2016 April | 15 | 31 | 46 |
2016 March | 17 | 7 | 24 |
2016 February | 6 | 9 | 15 |
2016 January | 18 | 4 | 22 |
2015 December | 14 | 1 | 15 |
2015 November | 14 | 1 | 15 |
2015 October | 12 | 7 | 19 |
2015 September | 12 | 2 | 14 |
2015 August | 28 | 1 | 29 |
2015 July | 9 | 1 | 10 |
2015 June | 1 | 0 | 1 |
2015 May | 6 | 1 | 7 |
2015 April | 3 | 2 | 5 |