@inproceedings{Vidal2009TPCG,
  author     = {F. P. Vidal and M. Garnier and N. Freud and J. M. L{\'e}tang and N. W. John},
  title      = {Simulation of {X-ray} Attenuation on the {GPU}},
  booktitle  = {Proceedings of Theory and Practice of Computer Graphics 2009},
  year       = 2009,
  pages      = {25--32},
  month      = jun,
  address    = {Cardiff, UK},
  annotation = {Jun~17--19, 2009},
  note       = {Winner of Ken Brodlie Prize for Best Paper},
  doi        = {10.2312/LocalChapterEvents/TPCG/TPCG09/025-032},
  abstract   = {In this paper, we propose to take advantage of computer graphics hardware to achieve an accelerated simulation of X-ray transmission imaging, and we compare results with a fast and robust software-only implementation. The running times of the GPU and CPU implementations are compared in different test cases. The results show that the GPU implementation with full floating point precision is faster by a factor of about 60 to 65 than the CPU implementation, without any significant loss of accuracy. The increase in performance achieved with GPU calculations opens up new perspectives. Notably, it paves the way for physically-realistic simulation of X-ray imaging in interactive time.},
  keywords   = {Physically based modeling, Raytracing, Physics},
  publisher  = {Eurographics Association},
  pdf        = {./pdf/Vidal2009TPCG.pdf}
}
@inproceedings{Sinha2009UKRC,
  author     = {A. Sinha and S. Johnson and C. Hunt and H. Woolnough and F. P. Vidal and D. Gould},
  title      = {Preliminary face and content validation of {Imagine-S}: the {CIRSE} \& {BSIR} Experience},
  booktitle  = {Proceedings of the UK Radiological Congress},
  year       = 2009,
  pages      = {2},
  month      = jun,
  address    = {Manchester, UK},
  annotation = {Jun~8--10, 2009},
  abstract   = {KEY LEARNING OBJECTIVES: To determine face and content validity of a physics-based virtual reality (VR) training simulation of visceral interventional radiology needle puncture procedures. DESCRIPTION: Imaging-guided needle puncture procedures use hand-eye coordination to direct needles, wires and catheters to perform nephrostomy. The visuo-spatial and manipulation skills required are learnt in a traditional apprenticeship, though Working Time Directives are reducing the time and case mix available to train. Animal and fixed models reproduce some training objectives, though they are an imperfect substitute for the `real patient' experience. ImaGiNe-S is a computer-based VR training simulation, using variable virtual environments with stereo 3D visual representation and devices to convey feel, realistically mimicking a percutaneous nephrostomy procedure. With ethical approval, a prospective pilot study was conducted at two international conferences to assess validity of Imagine-S. 53 subjects (49 male, 4 female: 30 trainees and 23 subject matter experts), underwent baseline testing on a simulated percutaneous nephrostomy. Face and content validation were assessed using a 5-point Likert scale. Outcomes showed that 41/53 (78\%) participants thought that the design of Imagine-S was moderately realistic with content validity being rated averagely for all critical task steps. 44/53 (83\%) participants thought that Imagine-S is a useful model for training skills for nephrostomy. CONCLUSION: Imagine-S may be a useful model for training skills for nephrostomy. With further development it may allow trainees to develop basic skills of percutaneous renal collecting system access. Further assessment of face and content validity is needed.}
}
@inproceedings{Villard2008UKRC,
  author     = {P. F. Villard and P. Littler and V. Gough and F. P. Vidal and C. Hughes and N. W. John and V. Luboz and F. Bello and Y. Song and R. Holbrey and A. Bulpitt and D. Mullan and N. Chalmers and D. Kessel and D. Gould},
  title      = {Improving the modeling of Medical Imaging data for simulation},
  booktitle  = {Proceedings of the UK Radiological Congress},
  year       = 2008,
  pages      = {61},
  month      = jun,
  address    = {Birmingham, UK},
  annotation = {Jun~2--4, 2008},
  abstract   = {PURPOSE-MATERIALS: To use patient imaging as the basis for developing virtual environments (VE). BACKGROUND Interventional radiology basic skills are still taught in an apprenticeship in patients, though these could be learnt in high fidelity simulations using VE. Ideally, imaging data sets for simulation of image-guided procedures would alter dynamically in response to deformation forces such as respiration and needle insertion. We describe a methodology for deriving such dynamic volume rendering from patient imaging data. METHODS With patient consent, selected, routine imaging (computed tomography, magnetic resonance, ultrasound) of straightforward and complex anatomy and pathology was anonymised and uploaded to a repository at Bangor University. Computer scientists used interactive segmentation processes to label target anatomy for creation of a surface (triangular) and volume (tetrahedral) mesh. Computer modeling techniques used a mass spring algorithm to map tissue deformations such as needle insertion and intrinsic motion (e.g. respiration). These methods, in conjunction with a haptic device, provide output forces in real time to mimic the `feel' of a procedure. Feedback from trainees and practitioners was obtained during preliminary demonstrations. RESULTS Data sets were derived from 6 patients and converted into deformable VEs. Preliminary content validation studies of a framework developed for training on liver biopsy procedures, demonstrated favourable observations that are leading to further revisions, including implementation of an immersive VE. CONCLUSION: It is possible to develop dynamic volume renderings from static patient data sets and these are likely to form the basis of future simulations for IR training of procedural interventions.}
}
@inproceedings{Cosson2004UKRC,
  author     = {P. Cosson and J. {Yu Cheng} and S. Keswani and G. Debouzy and D. Deprez and F. Vidal},
  title      = {Virtual radiographic environments become a reality},
  booktitle  = {Proceedings of the UK Radiological Congress},
  year       = 2004,
  month      = jun,
  address    = {Manchester, UK},
  annotation = {Jun~6--8, 2004}
}
@inproceedings{Cosson2004ALTC,
  author     = {P. Cosson and G. Debouzy and D. Deprez and F. Vidal and S. Keswani and J. Warren},
  title      = {Virtual radiographic environments: what use would they be?},
  booktitle  = {University of Teesside Annual Learning \& Teaching Conference},
  year       = 2004,
  month      = jan,
  address    = {Middlesbrough, UK},
  annotation = {Jan~15, 2004}
}
@inproceedings{Cosson2003ALTC,
  author     = {P. Cosson and G. Debouzy and D. Deprez and F. Vidal and S. Keswani and J. Warren},
  title      = {Virtual radiographic environments},
  booktitle  = {University of Teesside Annual Learning \& Teaching Conference},
  year       = 2003,
  address    = {Middlesbrough, UK}
}
This file was generated by bibtex2html 1.97.