@article{Gray2018GenetProgramEvolvableMach,
title = {Data exploration in evolutionary reconstruction of {PET} images},
journal = {Genetic Programming and Evolvable Machines},
volume = 19,
number = 3,
pages = {391-419},
year = 2018,
month = sep,
issn = {1573-7632},
doi = {10.1007/s10710-018-9330-7},
author = {Cameron C. Gray and Shatha F. {Al-Maliki} and Franck P. Vidal},
  keywords = {Fly Algorithm, Tomography reconstruction, Information visualisation,
              Data exploration, Artificial evolution, Parisian evolution},
abstract = {This work is based on a cooperative co-evolution algorithm called
`Fly Algorithm', which is an evolutionary algorithm (EA) where individuals
are called `flies'. It is a specific case of the `Parisian Approach' where
the solution of an optimisation problem is a set of individuals
(e.g. the whole population) instead of a single individual (the best one) as
in typical EAs. The optimisation problem considered here is tomography
reconstruction in positron emission tomography (PET). It estimates
the concentration of a radioactive substance (called a radiotracer) within
the body. Tomography, in this context, is considered as a difficult
ill-posed inverse problem. The Fly Algorithm aims at optimising
the position of 3-D points that mimic the radiotracer. At the end of
the optimisation process, the fly population is extracted as it corresponds
to an estimate of the radioactive concentration. During the optimisation
loop, a large amount of data is generated by the algorithm, such as image metrics,
duration, and internal states. This data is recorded in a log file that
can be post-processed and visualised. We propose using information
visualisation and user interaction techniques to explore the algorithm's
internal data. Our aim is to better understand what happens during
the evolutionary loop. Using an example, we demonstrate that it is possible
to interactively discover when an early termination could be triggered.
This is implemented as a new stopping criterion, which is tested on two
other examples, where it leads to a 60\% reduction in the number of
iterations without any loss of accuracy.},
  pdf = {pdf/Gray2018GenetProgramEvolvableMach.pdf}
}
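
The abstract above describes logging per-iteration metrics and deriving a new
early-stopping criterion from their interactive exploration, but does not state
the criterion itself. Below is a minimal, hypothetical Python sketch of one
plausible plateau-based test on a logged error series; the function name
has_converged and the window and tol parameters are illustrative assumptions,
not the paper's actual rule.

# Hypothetical plateau-based stopping test on a logged per-iteration error
# metric; the paper's actual criterion may differ.

def has_converged(error_log, window=50, tol=1e-3):
    """Return True if the best error has not improved by more than
    `tol` (relative) over the last `window` iterations."""
    if len(error_log) < 2 * window:
        return False                # not enough history yet
    recent_best = min(error_log[-window:])
    earlier_best = min(error_log[:-window])
    if earlier_best == 0.0:
        return True                 # already perfect
    improvement = (earlier_best - recent_best) / earlier_best
    return improvement < tol

# Example: the optimisation loop would append one error value per iteration
# and stop as soon as the test fires.
error_log = []
for iteration in range(10_000):
    error = max(0.01, 1.0 - 0.01 * iteration)   # stand-in for the real metric
    error_log.append(error)
    if has_converged(error_log):
        print(f"Early stop at iteration {iteration}")
        break
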
@article{AliAbbood2017ArtsAndScience,
title = {{Fly4Arts}: Evolutionary Digital Art with the {Fly} Algorithm},
journal = {{ISTE} Arts \& Science},
volume = {17},
number = 1,
pages = {11-16},
year = 2017,
month = oct,
issn = {2515-8767},
doi = {10.21494/ISTE.OP.2017.0177},
  author = {Zainab {Ali Abbood} and Franck P. Vidal},
keywords = {Digital mosaic, Evolutionary art, Fly Algorithm, Parisian evolution, Cooperative co-evolution},
abstract = {The aim of this study is to generate artistic images, such as
digital mosaics, as an optimisation problem without the introduction of
any a priori knowledge or constraint other than an input image.
The usual practice to produce digital mosaic images heavily relies on
Centroidal Voronoi diagrams. We demonstrate here that it can be modelled
as an optimisation problem solved using a cooperative co-evolution
strategy based on the Parisian evolution approach, the Fly algorithm.
An individual is called a fly. The aim of the algorithm is to optimise
the position of infinitely small 3-D points (the flies). The Fly
algorithm has been initially used in real-time stereo vision for
robotics. It has also demonstrated promising results in image
reconstruction for tomography. In this new application, a much more
complex representation has been studied. A fly is a tile. It has its
own position, size, colour, and rotation angle. Our method takes
advantage of graphics processing units (GPUs) to generate the images
using the modern OpenGL Shading Language (GLSL) and Open Computing
Language (OpenCL) to compute the difference between the input image and
the simulated image. Different types of tiles are implemented, some with
transparency, to generate different visual effects, such as digital
mosaic and spray paint. An online study with 41 participants has been
conducted to compare some of our results with those generated using
open-source image manipulation software. It demonstrates that
our method leads to more visually appealing images.},
pdf = {./pdf/AliAbbood2017ArtsAndScience.pdf}
}
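
The abstract above mentions computing the difference between the input image
and the image simulated from the fly population, on the GPU with OpenCL. The
NumPy sketch below illustrates one plausible global error metric (sum of
absolute pixel differences) on the CPU; it is an assumption, not the paper's
exact metric or implementation.

# Plausible global error metric between the reference image and the image
# simulated from the current fly population (the paper computes this on the
# GPU with OpenCL; this NumPy version is only an illustrative assumption).
import numpy as np

def global_error(reference, simulated):
    """Sum of absolute per-pixel differences; lower is better."""
    return np.abs(reference.astype(np.float64) -
                  simulated.astype(np.float64)).sum()

# Toy usage with random 64x64 RGB images.
rng = np.random.default_rng(0)
reference = rng.integers(0, 256, size=(64, 64, 3))
simulated = rng.integers(0, 256, size=(64, 64, 3))
print(global_error(reference, simulated))
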
@article{Abbood2017SwarmEvolComput,
author = "Zainab {Ali Abbood} and Julien Lavauzelle and \'Evelyne Lutton and
Jean-Marie Rocchisani and Jean Louchet and Franck P. Vidal",
title = "Voxelisation in the 3-D Fly Algorithm for PET",
journal = "Swarm and Evolutionary Computation",
year = 2017,
volume = 36,
pages = "91-105",
month = oct,
abstract = "The Fly Algorithm was initially developed for 3-D robot vision
applications. It consists in solving the inverse problem of shape
reconstruction from projections by evolving a population of 3-D points in
space (the `flies'), using an evolutionary optimisation strategy. Here, in
its version dedicated to tomographic reconstruction in medical imaging, the
flies mimic radioactive photon sources. Evolution is controlled
using a fitness function based on the discrepancy between the projections
simulated by the flies and the actual pattern received by the sensors.
The reconstructed radioactive concentration is derived from the population
of flies, i.e. a collection of points in the 3-D Euclidean space, after
convergence. `Good' flies were previously binned into voxels. In this paper,
we study which flies to include in the final solution and how this
information can be sampled to provide more accurate datasets in a reduced
computation time. We investigate the use of density fields, based on
Metaballs and on Gaussian functions respectively, to obtain a realistic
output. The spread of each Gaussian kernel is modulated as a function of
the corresponding fly's fitness. The resulting volumes are compared with
previous work in terms of normalised-cross correlation. In our test-cases,
data fidelity increases by more than 10\% when density fields are used
instead of binning. Our method also provides reconstructions comparable to
those obtained with well-established techniques used in medicine
(filtered back-projection and ordered subset expectation-maximisation).",
doi = "10.1016/j.swevo.2017.04.001",
issn = "2210-6502",
keywords = "Fly algorithm; Evolutionary computation; tomography
reconstruction; iterative algorithms; inverse problems;
co-operative co-evolution",
publisher = {Elsevier},
pdf = {pdf/Abbood2017SwarmEvolComput.pdf},
}
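
The abstract above describes building a volume by placing a Gaussian kernel at
each fly, with the kernel spread modulated by the fly's fitness, and comparing
volumes using normalised cross-correlation. The NumPy sketch below illustrates
that idea; the fitness-to-spread mapping, grid size, and function names are
assumptions, not the paper's formulas.

# Minimal sketch: splat one isotropic Gaussian per fly into a voxel grid,
# with the kernel spread shrinking as the fly's fitness improves, then
# compare two volumes with zero-mean normalised cross-correlation (ZNCC).
# The fitness-to-sigma mapping below is a guess, not the paper's formula.
import numpy as np

def splat_flies(positions, fitnesses, shape=(32, 32, 32)):
    grid = np.zeros(shape)
    zs, ys, xs = np.indices(shape)
    for (x, y, z), fit in zip(positions, fitnesses):
        sigma = 2.0 / (1.0 + fit)          # better fitness -> tighter kernel
        d2 = (xs - x) ** 2 + (ys - y) ** 2 + (zs - z) ** 2
        grid += np.exp(-d2 / (2.0 * sigma ** 2))
    return grid

def zncc(a, b):
    a = (a - a.mean()) / a.std()
    b = (b - b.mean()) / b.std()
    return float((a * b).mean())

rng = np.random.default_rng(1)
positions = rng.uniform(0, 32, size=(100, 3))
fitnesses = rng.uniform(0, 5, size=100)
volume = splat_flies(positions, fitnesses)
print(zncc(volume, volume))   # 1.0 for identical volumes
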
@article{Vidal2016ComputMedImagingGraph,
author = "Franck P. Vidal and Pierre-Fr\'ed\'eric Villard",
title = "Development and validation of real-time simulation of X-ray imaging
with respiratory motion",
journal = "Computerized Medical Imaging and Graphics",
year = 2016,
volume = 49,
pages = "1-15",
month = apr,
abstract = "We present a framework that combines evolutionary
optimisation, soft tissue modelling and ray tracing on GPU to
simultaneously compute the respiratory motion and X-ray imaging in
real-time. Our aim is to provide validated building blocks with high
fidelity to closely match both the human physiology and the physics of
X-rays. A CPU-based set of algorithms is presented to model organ
behaviours during respiration. Soft tissue deformation is computed with an
extension of the Chain Mail method. Rigid elements move according to
kinematic laws. A GPU-based surface rendering method is proposed to
compute the X-ray image using the Beer–Lambert law. It is provided as an
open-source library. A quantitative validation study is provided to
objectively assess the accuracy of both components: (i) the respiration
against anatomical data, and (ii) the X-ray against the Beer–Lambert law and
the results of Monte Carlo simulations. Our implementation can be used in
various applications, such as an interactive medical virtual environment to
train percutaneous transhepatic cholangiography in interventional radiology,
2D/3D registration, computation of digitally reconstructed radiographs,
and simulation of 4D sinograms to test tomography reconstruction tools.",
doi = "10.1016/j.compmedimag.2015.12.002",
pmid = {26773644},
issn = "0895-6111",
keywords = "X-ray simulation, Deterministic simulation (ray-tracing),
Digitally reconstructed radiograph, Respiration simulation,
Medical virtual environment, Imaging guidance,
Interventional radiology training",
publisher = {Elsevier},
pdf = {pdf/Vidal2016ComputMedImagingGraph.pdf},
}
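
The X-ray component described above computes attenuation with the Beer-Lambert
law: for a ray crossing materials with linear attenuation coefficients mu_i
over path lengths d_i, the transmitted intensity is
I = I0 * exp(-(mu_1*d_1 + ... + mu_n*d_n)). The short sketch below evaluates
this for a single ray; the coefficients and path lengths are made-up sample
values, not data from the paper.

# Beer-Lambert attenuation along a single ray:
#   n_out = n_in * exp(-sum(mu_i * d_i))
# The coefficients and path lengths below are made-up sample values.
import numpy as np

mu = np.array([0.02, 0.05, 0.5])    # linear attenuation coefficients (mm^-1)
d  = np.array([40.0, 10.0, 2.0])    # path length through each material (mm)

n_in = 1.0e6                         # incident photon count
n_out = n_in * np.exp(-(mu * d).sum())
print(f"transmitted fraction: {n_out / n_in:.4f}")
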
@article{Villard2014IntJComputAssistRadiolSurg,
author = {P. F. Villard and F. P. Vidal and L. {ap Cenydd} and R. Holbrey and
S. Pisharody and S. Johnson and A. Bulpitt and N. W. John and
F. Bello and D. Gould},
title = {Interventional radiology virtual simulator for liver biopsy},
journal = {International Journal of Computer Assisted Radiology and Surgery},
year = 2014,
volume = 9,
pages = {255-267},
number = 2,
month = mar,
abstract = {Purpose: Training in Interventional Radiology currently uses
the apprenticeship model, where clinical and technical skills of invasive
procedures are learnt during practice in patients. This apprenticeship
training method is increasingly limited by regulatory restrictions
on working hours, concerns over patient risk through
trainees' inexperience and the variable exposure to case mix and
emergencies during training. To address this, we have developed
a computer-based simulation of visceral needle puncture procedures.
Methods: A real-time framework has been built that includes:
segmentation, physically based modelling, haptics rendering,
pseudo-ultrasound generation and the concept of a physical mannequin.
It is the result of a close collaboration between different universities,
involving computer scientists, clinicians, clinical engineers and
occupational psychologists.
Results: The technical implementation of the framework is a robust and
real-time simulation environment combining a physical platform and
an immersive computerized virtual environment. The face, content and
construct validation have been previously assessed, showing
the reliability and effectiveness of this framework, as well as
its potential for teaching visceral needle puncture.
Conclusion: A simulator for ultrasound-guided liver biopsy
has been developed. It includes functionalities and metrics extracted from
cognitive task analysis. This framework can be useful during training,
particularly given the known difficulties in gaining significant practice of
core skills in patients.},
  doi = {10.1007/s11548-013-0929-0},
pmid = {23881251},
keywords = {Biomedical computing, Image segmentation, Simulation, Virtual reality},
publisher = {Springer Berlin Heidelberg},
pdf = {pdf/Villard2014IntJComputAssistRadiolSurg.pdf},
}
@article{Vidal2012IEEETransBiomedEng,
author = {F. P. Vidal and {P.-F.} Villard and \'E. Lutton},
title = {Tuning of Patient Specific Deformable Models using an Adaptive Evolutionary Optimization Strategy},
journal = {IEEE Transactions on Biomedical Engineering},
year = 2012,
volume = 59,
pages = {2942-2949},
number = 10,
month = oct,
abstract = {We present and analyze the behavior of an evolutionary algorithm designed
to estimate the parameters of a complex organ behavior model. The model is
adaptable to account for patient specificities. The aim is to finely tune the
model to be accurately adapted to various real patient datasets. It can then be
embedded, for example, in high fidelity simulations of the human physiology.
We present here an application focused on respiration modeling. The algorithm
is automatic and adaptive. A compound fitness function has been designed to take
into account the various quantities that have to be minimized. The algorithm
efficiency is experimentally analyzed on several real test-cases:
i) three patient datasets have been acquired with the breath hold protocol, and
ii) two datasets correspond to 4D CT scans. Its performance is compared with
two traditional methods (downhill simplex and conjugate gradient descent),
a random search and a basic real-valued genetic algorithm. The results show that
our evolutionary scheme provides significantly more stable and accurate results.},
doi = {10.1109/TBME.2012.2213251},
pmid = {22907958},
keywords = {Evolutionary computation, inverse problems, medical simulation,
adaptive algorithm},
publisher = {IEEE},
pdf = {pdf/Vidal2012IEEETransBiomedEng.pdf}
}
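
The abstract above mentions a compound fitness function combining several
quantities that all have to be minimised, without listing the terms or
weights. The sketch below shows a generic weighted-sum form purely as an
illustration; the term names and weights are hypothetical.

# Purely illustrative compound fitness: a weighted sum of several error terms
# that all have to be minimised (the paper's actual terms and weights are not
# reproduced here).
def compound_fitness(errors, weights=None):
    """errors: dict of named, already-normalised error terms."""
    if weights is None:
        weights = {name: 1.0 for name in errors}
    return sum(weights[name] * value for name, value in errors.items())

print(compound_fitness({"landmark_error": 0.12,
                        "volume_error": 0.05,
                        "surface_error": 0.30}))
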
@article{Villard2009IntJComputAssistRadiolSurg,
author = {{P.-F.} Villard and F. P. Vidal and C. Hunt and F. Bello and N. W.
John and S. Johnson and D. A. Gould},
title = {A prototype percutaneous transhepatic cholangiography training simulator with real-time breathing motion},
journal = {International Journal of Computer Assisted Radiology and Surgery},
year = 2009,
volume = 4,
pages = {571-578},
number = 6,
month = nov,
abstract = {Purpose: We present here a simulator for interventional radiology
focusing on percutaneous transhepatic cholangiography (PTC). This
procedure consists of inserting a needle into the biliary tree using
fluoroscopy for guidance. Methods: The requirements of the simulator
have been driven by a task analysis. The three main components have
been identified: the respiration, the real-time X-ray display (fluoroscopy)
and the haptic rendering (sense of touch). The framework for modelling
the respiratory motion is based on kinematics laws and on the Chainmail
algorithm. The fluoroscopic simulation is performed on the graphic
card and makes use of the Beer-Lambert law to compute the X-ray attenuation.
Finally, the haptic rendering is integrated into the virtual environment
and takes into account the soft-tissue reaction force feedback and
maintenance of the initial direction of the needle during the insertion.
Results: Five training scenarios have been created using patient-specific
data. Each of these provides the user with variable breathing behaviour,
fluoroscopic display tuneable to any device parameters and needle
force feedback. Conclusions: A detailed task analysis has been used
to design and build the PTC simulator described in this paper. The
simulator includes real-time respiratory motion with two independent
parameters (rib kinematics and diaphragm action), on-line fluoroscopy
implemented on the Graphics Processing Unit and haptic feedback to
feel the soft-tissue behaviour of the organs during the needle insertion.},
doi = {10.1007/s11548-009-0367-1},
pmid = {20033333},
keywords = {Interventional radiology; Virtual environments; Respiration simulation;
X-ray simulation; Needle puncture; Haptics; Task analysis},
publisher = {Springer},
pdf = {pdf/Villard2009IntJComputAssistRadiolSurg.pdf}
}
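
The respiratory motion model above is based on kinematic laws and the
ChainMail algorithm. The 1-D sketch below illustrates the core ChainMail idea
(neighbours move only when a link is compressed or stretched beyond its
bounds, and the adjustment propagates along the chain); the link limits and
positions are illustrative, and the real simulator works on 3-D anatomy.

# 1-D sketch of the ChainMail idea the abstract refers to: when an element is
# moved, its neighbours are only displaced if the link to them is compressed
# below `min_len` or stretched beyond `max_len`; changes then propagate along
# the chain. Parameter values are illustrative.
def chainmail_1d(x, moved_index, new_pos, min_len=0.5, max_len=1.5):
    x = list(x)
    x[moved_index] = new_pos
    # propagate to the right
    for i in range(moved_index + 1, len(x)):
        gap = x[i] - x[i - 1]
        if gap > max_len:
            x[i] = x[i - 1] + max_len
        elif gap < min_len:
            x[i] = x[i - 1] + min_len
        else:
            break                    # constraint satisfied, stop propagating
    # propagate to the left
    for i in range(moved_index - 1, -1, -1):
        gap = x[i + 1] - x[i]
        if gap > max_len:
            x[i] = x[i + 1] - max_len
        elif gap < min_len:
            x[i] = x[i + 1] - min_len
        else:
            break
    return x

positions = [0.0, 1.0, 2.0, 3.0, 4.0]
print(chainmail_1d(positions, moved_index=0, new_pos=2.2))
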
@article{Vidal2008ComputAnimatVirtW,
author = {F. P. Vidal and N. W. John and D. A. Gould and A. E. Healey},
title = {Simulation of Ultrasound Guided Needle Puncture using Patient Specific
Data with {3D} Textures and Volume Haptics},
journal = {Computer Animation and Virtual Worlds},
year = 2008,
volume = 19,
pages = {111-127},
number = 2,
month = may,
abstract = {We present an integrated system for training ultrasound (US) guided
needle puncture. Our aim is to provide a validated training tool
for interventional radiology (IR) that uses actual patient data.
IR procedures are highly reliant on the sense of touch and so haptic
hardware is an important part of our solution. A hybrid surface/volume
haptic rendering of an US transducer is proposed to constrain the
device to remain outside the bony structures when scanning the patient's
skin. A volume haptic model is proposed that implements an effective
model of needle puncture. Force measurements have been made on real
tissue and the resulting data is incorporated into the model. The
other input data required is a computed tomography (CT) scan of the
patient that is used to create the patient specific models. It is
also the data source for a novel simulation of a virtual US scanner,
which is used to guide the needle to the correct location.},
doi = {10.1002/cav.217},
keywords = {medical virtual environment; imaging guidance; interventional radiology
training; needle puncture; volume haptics; vertex and pixel shaders},
publisher = {John Wiley \& Sons},
pdf = {pdf/Vidal2008ComputAnimatVirtW.pdf}
}
@article{Vidal2006ComputGraphForum,
author = {F. P. Vidal and F. Bello and K. W. Brodlie and D. A. Gould and N.
W. John and R. Phillips and N. J. Avis},
title = {Principles and Applications of Computer Graphics in Medicine},
journal = {Computer Graphics Forum},
year = 2006,
volume = 25,
pages = {113-137},
number = 1,
month = mar,
abstract = {The medical domain provides excellent opportunities for the application
of computer graphics, visualization and virtual environments, with
the potential to help improve healthcare and bring benefits to patients.
This survey paper provides a comprehensive overview of the state-of-the-art
in this exciting field. It has been written from the perspective
of both computer scientists and practising clinicians and documents
past and current successes together with the challenges that lie
ahead. The article begins with a description of the software algorithms
and techniques that allow visualization of and interaction with medical
data. Example applications from research projects and commercially
available products are listed, including educational tools; diagnostic
aids; virtual endoscopy; planning aids; guidance aids; skills training;
computer augmented reality and use of high performance computing.
The final section of the paper summarizes the current issues and
looks ahead to future developments.},
doi = {10.1111/j.1467-8659.2006.00822.x},
keywords = {visualization; augmented and virtual realities; computer graphics;
health; physically-based modeling; medical sciences; simulation},
publisher = {Blackwell},
pdf = {pdf/Vidal2006ComputGraphForum.pdf}
}
@article{Vidal2005NuclInstrumMethB,
  author = {F. P. Vidal and J. M. L\'etang and G. Peix and P. Cloetens},
title = {Investigation of artefact sources in synchrotron microtomography
via virtual X-ray imaging},
journal = {Nuclear Instruments and Methods in Physics Research B},
year = 2005,
volume = 234,
pages = {333-348},
number = 3,
month = jun,
abstract = {Qualitative and quantitative use of volumes reconstructed by computed
tomography (CT) can be compromised due to artefacts which corrupt
the data. This article illustrates a method based on virtual X-ray
imaging to investigate sources of artefacts which occur in microtomography
using synchrotron radiation. In this phenomenological study, different
computer simulation methods based on physical X-ray properties, possibly
coupled with experimental data, are used in order to compare artefacts
obtained theoretically to those present in a volume acquired experimentally,
or to predict them for a particular experimental setup. The article
begins with the presentation of a synchrotron microtomographic slice
of a reinforced fibre composite acquired at the European Synchrotron
Radiation Facility (ESRF) containing streak artefacts. This experimental
context is used as the motivation throughout the paper to illustrate
the investigation of some artefact sources. First, the contribution
of direct radiation is compared to the contribution of secondary
radiation. Then, the effects of some methodological aspects are detailed,
including under-sampling, sample and camera misalignment, sample
extending outside of the field of view and photonic noise. The effect
of harmonic components present in the experimental spectrum is also
simulated. Afterwards, detector properties, such as the impulse response
or defective pixels, are taken into account. Finally, the importance
of phase contrast effects is evaluated. In the last section, this
investigation is discussed by putting emphasis on the experimental
context which is used throughout this paper.},
doi = {10.1016/j.nimb.2005.02.003},
keywords = {X-ray microtomography; Artefact; Deterministic simulation (ray-tracing);
Monte Carlo method; Phase contrast; Modulation transfer function},
publisher = {Elsevier},
pdf = {pdf/Vidal2005NuclInstrumMethB.pdf}
}
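
Among the artefact sources listed above is photonic noise. A standard way to
simulate it, sketched below, is to draw each detector pixel's photon count
from a Poisson distribution whose mean is the noise-free transmitted count;
the incident photon count and transmission map here are made-up values, not
the paper's experimental setup.

# Minimal sketch of simulating photonic (quantum) noise: treat each detector
# pixel's photon count as a Poisson variable whose mean is the noise-free
# transmitted count. The incident count and attenuation map are made up.
import numpy as np

rng = np.random.default_rng(42)
n_incident = 1.0e4                                   # photons per pixel
transmission = rng.uniform(0.2, 0.9, size=(8, 8))    # stand-in noise-free map
noisy_counts = rng.poisson(n_incident * transmission)
noisy_transmission = noisy_counts / n_incident
print(np.abs(noisy_transmission - transmission).max())
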
