BACKGROUND: Due to their ability to solve complex problems, deep neural networks (DNNs) are becoming increasingly popular in medical applications. However, decision-making by such algorithms is essentially a black-box process that renders it difficult for physicians to judge whether the decisions are reliable. The use of explainable artificial intelligence (XAI) is often suggested as a solution to this problem. We investigate how XAI is used for skin cancer detection: how is it used during the development of new DNNs? What kinds of visualisations are commonly used? Are there systematic evaluations of XAI with dermatologists or dermatopathologists? METHODS: Google Scholar, PubMed, IEEE Xplore, ScienceDirect and Scopus were searched for peer-reviewed studies published between January 2017 and October 2021 applying XAI to dermatological images: the search terms histopathological image, whole-slide image, clinical image, dermoscopic image, skin, dermatology, explainable, interpretable and XAI were used in various combinations. Only studies concerned with skin cancer were included. RESULTS: 37 publications fulfilled our inclusion criteria. Most studies (19/37) simply applied existing XAI methods to their classifier to interpret its decision-making. Some studies (4/37) proposed new XAI methods or improved upon existing techniques. The remaining studies (14/37) addressed specific questions such as bias detection and the impact of XAI on man-machine interactions. However, only three of them evaluated the performance and confidence of humans using computer-aided diagnosis (CAD) systems with XAI. CONCLUSION: XAI is commonly applied during the development of DNNs for skin cancer detection. However, a systematic and rigorous evaluation of its usefulness in this scenario is lacking.
%0 Journal Article
%1 Hauser2022-de
%A Hauser, Katja
%A Kurz, Alexander
%A Haggenmüller, Sarah
%A Maron, Roman C
%A von Kalle, Christof
%A Utikal, Jochen S
%A Meier, Friedegund
%A Hobelsberger, Sarah
%A Gellrich, Frank F
%A Sergon, Mildred
%A Hauschild, Axel
%A French, Lars E
%A Heinzerling, Lucie
%A Schlager, Justin G
%A Ghoreschi, Kamran
%A Schlaak, Max
%A Hilke, Franz J
%A Poch, Gabriela
%A Kutzner, Heinz
%A Berking, Carola
%A Heppt, Markus V
%A Erdmann, Michael
%A Haferkamp, Sebastian
%A Schadendorf, Dirk
%A Sondermann, Wiebke
%A Goebeler, Matthias
%A Schilling, Bastian
%A Kather, Jakob N
%A Fröhling, Stefan
%A Lipka, Daniel B
%A Hekler, Achim
%A Krieghoff-Henning, Eva
%A Brinker, Titus J
%D 2022
%I Elsevier BV
%J Eur. J. Cancer
%K Artificial intelligence; Dermatology; Man-machine systems; Skin neoplasms; Systematic review; topic_lifescience
%P 54--69
%T Explainable artificial intelligence in skin cancer recognition: A systematic review
%V 167
@article{Hauser2022-de,
author = {Hauser, Katja and Kurz, Alexander and Haggenm{\"u}ller, Sarah and Maron, Roman C and von Kalle, Christof and Utikal, Jochen S and Meier, Friedegund and Hobelsberger, Sarah and Gellrich, Frank F and Sergon, Mildred and Hauschild, Axel and French, Lars E and Heinzerling, Lucie and Schlager, Justin G and Ghoreschi, Kamran and Schlaak, Max and Hilke, Franz J and Poch, Gabriela and Kutzner, Heinz and Berking, Carola and Heppt, Markus V and Erdmann, Michael and Haferkamp, Sebastian and Schadendorf, Dirk and Sondermann, Wiebke and Goebeler, Matthias and Schilling, Bastian and Kather, Jakob N and Fr{\"o}hling, Stefan and Lipka, Daniel B and Hekler, Achim and Krieghoff-Henning, Eva and Brinker, Titus J},
biburl = {https://puma.scadsai.uni-leipzig.de/bibtex/2b898f7460138d23964f8ade1ed2b516c/scadsfct},
copyright = {http://creativecommons.org/licenses/by-nc-nd/4.0/},
journal = {Eur. J. Cancer},
keywords = {Artificial intelligence; Dermatology; Man-machine systems; Skin neoplasms; Systematic review; topic_lifescience},
language = {en},
month = may,
pages = {54--69},
publisher = {Elsevier BV},
title = {Explainable artificial intelligence in skin cancer recognition: A systematic review},
volume = 167,
year = 2022
}