<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article>
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:ali="http://www.niso.org/schemas/ali/1.0/" article-type="research-article" dtd-version="1.2" xml:lang="en"><front><journal-meta><journal-id journal-id-type="publisher-id">Urologiia</journal-id><journal-title-group><journal-title xml:lang="en">Urologiia</journal-title><trans-title-group xml:lang="ru"><trans-title>Урология</trans-title></trans-title-group></journal-title-group><issn publication-format="print">1728-2985</issn><issn publication-format="electronic">2414-9020</issn><publisher><publisher-name xml:lang="en">Bionika Media</publisher-name></publisher></journal-meta><article-meta><article-id pub-id-type="publisher-id">680243</article-id><article-id pub-id-type="doi">10.18565/urology.2024.6.5-11</article-id><article-categories><subj-group subj-group-type="toc-heading" xml:lang="en"><subject>Original Articles</subject></subj-group><subj-group subj-group-type="toc-heading" xml:lang="ru"><subject>Оригинальные статьи</subject></subj-group><subj-group subj-group-type="article-type"><subject>Research Article</subject></subj-group></article-categories><title-group><article-title xml:lang="en">Development of a deep learning-based system for supporting medical decision-making in PI-RADs score determination</article-title><trans-title-group xml:lang="ru"><trans-title>Разработка системы на основе глубокого обучения для поддержки медицинских решений при определении оценки PI-RADS</trans-title></trans-title-group></title-group><contrib-group><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-0601-4713</contrib-id><name><surname>Mingze</surname><given-names>He</given-names></name><address><country country="RU">Russian Federation</country></address><bio xml:lang="en"><p>Postgraduate student of the Institute</p></bio><bio xml:lang="ru"><p>аспирант 3 года 
обучения</p></bio><email>hemingze97@gmail.com</email><xref ref-type="aff" rid="aff1"/></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-3007-1315</contrib-id><name-alternatives><name xml:lang="en"><surname>Enikeev</surname><given-names>M. E.</given-names></name><name xml:lang="ru"><surname>Еникеев</surname><given-names>М. Э.</given-names></name></name-alternatives><address><country country="RU">Russian Federation</country></address><bio xml:lang="en"><p>Dr.Med.Sci., professor of the Institute</p></bio><bio xml:lang="ru"><p>доктор медицинских наук, профессор</p></bio><email>enikmic@mail.ru</email><xref ref-type="aff" rid="aff1"/></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-6005-6247</contrib-id><name-alternatives><name xml:lang="en"><surname>Rzayev</surname><given-names>R. T.</given-names></name><name xml:lang="ru"><surname>Рзаев</surname><given-names>Р. Т.</given-names></name></name-alternatives><address><country country="RU">Russian Federation</country></address><bio xml:lang="en"><p>Cand.Med.Sci., Department of Radiology, the Second University Hospital</p></bio><bio xml:lang="ru"><p>кандидат медицинских наук, отделение лучевой диагностики, университетская клиническая больница № 2</p></bio><email>ramin-rz@mail.ru</email><xref ref-type="aff" rid="aff1"/></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0001-5968-9883</contrib-id><name-alternatives><name xml:lang="en"><surname>Chernenkiy</surname><given-names>I.</given-names></name><name xml:lang="ru"><surname>Черненький</surname><given-names>И. 
М.</given-names></name></name-alternatives><address><country country="RU">Russian Federation</country></address><bio xml:lang="en"><p>Senior IT engineer, Center for Neural Network Technologies</p></bio><bio xml:lang="ru"><p>ведущий инженер-программист Центра нейросетевых технологий</p></bio><email>chernenkiy_i_m@staff.sechenov.ru</email><xref ref-type="aff" rid="aff1"/></contrib><contrib contrib-type="author"><name-alternatives><name xml:lang="en"><surname>Feldsherov</surname><given-names>M. V.</given-names></name><name xml:lang="ru"><surname>Фельдшеров</surname><given-names>М. В.</given-names></name></name-alternatives><address><country country="RU">Russian Federation</country></address><bio xml:lang="en"><p>Head of the Department of Radiology, The Second University Hospital</p></bio><bio xml:lang="ru"><p>заведующий отделением лучевой диагностики, университетская клиническая больница № 2</p></bio><email>feldsherov_m_v@staff.sechenov.ru</email><xref ref-type="aff" rid="aff2"/></contrib><contrib contrib-type="author"><name><surname>He</surname><given-names>Li</given-names></name><address><country country="CN">China</country></address><bio xml:lang="en"><p>Cand.Med.Sci., Department of Radiology</p></bio><bio xml:lang="ru"><p>кандидат медицинских наук, отделение лучевой диагностики</p></bio><email>lihe2018@jlu.edu.cn</email><xref ref-type="aff" rid="aff3"/></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-2860-276X</contrib-id><name><surname>Kebang</surname><given-names>Hu</given-names></name><address><country country="CN">China</country></address><bio xml:lang="en"><p>Dr. Med. 
Sci., professor of the Department of Urology</p></bio><bio xml:lang="ru"><p>доктор медицинских наук, профессор, отделение урологии</p></bio><email>hukb@jlu.edu.cn</email><xref ref-type="aff" rid="aff3"/></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0003-1121-9430</contrib-id><name-alternatives><name xml:lang="en"><surname>Shpot</surname><given-names>E. V.</given-names></name><name xml:lang="ru"><surname>Шпоть</surname><given-names>Е. В.</given-names></name></name-alternatives><address><country country="RU">Russian Federation</country></address><bio xml:lang="en"><p>Dr.Med.Sci., professor of the Institute</p></bio><bio xml:lang="ru"><p>доктор медицинских наук, профессор</p></bio><email>shpot_e_v@staff.sechenov.ru</email><xref ref-type="aff" rid="aff1"/></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid">https://orcid.org/0000-0002-5541-2251</contrib-id><name-alternatives><name xml:lang="en"><surname>Glybochko</surname><given-names>P. V.</given-names></name><name xml:lang="ru"><surname>Глыбочко</surname><given-names>П. В.</given-names></name></name-alternatives><address><country country="RU">Russian Federation</country></address><bio xml:lang="en"><p>Dr.Med.Sci., professor, academician of the Russian Academy of Sciences</p></bio><bio xml:lang="ru"><p>доктор медицинских наук, профессор, академик РАН</p></bio><email>rector@staff.sechenov.ru</email><xref ref-type="aff" rid="aff1"/></contrib></contrib-group><aff-alternatives id="aff1"><aff><institution xml:lang="en">Institute of Urology and Reproductive Health, I.M. Sechenov First Moscow State Medical University of the Ministry of Health of Russia (Sechenov University)</institution></aff><aff><institution xml:lang="ru">Институт урологии и репродуктивного здоровья человека, Первый МГМУ им. И.М. 
Сеченова Минздрава России (Сеченовский университет)</institution></aff></aff-alternatives><aff-alternatives id="aff2"><aff><institution xml:lang="en">I.M. Sechenov First Moscow State Medical University of the Ministry of Health of Russia (Sechenov University)</institution></aff><aff><institution xml:lang="ru">Первый МГМУ им. И.М. Сеченова Минздрава России (Сеченовский университет)</institution></aff></aff-alternatives><aff-alternatives id="aff3"><aff><institution xml:lang="en">The First Hospital of Jilin University</institution></aff><aff><institution xml:lang="ru">Первая больница Цзилиньского университета</institution></aff></aff-alternatives><pub-date date-type="pub" iso-8601-date="2024-12-10" publication-format="electronic"><day>10</day><month>12</month><year>2024</year></pub-date><issue>6</issue><issue-title xml:lang="en"/><issue-title xml:lang="ru"/><fpage>5</fpage><lpage>11</lpage><history><date date-type="received" iso-8601-date="2025-05-23"><day>23</day><month>05</month><year>2025</year></date><date date-type="accepted" iso-8601-date="2025-05-23"><day>23</day><month>05</month><year>2025</year></date></history><permissions><copyright-statement xml:lang="en">Copyright © 2024, Bionika Media</copyright-statement><copyright-statement xml:lang="ru">Copyright © 2024, ООО «Бионика Медиа»</copyright-statement><copyright-year>2024</copyright-year><copyright-holder xml:lang="en">Bionika Media</copyright-holder><copyright-holder xml:lang="ru">ООО «Бионика Медиа»</copyright-holder></permissions><self-uri xlink:href="https://journals.eco-vector.com/1728-2985/article/view/680243">https://journals.eco-vector.com/1728-2985/article/view/680243</self-uri><abstract xml:lang="en"><p><bold>Aim</bold><bold>: </bold>to explore the development of a computer-aided diagnosis (CAD) system based on deep learning (DL) neural networks aimed at minimizing human error in PI-RADS grading and supporting medical decision-making.</p> <p>Materials and Methods. 
This retrospective multicenter study included a cohort of 136 patients, comprising 108 cases of PCa (PI-RADS score 4–5) and 28 cases of benign conditions (PI-RADS score 1–2). The 3D U-Net architecture was applied to process T2-weighted images (T2W), diffusion-weighted images (DWI), and dynamic contrast-enhanced images (DCE). Statistical analysis was conducted using Python libraries to assess diagnostic performance, including sensitivity, specificity, Dice similarity coefficients, and the area under the receiver operating characteristic curve (AUC).</p> <p><bold>Results</bold><bold>.</bold> The DL-CAD system achieved an average accuracy of 78%, sensitivity of 60%, and specificity of 84% for detecting lesions in the prostate. The Dice similarity coefficient for prostate segmentation was 0.71, and the AUC was 81.16%. The system demonstrated high specificity in reducing false-positive results, which, after further optimization, could help minimize unnecessary biopsies and overtreatment.</p> <p><bold>Conclusion</bold><bold>.</bold> The DL-CAD system shows potential in supporting clinical decision-making for patients with clinically significant PCa by improving diagnostic accuracy, particularly in minimizing intra- and inter-observer variability. Despite its high specificity, improvements in sensitivity and segmentation accuracy are needed, which could be achieved by using larger datasets and advanced deep learning techniques. 
Further multicenter validation is required for accelerated integration of this system into clinical practice.</p></abstract><trans-abstract xml:lang="ru"><p>Цель исследования заключается в разработке системы компьютерной диагностики (computer-aided diagnosis, CAD) с целью минимизации риска человеческих ошибок при градации по системе PI-RADS, что способствует улучшению процесса принятия врачебных решений.</p> <p><bold>Материалы и методы.</bold> Ретроспективное многоцентровое исследование включило 136 пациентов, из которых 108 составили больные раком предстательной железы (РПЖ) (оценка PI-RADS 4–5) и 28 случаев – пациенты с доброкачественными заболеваниями предстательной железы (оценка PI-RADS 1–2). Для анализа изображений была применена архитектура 3D U-Net, обрабатывающая T2-взвешенные изображения (T2W), диффузионно-взвешенные изображения (DWI) и изображения с динамическим контрастированием (DCE). Статистический анализ проводили с использованием библиотек Python для оценки диагностической эффективности, включая чувствительность, специфичность, коэффициенты сходства Dice и площадь под кривой характеристик работы приемника (AUC).</p> <p><bold>Результаты.</bold> Система DL-CAD достигла средней точности 78%, чувствительности 60% и специфичности 84% для выявления очагов поражений в простате. Коэффициент сходства Dice для сегментации предстательной железы составил 0,71, а AUC – 81,16%. Система продемонстрировала высокую специфичность в снижении ложноположительных результатов, что после оптимизации системы сможет минимизировать ненужные биопсии и «избыточное» лечение.</p> <p><bold>Заключение.</bold> Система DL-CAD демонстрирует потенциал в поддержке процесса принятия клинических решений для пациентов с клинически значимым РПЖ за счет повышения точности диагностики, особенно в минимизации вариабельности при интерпретации результатов несколькими рентгенологами (межэкспертная вариабельность). 
Несмотря на высокую специфичность, требуются дополнительные исследования для улучшения чувствительности и точности сегментации, что может быть достигнуто за счет использования более крупных массивов данных и передовых методов DL. Дальнейшая многоцентровая валидация необходима для более активной интеграции данной системы в клиническую практику.</p></trans-abstract><kwd-group xml:lang="en"><kwd>prostate cancer</kwd><kwd>mpMRI</kwd><kwd>artificial intelligence</kwd><kwd>deep learning</kwd><kwd>PI-RADS</kwd></kwd-group><kwd-group xml:lang="ru"><kwd>рак простаты</kwd><kwd>мпМРТ</kwd><kwd>искусственный интеллект</kwd><kwd>глубокое обучение</kwd><kwd>PI-RADS</kwd></kwd-group><funding-group/></article-meta></front><body></body><back><ref-list><ref id="B1"><label>1.</label><mixed-citation>Song JM, Kim CB, Chung HC, Kane RL. Prostate-specific antigen, digital rectal examination and transrectal ultrasonography: a meta-analysis for this diagnostic triad of prostate cancer in symptomatic korean men. Yonsei medical journal. 2005;46(3):414-24. doi: 10.3349/ymj.2005.46.3.414.</mixed-citation></ref><ref id="B2"><label>2.</label><mixed-citation>Moe A, Hayne D. Transrectal ultrasound biopsy of the prostate: does it still have a role in prostate cancer diagnosis? Translational andrology and urology. 2020;9(6):3018-24. doi: 10.21037/tau.2019.09.37.</mixed-citation></ref><ref id="B3"><label>3.</label><mixed-citation>Rezvykh I.A., Rapoport L.M., Belysheva E.S. et al. mpMRI in planning nerve-sparing RARP in patients with localized prostate cancer of low and intermediate risk of progression. Pilot research. Russian Electronic Journal of Radiology. 2020;10(2):140-147. DOI 10.21569/2222-7415-2020-10-2-140-147. Russian (Резвых И.А., Рапопорт Л.М., Белышева Е.С. и др. МПМРТ в планировании нервосберегающей робот-ассистированной радикальной простатэктомии у больных с локализованным раком предстательной железы низкого и промежуточного рисков прогрессии. 
Пилотное исследование/ Российский электронный журнал лучевой диагностики. 2020;10(2):140-147. DOI 10.21569/2222-7415-2020-10-2-140-147).</mixed-citation></ref><ref id="B4"><label>4.</label><mixed-citation>Rezvykh I.A., Rapoport L.M., Chuvalov L.L. Multiparametric MRI in planning of organ-sparing robot-assisted radical prostatectomy for treatment of localized prostate cancer with low and intermediate risk of progression. Andrology and Genital Surgery. 2021; 22 (2): 35-44. DOI 10.17650/1726-9784-2021-22-2-35-44. Russian (Резвых И.А., Рапопорт Л.М., Чувалов Л.Л. и др. Мультипараметрическая МРТ в планировании анатомосберегающей робот-ассистированной радикальной простатэктомии при локализованном раке предстательной железы низкого и промежуточного риска прогрессирования. Андрология и генитальная хирургия. 2021; 22 (2): 35-44. DOI: 10.17650/1726-9784-2021-22-2-35-44).</mixed-citation></ref><ref id="B5"><label>5.</label><mixed-citation>Benelli A, Vaccaro C, Guzzo S, Nedbal C, Varca V, Gregori A. The role of MRI/TRUS fusion biopsy in the diagnosis of clinically significant prostate cancer. Therapeutic advances in urology. 2020;12:1756287220916613. doi: 10.1177/1756287220916613.</mixed-citation></ref><ref id="B6"><label>6.</label><mixed-citation>Kasivisvanathan V, Rannikko AS, Borghi M, Panebianco V, Mynderse LA, Vaarala MH, et al. MRI-Targeted or Standard Biopsy for Prostate-Cancer Diagnosis. The New England journal of medicine. 2018;378(19):1767-77. doi: 10.1056/NEJMoa1801993.</mixed-citation></ref><ref id="B7"><label>7.</label><mixed-citation>Weinreb JC, Barentsz JO, Choyke PL, Cornud F, Haider MA, Macura KJ, et al. PI-RADS Prostate Imaging - Reporting and Data System: 2015, Version 2. European urology. 2016;69(1):16-40. doi: 10.1016/j.eururo.2015.08.052.</mixed-citation></ref><ref id="B8"><label>8.</label><mixed-citation>van der Leest M, Cornel E, Israël B, Hendriks R, Padhani AR, Hoogenboom M, et al. 
Head-to-head Comparison of Transrectal Ultrasound-guided Prostate Biopsy Versus Multiparametric Prostate Resonance Imaging with Subsequent Magnetic Resonance-guided Biopsy in Biopsy-naïve Men with Elevated Prostate-specific Antigen: A Large Prospective Multicenter Clinical Study. European urology. 2019;75(4):570-8. doi: 10.1016/j.eururo.2018.11.023.</mixed-citation></ref><ref id="B9"><label>9.</label><mixed-citation>Gupta RT, Mehta KA, Turkbey B, Verma S. PI-RADS: Past, present, and future. Journal of magnetic resonance imaging : JMRI. 2020;52(1):33-53. doi: 10.1002/jmri.26896.</mixed-citation></ref><ref id="B10"><label>10.</label><mixed-citation>Song Y, Zhang YD, Yan X, Liu H, Zhou M, Hu B, et al. Computer-aided diagnosis of prostate cancer using a deep convolutional neural network from multiparametric MRI. Journal of magnetic resonance imaging : JMRI. 2018;48(6):1570-7. doi: 10.1002/jmri.26047.</mixed-citation></ref><ref id="B11"><label>11.</label><mixed-citation>Sanders JW, Mok H, Hanania AN, Venkatesan AM, Tang C, Bruno TL, et al. Computer-aided segmentation on MRI for prostate radiotherapy, Part I: Quantifying human interobserver variability of the prostate and organs at risk and its impact on radiation dosimetry. Radiotherapy and oncology : journal of the European Society for Therapeutic Radiology and Oncology. 2022;169:124-31. doi: 10.1016/j.radonc.2021.12.011.</mixed-citation></ref><ref id="B12"><label>12.</label><mixed-citation>Brembilla G, Dell’Oglio P, Stabile A, Damascelli A, Brunetti L, Ravelli S, et al. Interreader variability in prostate MRI reporting using Prostate Imaging Reporting and Data System version 2.1. European radiology. 2020;30(6):3383-92. doi: 10.1007/s00330-019-06654-2.</mixed-citation></ref><ref id="B13"><label>13.</label><mixed-citation>Smith CP, Harmon SA, Barrett T, Bittencourt LK, Law YM, Shebel H, et al. Intra- and interreader reproducibility of PI-RADSv2: A multireader study. Journal of magnetic resonance imaging : JMRI. 
2019;49(6):1694-703. doi: 10.1002/jmri.26555.</mixed-citation></ref><ref id="B14"><label>14.</label><mixed-citation>Barragán-Montero A, Javaid U, Valdés G, Nguyen D, Desbordes P, Macq B, et al. Artificial intelligence and machine learning for medical imaging: A technology review. Physica medica : PM : an international journal devoted to the applications of physics to medicine and biology : official journal of the Italian Association of Biomedical Physics (AIFB). 2021;83:242-56. doi: 10.1016/j.ejmp.2021.04.016.</mixed-citation></ref><ref id="B15"><label>15.</label><mixed-citation>Yang R, Yu Y. Artificial Convolutional Neural Network in Object Detection and Semantic Segmentation for Medical Imaging Analysis. Frontiers in oncology. 2021;11:638182. doi: 10.3389/fonc.2021.638182.</mixed-citation></ref><ref id="B16"><label>16.</label><mixed-citation>He M, Cao Y, Chi C, Yang X, Ramin R, Wang S, et al. Research progress on deep learning in magnetic resonance imaging-based diagnosis and treatment of prostate cancer: a review on the current status and perspectives. Front Oncol. 2023;13:1189370. doi: 10.3389/fonc.2023.1189370.</mixed-citation></ref><ref id="B17"><label>17.</label><mixed-citation>Chen F, Cen S, Palmer S. Application of Prostate Imaging Reporting and Data System Version 2 (PI-RADS v2): Interobserver Agreement and Positive Predictive Value for Localization of Intermediate- and High-Grade Prostate Cancers on Multiparametric Magnetic Resonance Imaging. Academic radiology. 2017;24(9):1101-6. doi: 10.1016/j.acra.2017.03.019.</mixed-citation></ref><ref id="B18"><label>18.</label><mixed-citation>Girometti R, Giannarini G, Greco F, Isola M, Cereser L, Como G, et al. Interreader agreement of PI-RADS v. 2 in assessing prostate cancer with multiparametric MRI: A study using whole-mount histology as the standard of reference. Journal of magnetic resonance imaging : JMRI. 2019;49(2):546-55. 
doi: 10.1002/jmri.26220.</mixed-citation></ref><ref id="B19"><label>19.</label><mixed-citation>Min X, Li M, Dong D, Feng Z, Zhang P, Ke Z, et al. Multi-parametric MRI-based radiomics signature for discriminating between clinically significant and insignificant prostate cancer: Cross-validation of a machine learning method. Eur J Radiol. 2019;115:16-21. doi: 10.1016/j.ejrad.2019.03.010.</mixed-citation></ref><ref id="B20"><label>20.</label><mixed-citation>Liu Y, Zheng H, Liang Z, Miao Q, Brisbane WG, Marks LS, et al. Textured-Based Deep Learning in Prostate Cancer Classification with 3T Multiparametric MRI: Comparison with PI-RADS-Based Classification. Diagnostics (Basel, Switzerland). 2021;11(10). doi: 10.3390/diagnostics11101785.</mixed-citation></ref><ref id="B21"><label>21.</label><mixed-citation>Aldoj N, Lukas S, Dewey M, Penzkofer T. Semi-automatic classification of prostate cancer on multi-parametric MR imaging using a multi-channel 3D convolutional neural network. Eur Radiol. 2020;30(2):1243-53. doi: 10.1007/s00330-019-06417-z.</mixed-citation></ref><ref id="B22"><label>22.</label><mixed-citation>Saha A, Bosma JS, Twilt JJ, van Ginneken B, Bjartell A, Padhani AR, et al. Artificial intelligence and radiologists in prostate cancer detection on MRI (PI-CAI): an international, paired, non-inferiority, confirmatory study. Lancet Oncol. 2024;25(7):879-87. doi: 10.1016/s1470-2045(24)00220-1.</mixed-citation></ref><ref id="B23"><label>23.</label><mixed-citation>Hoar D, Lee PQ, Guida A, Patterson S, Bowen CV, Merrimen J, et al. Combined Transfer Learning and Test-Time Augmentation Improves Convolutional Neural Network-Based Semantic Segmentation of Prostate Cancer from Multi-Parametric MR Images. Comput Methods Programs Biomed. 2021;210:106375. doi: 10.1016/j.cmpb.2021.106375.</mixed-citation></ref><ref id="B24"><label>24.</label><mixed-citation>Cao R, Mohammadian Bajgiran A, Afshari Mirak S, Shakeri S, Zhong X, Enzmann D, et al. 
Joint Prostate Cancer Detection and Gleason Score Prediction in mp-MRI via FocalNet. IEEE Trans Med Imaging. 2019;38(11):2496-506. doi: 10.1109/tmi.2019.2901928.</mixed-citation></ref></ref-list></back></article>
