@article{MTMT:31612924,
  title = {{AnnotatorJ}: an {ImageJ} plugin to ease hand annotation of cellular compartments},
  url = {https://m2.mtmt.hu/api/publication/31612924},
  author = {Hollandi, Réka and Diósdi, Ákos and Hollandi, Gábor and Moshkov, Nikita and Horváth, Péter},
  doi = {10.1091/mbc.E20-02-0156},
  journal-iso = {MOL BIOL CELL},
  journal = {MOLECULAR BIOLOGY OF THE CELL},
  volume = {31},
  unique-id = {31612924},
  issn = {1059-1524},
  abstract = {AnnotatorJ combines single-cell identification with deep learning (DL) and manual annotation. Cellular analysis quality depends on accurate and reliable detection and segmentation of cells so that the subsequent steps of analyses, for example, expression measurements, may be carried out precisely and without bias. DL has recently become a popular way of segmenting cells, performing unimaginably better than conventional methods. However, such DL applications may be trained on a large amount of annotated data to be able to match the highest expectations. High-quality annotations are unfortunately expensive as they require field experts to create them, and often cannot be shared outside the lab due to medical regulations. We propose AnnotatorJ, an ImageJ plugin for the semiautomatic annotation of cells (or generally, objects of interest) on (not only) microscopy images in 2D that helps find the true contour of individual objects by applying U-Net-based presegmentation. The manual labor of hand annotating cells can be significantly accelerated by using our tool. Thus, it enables users to create such datasets that could potentially increase the accuracy of state-of-the-art solutions, DL or otherwise, when used as training data.},
  year = {2020},
  eissn = {1939-4586},
  pages = {2179--2186},
  orcid-numbers = {Diósdi, Ákos/0000-0002-3118-5576; Moshkov, Nikita/0000-0002-5823-4884},
}

@article{MTMT:31595768,
  title = {Test-time augmentation for deep learning-based cell segmentation on microscopy images},
  url = {https://m2.mtmt.hu/api/publication/31595768},
  author = {Moshkov, Nikita and Mathe, Botond and Kertész-Farkas, Attila and Hollandi, Réka and Horváth, Péter},
  doi = {10.1038/s41598-020-61808-3},
  journal-iso = {SCI REP},
  journal = {SCIENTIFIC REPORTS},
  volume = {10},
  unique-id = {31595768},
  issn = {2045-2322},
  abstract = {Recent advancements in deep learning have revolutionized the way microscopy images of cells are processed. Deep learning network architectures have a large number of parameters, thus, in order to reach high accuracy, they require a massive amount of annotated data. A common way of improving accuracy builds on the artificial increase of the training set by using different augmentation techniques. A less common way relies on test-time augmentation (TTA) which yields transformed versions of the image for prediction and the results are merged. In this paper we describe how we have incorporated the test-time augmentation prediction method into two major segmentation approaches utilized in the single-cell analysis of microscopy images. These approaches are semantic segmentation based on the U-Net, and instance segmentation based on the Mask R-CNN models. Our findings show that even if only simple test-time augmentations (such as rotation or flipping and proper merging methods) are applied, TTA can significantly improve prediction accuracy. We have utilized images of tissue and cell cultures from the Data Science Bowl (DSB) 2018 nuclei segmentation competition and other sources. Additionally, boosting the highest-scoring method of the DSB with TTA, we could further improve prediction accuracy, and our method has reached an ever-best score at the DSB.},
  year = {2020},
  eissn = {2045-2322},
}

@article{MTMT:31360966,
  title = {Software tools for {3D} nuclei segmentation and quantitative analysis in multicellular aggregates},
  url = {https://m2.mtmt.hu/api/publication/31360966},
  author = {Piccinini, Filippo and Balassa, Tamás and Carbonaro, Antonella and Diósdi, Ákos and Tóth, Tímea and Moshkov, Nikita and Tasnádi, Ervin Áron and Horváth, Péter},
  doi = {10.1016/j.csbj.2020.05.022},
  journal-iso = {CSBJ},
  journal = {COMPUTATIONAL AND STRUCTURAL BIOTECHNOLOGY JOURNAL},
  volume = {18},
  unique-id = {31360966},
  issn = {2001-0370},
  year = {2020},
  eissn = {2001-0370},
  pages = {1287--1300},
}

@article{MTMT:31334623,
  title = {{nucleAIzer}: A Parameter-free Deep Learning Framework for Nucleus Segmentation Using Image Style Transfer},
  url = {https://m2.mtmt.hu/api/publication/31334623},
  author = {Hollandi, Réka and Szkalisity, Ábel and Tóth, Tímea and Tasnádi, Ervin Áron and Molnár, Csaba and Mathe, Botond and Grexa, István and Molnár, József and Bálind, Árpád and Gorbe, Mate and Kovács, Mária and Migh, Ede and Goodman, Allen and Balassa, Tamás and Koós, Krisztián and Wang, Wenyu and Caicedo, Juan Carlos and Bara, Norbert and Kovács, Ferenc and Paavolainen, Lassi and Danka, Tivadar and Kriston, András and Carpenter, Anne Elizabeth and Smith, Kevin and Horváth, Péter},
  doi = {10.1016/j.cels.2020.04.003},
  journal-iso = {CELL SYST},
  journal = {CELL SYSTEMS},
  volume = {10},
  unique-id = {31334623},
  issn = {2405-4712},
  abstract = {Single-cell segmentation is typically a crucial task of image-based cellular analysis. We present nucleAIzer, a deep-learning approach aiming toward a truly general method for localizing 2D cell nuclei across a diverse range of assays and light microscopy modalities. We outperform the 739 methods submitted to the 2018 Data Science Bowl on images representing a variety of realistic conditions, some of which were not represented in the training data. The key to our approach is that during training nucleAIzer automatically adapts its nucleus-style model to unseen and unlabeled data using image style transfer to automatically generate augmented training samples. This allows the model to recognize nuclei in new and different experiments efficiently without requiring expert annotations, making deep learning for nucleus segmentation fairly simple and labor free for most biological light microscopy experiments. It can also be used online, integrated into CellProfiler and freely downloaded at www.nucleaizer.org. A record of this paper's transparent peer review process is included in the Supplemental Information.},
  year = {2020},
  eissn = {2405-4720},
  pages = {453--458},
  orcid-numbers = {Molnár, Csaba/0000-0002-6124-1209; Molnár, József/0000-0002-9185-9376},
}

@article{MTMT:27598077,
  title = {Phenotypic Image Analysis Software Tools for Exploring and Understanding Big Image Data from Cell-Based Assays},
  url = {https://m2.mtmt.hu/api/publication/27598077},
  author = {Smith, Kevin and Piccinini, Filippo and Balassa, Tamás and Koós, Krisztián and Danka, Tivadar and Azizpour, Hossein and Horváth, Péter},
  doi = {10.1016/j.cels.2018.06.001},
  journal-iso = {CELL SYST},
  journal = {CELL SYSTEMS},
  volume = {6},
  unique-id = {27598077},
  issn = {2405-4712},
  year = {2018},
  eissn = {2405-4720},
  pages = {636--653},
}

@article{MTMT:3414411,
  title = {A deep convolutional neural network approach for astrocyte detection},
  url = {https://m2.mtmt.hu/api/publication/3414411},
  author = {Suleymanova, I. and Balassa, Tamás and Tripathi, S. and Molnár, Csaba and Saarma, M. and Sidorova, Y. and Horváth, Péter},
  doi = {10.1038/s41598-018-31284-x},
  journal-iso = {SCI REP},
  journal = {SCIENTIFIC REPORTS},
  volume = {8},
  unique-id = {3414411},
  issn = {2045-2322},
  abstract = {Astrocytes are involved in various brain pathologies including trauma, stroke, neurodegenerative disorders such as Alzheimer's and Parkinson's diseases, or chronic pain. Determining cell density in a complex tissue environment in microscopy images and elucidating the temporal characteristics of morphological and biochemical changes is essential to understand the role of astrocytes in physiological and pathological conditions. Nowadays, manual stereological cell counting or semi-automatic segmentation techniques are widely used for the quantitative analysis of microscopy images. Detecting astrocytes automatically is a highly challenging computational task, for which we currently lack efficient image analysis tools. We have developed a fast and fully automated software that assesses the number of astrocytes using Deep Convolutional Neural Networks (DCNN). The method highly outperforms state-of-the-art image analysis and machine learning methods and provides precision comparable to those of human experts. Additionally, the runtime of cell detection is significantly less than that of other three computational methods analysed, and it is faster than human observers by orders of magnitude. We applied our DCNN-based method to examine the number of astrocytes in different brain regions of rats with opioid-induced hyperalgesia/tolerance (OIH/OIT), as morphine tolerance is believed to activate glia. We have demonstrated a strong positive correlation between manual and DCNN-based quantification of astrocytes in rat brain.},
  keywords = {ACTIVATION; PAIN; MICROSCOPY; cell detection; IMAGES; MORPHINE-TOLERANCE},
  year = {2018},
  eissn = {2045-2322},
  orcid-numbers = {Molnár, Csaba/0000-0002-6124-1209},
}

@article{MTMT:3396221,
  title = {Environmental properties of cells improve machine learning-based phenotype recognition accuracy},
  url = {https://m2.mtmt.hu/api/publication/3396221},
  author = {Tóth, Tímea and Balassa, Tamás and Bara, Norbert and Kovács, Ferenc and Kriston, András and Molnár, Csaba and Haracska, Lajos and Sükösd, Farkas and Horváth, Péter},
  doi = {10.1038/s41598-018-28482-y},
  journal-iso = {SCI REP},
  journal = {SCIENTIFIC REPORTS},
  volume = {8},
  unique-id = {3396221},
  issn = {2045-2322},
  abstract = {To answer major questions of cell biology, it is often essential to understand the complex phenotypic composition of cellular systems precisely. Modern automated microscopes produce vast amounts of images routinely, making manual analysis nearly impossible. Due to their efficiency, machine learning-based analysis software have become essential tools to perform single-cell-level phenotypic analysis of large imaging datasets. However, an important limitation of such methods is that they do not use the information gained from the cellular micro- and macroenvironment: the algorithmic decision is based solely on the local properties of the cell of interest. Here, we present how various features from the surrounding environment contribute to identifying a cell and how such additional information can improve single-cell-level phenotypic image analysis. The proposed methodology was tested for different sizes of Euclidean and nearest neighbour-based cellular environments both on tissue sections and cell cultures. Our experimental data verify that the surrounding area of a cell largely determines its entity. This effect was found to be especially strong for established tissues, while it was somewhat weaker in the case of cell cultures. Our analysis shows that combining local cellular features with the properties of the cell's neighbourhood significantly improves the accuracy of machine learning-based phenotyping.},
  year = {2018},
  eissn = {2045-2322},
  orcid-numbers = {Molnár, Csaba/0000-0002-6124-1209},
}

@article{MTMT:3325350,
  title = {{Hsp70}-associated chaperones have a critical role in buffering protein production costs},
  url = {https://m2.mtmt.hu/api/publication/3325350},
  author = {Farkas, Zoltán and Kalapis, Dorottya and Bódi, Zoltán and Szamecz, Béla and Daraba, Andreea and Almási, Karola and Kovács, Károly and Boross, Gábor and Pál, Ferenc and Horváth, Péter and Balassa, Tamás and Molnár, Csaba and Pettkó-Szandtner, Aladár and Klement, Éva and Rutkai, E. and Szvetnik, Attila and Papp, Balázs and Pál, Csaba},
  doi = {10.7554/eLife.29845},
  journal-iso = {ELIFE},
  journal = {ELIFE},
  volume = {7},
  unique-id = {3325350},
  issn = {2050-084X},
  abstract = {Proteins are necessary for cellular growth. Concurrently, however, protein production has high energetic demands associated with transcription and translation. Here, we propose that activity of molecular chaperones shape protein burden, that is the fitness costs associated with expression of unneeded proteins. To test this hypothesis, we performed a genome-wide genetic interaction screen in baker's yeast. Impairment of transcription, translation, and protein folding rendered cells hypersensitive to protein burden. Specifically, deletion of specific regulators of the Hsp70-associated chaperone network increased protein burden. In agreement with expectation, temperature stress, increased mistranslation and a chemical misfolding agent all substantially enhanced protein burden. Finally, unneeded protein perturbed interactions between key components of the Hsp70-Hsp90 network involved in folding of native proteins. We conclude that specific chaperones contribute to protein burden. Our work indicates that by minimizing the damaging impact of gratuitous protein overproduction, chaperones enable tolerance to massive changes in genomic expression.},
  year = {2018},
  eissn = {2050-084X},
  orcid-numbers = {Boross, Gábor/0000-0002-7208-5678; Pál, Ferenc/0000-0002-0985-8578; Molnár, Csaba/0000-0002-6124-1209},
}

@article{MTMT:3318793,
  title = {Intelligent image-based in situ single-cell isolation},
  url = {https://m2.mtmt.hu/api/publication/3318793},
  author = {Braskó, Csilla and Smith, Kevin and Molnár, Csaba and Faragó, Nóra and Hegedűs, Lili and Bálind, Árpád and Balassa, Tamás and Szkalisity, Ábel and Sükösd, Farkas and Kocsis, Ágnes Katalin and Bálint, Balázs and Paavolainen, Lassi and Enyedi, Márton Zsolt and Nagy, István and Puskás, László and Haracska, Lajos and Tamás, Gábor and Horváth, Péter},
  doi = {10.1038/s41467-017-02628-4},
  journal-iso = {NAT COMMUN},
  journal = {NATURE COMMUNICATIONS},
  volume = {9},
  unique-id = {3318793},
  issn = {2041-1723},
  abstract = {Quantifying heterogeneities within cell populations is important for many fields including cancer research and neurobiology; however, techniques to isolate individual cells are limited. Here, we describe a high-throughput, non-disruptive, and cost-effective isolation method that is capable of capturing individually targeted cells using widely available techniques. Using high-resolution microscopy, laser microcapture microscopy, image analysis, and machine learning, our technology enables scalable molecular genetic analysis of single cells, targetable by morphology or location within the sample.},
  year = {2018},
  eissn = {2041-1723},
  orcid-numbers = {Molnár, Csaba/0000-0002-6124-1209; Tamás, Gábor/0000-0002-7905-6001},
}

@article{MTMT:3247398,
  title = {{Advanced Cell Classifier}: User-Friendly Machine-Learning-Based Software for Discovering Phenotypes in High-Content Imaging Data},
  url = {https://m2.mtmt.hu/api/publication/3247398},
  author = {Piccinini, Filippo and Balassa, Tamás and Szkalisity, Ábel and Molnár, Csaba and Paavolainen, Lassi and Kujala, K. and Buzás, Krisztina and Sarazova, M. and Pietiainen, V. and Kutay, U. and Smith, Kevin and Horváth, Péter},
  doi = {10.1016/j.cels.2017.05.012},
  journal-iso = {CELL SYST},
  journal = {CELL SYSTEMS},
  volume = {4},
  unique-id = {3247398},
  issn = {2405-4712},
  abstract = {High-content, imaging-based screens now routinely generate data on a scale that precludes manual verification and interrogation. Software applying machine learning has become an essential tool to automate analysis, but these methods require annotated examples to learn from. Efficiently exploring large datasets to find relevant examples remains a challenging bottleneck. Here, we present Advanced Cell Classifier (ACC), a graphical software package for phenotypic analysis that addresses these difficulties. ACC applies machine-learning and image-analysis methods to high-content data generated by large-scale, cell-based experiments. It features methods to mine microscopic image data, discover new phenotypes, and improve recognition performance. We demonstrate that these features substantially expedite the training process, successfully uncover rare phenotypes, and improve the accuracy of the analysis. ACC is extensively documented, designed to be user-friendly for researchers without machine-learning expertise, and distributed as a free open-source tool at www.cellclassifier.org.},
  year = {2017},
  eissn = {2405-4720},
  pages = {651--655},
  orcid-numbers = {Molnár, Csaba/0000-0002-6124-1209; Buzás, Krisztina/0000-0001-8933-2033},
}