@article{CamargoSchirrmannLandwehretal.2021,
  author    = {Camargo, Tibor de and Schirrmann, Michael and Landwehr, Niels and Dammer, Karl-Heinz and Pflanz, Michael},
  title     = {Optimized deep learning model as a basis for fast UAV mapping of weed species in winter wheat crops},
  journal   = {Remote Sensing},
  volume    = {13},
  number    = {9},
  publisher = {MDPI},
  address   = {Basel},
  issn      = {2072-4292},
  doi       = {10.3390/rs13091704},
  pages     = {19},
  year      = {2021},
  abstract  = {Weed maps should be available quickly, reliably, and in high detail to be useful for site-specific management in crop protection and to promote more sustainable agriculture by reducing pesticide use. Here, the optimization of a deep residual convolutional neural network (ResNet-18) for the classification of weed and crop plants in UAV imagery is proposed. The target was to reach sufficient performance on an embedded system while maintaining the same features of the ResNet-18 model as a basis for fast UAV mapping. This would enable online recognition and subsequent mapping of weeds during UAV flight operation. Optimization was achieved mainly by avoiding the redundant computations that arise when a classification model is applied to overlapping tiles of a larger input image. The model was trained and tested with imagery obtained from a UAV flight campaign at low altitude over a winter wheat field, and classification was performed at species level for the weed species Matricaria chamomilla L., Papaver rhoeas L., Veronica hederifolia L., and Viola arvensis ssp. arvensis observed in that field. The ResNet-18 model with the optimized image-level prediction pipeline reached 2.2 frames per second on an NVIDIA Jetson AGX Xavier for the full-resolution UAV image, which would amount to an area output of about 1.78 ha h$^{-1}$ for continuous field mapping. The overall accuracy for determining crop, soil, and weed species was 94\%. There were some limitations in the detection of species unknown to the model. When shifting from 16-bit to 32-bit model precision, no improvement in classification accuracy was observed, but speed declined strongly, especially when a higher number of filters was used in the ResNet-18 model. Future work should be directed towards integrating the mapping process on UAV platforms, guiding UAVs autonomously for mapping purposes, and ensuring the transferability of the models to other crop fields.},
  language  = {en}
}

@article{WitzelNeugartRuppeletal.2015,
  author    = {Witzel, Katja and Neugart, Susanne and Ruppel, Silke and Schreiner, Monika and Wiesner, Melanie and Baldermann, Susanne},
  title     = {Recent progress in the use of 'omics technologies in brassicaceous vegetables},
  journal   = {Frontiers in Plant Science},
  volume    = {6},
  publisher = {Frontiers Research Foundation},
  address   = {Lausanne},
  issn      = {1664-462X},
  doi       = {10.3389/fpls.2015.00244},
  pages     = {14},
  year      = {2015},
  abstract  = {Continuing advances in 'omics methodologies and instrumentation are enhancing the understanding of how plants cope with the dynamic nature of their growing environment. 'Omics platforms have only recently been extended to cover horticultural crop species. Many of the most widely cultivated vegetable crops belong to the genus Brassica: these include plants grown for their root (turnip, rutabaga/swede), their swollen stem base (kohlrabi), their leaves (cabbage, kale, pak choi), and their inflorescence (cauliflower, broccoli). Characterization at the genome, transcript, protein, and metabolite levels has illustrated the complexity of the cellular response to a whole series of environmental stresses, including nutrient deficiency, pathogen attack, heavy metal toxicity, cold acclimation, and excessive or suboptimal irradiation. This review covers recent applications of 'omics technologies to the brassicaceous vegetables and discusses future scenarios for achieving improvements in crop end-use quality.},
  language  = {en}
}