@article{IjiriInagakiKuboetal.2018,
  author = {Ijiri, Akira and Inagaki, Fumio and Kubo, Yusuke and Adhikari, Rishi Ram and Hattori, Shohei and Hoshino, Tatsuhiko and Imachi, Hiroyuki and Kawagucci, Shinsuke and Morono, Yuki and Ohtomo, Yoko and Ono, Shuhei and Sakai, Sanae and Takai, Ken and Toki, Tomohiro and Wang, David T. and Yoshinaga, Marcos Y. and Arnold, Gail L. and Ashi, Juichiro and Case, David H. and Feseker, Tomas and Hinrichs, Kai-Uwe and Ikegawa, Yojiro and Ikehara, Minoru and Kallmeyer, Jens and Kumagai, Hidenori and Lever, Mark Alexander and Morita, Sumito and Nakamura, Ko-ichi and Nakamura, Yuki and Nishizawa, Manabu and Orphan, Victoria J. and R{\o}y, Hans and Schmidt, Frauke and Tani, Atsushi and Tanikawa, Wataru and Terada, Takeshi and Tomaru, Hitoshi and Tsuji, Takeshi and Tsunogai, Urumu and Yamaguchi, Yasuhiko T. and Yoshida, Naohiro},
  title = {Deep-biosphere methane production stimulated by geofluids in the Nankai accretionary complex},
  series = {Science Advances},
  volume = {4},
  journal = {Science Advances},
  number = {6},
  publisher = {American Association for the Advancement of Science},
  address = {Washington},
  issn = {2375-2548},
  doi = {10.1126/sciadv.aao4631},
  pages = {15},
  year = {2018},
  language = {en}
}

@misc{IjiriInagakiKuboetal.2018a,
  author = {Ijiri, Akira and Inagaki, Fumio and Kubo, Yusuke and Adhikari, Rishi Ram and Hattori, Shohei and Hoshino, Tatsuhiko and Imachi, Hiroyuki and Kawagucci, Shinsuke and Morono, Yuki and Ohtomo, Yoko and Ono, Shuhei and Sakai, Sanae and Takai, Ken and Toki, Tomohiro and Wang, David T. and Yoshinaga, Marcos Y. and Arnold, Gail L. and Ashi, Juichiro and Case, David H. and Feseker, Tomas and Hinrichs, Kai-Uwe and Ikegawa, Yojiro and Ikehara, Minoru and Kallmeyer, Jens and Kumagai, Hidenori and Lever, Mark Alexander and Morita, Sumito and Nakamura, Ko-ichi and Nakamura, Yuki and Nishizawa, Manabu and Orphan, Victoria J. and R{\o}y, Hans and Schmidt, Frauke and Tani, Atsushi and Tanikawa, Wataru and Terada, Takeshi and Tomaru, Hitoshi and Tsuji, Takeshi and Tsunogai, Urumu and Yamaguchi, Yasuhiko T. and Yoshida, Naohiro},
  title = {Deep-biosphere methane production stimulated by geofluids in the Nankai accretionary complex},
  series = {Postprints der Universit{\"a}t Potsdam Mathematisch-Naturwissenschaftliche Reihe},
  journal = {Postprints der Universit{\"a}t Potsdam Mathematisch-Naturwissenschaftliche Reihe},
  number = {802},
  issn = {1866-8372},
  doi = {10.25932/publishup-42700},
  url = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus4-427002},
  pages = {16},
  year = {2018},
  abstract = {Microbial life inhabiting subseafloor sediments plays an important role in Earth's carbon cycle. However, the impact of geodynamic processes on the distributions and carbon-cycling activities of subseafloor life remains poorly constrained. We explore a submarine mud volcano of the Nankai accretionary complex by drilling down to 200 m below the summit. Stable isotopic compositions of water and carbon compounds, including clumped methane isotopologues, suggest that ~90\% of methane is microbially produced at 16° to 30°C and 300 to 900 m below seafloor, corresponding to the basin bottom, where fluids in the accretionary prism are supplied via megasplay faults. Radiotracer experiments showed that relatively small microbial populations in deep mud volcano sediments ($10^2$ to $10^3$ cells cm$^{-3}$) include highly active hydrogenotrophic methanogens and acetogens.
Our findings indicate that subduction-associated fluid migration has stimulated microbial activity in the mud reservoir and that mud volcanoes may contribute more substantially to the methane budget than previously estimated.},
  language = {en}
}

@article{RezaeiNaeppiLippertetal.2020,
  author = {Rezaei, Mina and N{\"a}ppi, Janne J. and Lippert, Christoph and Meinel, Christoph and Yoshida, Hiroyuki},
  title = {Generative multi-adversarial network for striking the right balance in abdominal image segmentation},
  series = {International journal of computer assisted radiology and surgery},
  volume = {15},
  journal = {International journal of computer assisted radiology and surgery},
  number = {11},
  publisher = {Springer},
  address = {Berlin},
  issn = {1861-6410},
  doi = {10.1007/s11548-020-02254-4},
  pages = {1847--1858},
  year = {2020},
  abstract = {Purpose: The identification of abnormalities that are relatively rare within otherwise normal anatomy is a major challenge for deep learning in the semantic segmentation of medical images. The small number of samples of the minority classes in the training data makes the learning of optimal classification challenging, while the more frequently occurring samples of the majority class hamper the generalization of the classification boundary between infrequently occurring target objects and classes. In this paper, we developed a novel generative multi-adversarial network, called Ensemble-GAN, for mitigating this class imbalance problem in the semantic segmentation of abdominal images. Method: The Ensemble-GAN framework is composed of a single-generator and a multi-discriminator variant for handling the class imbalance problem to provide a better generalization than existing approaches. The ensemble model aggregates the estimates of multiple models by training from different initializations and losses from various subsets of the training data. The single generator network analyzes the input image as a condition to predict a corresponding semantic segmentation image by use of feedback from the ensemble of discriminator networks. To evaluate the framework, we trained our framework on two public datasets, with different imbalance ratios and imaging modalities: the Chaos 2019 and the LiTS 2017. Result: In terms of the F1 score, the accuracies of the semantic segmentation of healthy spleen, liver, and left and right kidneys were 0.93, 0.96, 0.90 and 0.94, respectively. The overall F1 scores for simultaneous segmentation of the lesions and liver were 0.83 and 0.94, respectively. Conclusion: The proposed Ensemble-GAN framework demonstrated outstanding performance in the semantic segmentation of medical images in comparison with other approaches on popular abdominal imaging benchmarks. The Ensemble-GAN has the potential to segment abdominal images more accurately than human experts.},
  language = {en}
}