@article{DenkerKuckeinVermaetal.2018,
  author    = {Denker, Carsten and Kuckein, Christoph and Verma, Meetu and Gonz{\'a}lez Manrique, Sergio Javier and Diercke, Andrea and Enke, Harry and Klar, Jochen and Balthasar, Horst and Louis, Rohan E. and Dineva, Ekaterina},
  title     = {High-cadence Imaging and Imaging Spectroscopy at the {GREGOR} Solar Telescope---A Collaborative Research Environment for High-resolution Solar Physics},
  journal   = {The Astrophysical Journal Supplement Series},
  volume    = {236},
  number    = {1},
  pages     = {12},
  year      = {2018},
  publisher = {IOP Publishing Ltd.},
  address   = {Bristol},
  issn      = {0067-0049},
  doi       = {10.3847/1538-4365/aab773},
  abstract  = {In high-resolution solar physics, the volume and complexity of photometric, spectroscopic, and polarimetric ground-based data significantly increased in the last decade, reaching data acquisition rates of terabytes per hour. This is driven by the desire to capture fast processes on the Sun and the necessity for short exposure times "freezing" the atmospheric seeing, thus enabling ex post facto image restoration. Consequently, large-format and high-cadence detectors are nowadays used in solar observations to facilitate image restoration. Based on our experience during the "early science" phase with the 1.5 m GREGOR solar telescope (2014-2015) and the subsequent transition to routine observations in 2016, we describe data collection and data management tailored toward image restoration and imaging spectroscopy. We outline our approaches regarding data processing, analysis, and archiving for two of GREGOR's post-focus instruments (see http://gregor.aip.de), i.e., the GREGOR Fabry-P{\'e}rot Interferometer (GFPI) and the newly installed High-Resolution Fast Imager (HiFI). The heterogeneous and complex nature of multidimensional data arising from high-resolution solar observations provides an intriguing but also a challenging example for "big data" in astronomy. The big data challenge has two aspects: (1) establishing a workflow for publishing the data for the whole community and beyond and (2) creating a collaborative research environment (CRE), where computationally intense data and postprocessing tools are colocated and collaborative work is enabled for scientists of multiple institutes. This requires either collaboration with a data center or frameworks and databases capable of dealing with huge data sets based on virtual observatory (VO) and other community standards and procedures.},
  language  = {en},
}