@article{BrellSeglGuanteretal.2017,
  author    = {Brell, Maximilian and Segl, Karl and Guanter, Luis and Bookhagen, Bodo},
  title     = {Hyperspectral and Lidar Intensity Data Fusion: A Framework for the Rigorous Correction of Illumination, Anisotropic Effects, and Cross Calibration},
  journal   = {IEEE Transactions on Geoscience and Remote Sensing},
  volume    = {55},
  publisher = {Institute of Electrical and Electronics Engineers},
  address   = {Piscataway},
  issn      = {0196-2892},
  doi       = {10.1109/TGRS.2017.2654516},
  pages     = {2799--2810},
  year      = {2017},
  abstract  = {The fusion of hyperspectral imaging (HSI) sensor and airborne lidar scanner (ALS) data provides promising potential for applications in the environmental sciences. Standard fusion approaches use reflectance information from the HSI and distance measurements from the ALS to increase data dimensionality and geometric accuracy. However, the potential for data fusion based on the respective intensity information of the complementary active and passive sensor systems is high and not yet fully exploited. Here, an approach for the rigorous illumination correction of HSI data, based on the radiometrically cross-calibrated return intensity information of ALS data, is presented. The cross calibration utilizes a ray-tracing-based fusion of both sensor measurements by intersecting their particular beam shapes. The developed method is capable of compensating for the drawbacks of passive HSI systems, such as cast and cloud shadowing effects, illumination changes over time, across-track illumination, and, in part, anisotropy effects. During processing, spatial and temporal differences in illumination patterns are detected and corrected over the entire HSI wavelength domain. The improvement in the classification accuracy of urban and vegetation surfaces demonstrates the benefit and potential of the proposed HSI illumination correction. The presented approach is the first step toward the rigorous in-flight fusion of passive and active system characteristics, enabling new capabilities for a variety of applications.},
  language  = {en}
}

@article{BrellRogassSegletal.2016,
  author    = {Brell, Maximilian and Rogass, Christian and Segl, Karl and Bookhagen, Bodo and Guanter, Luis},
  title     = {Improving Sensor Fusion: A Parametric Method for the Geometric Coalignment of Airborne Hyperspectral and Lidar Data},
  journal   = {IEEE Transactions on Geoscience and Remote Sensing},
  volume    = {54},
  publisher = {Institute of Electrical and Electronics Engineers},
  address   = {Piscataway},
  issn      = {0196-2892},
  doi       = {10.1109/TGRS.2016.2518930},
  pages     = {3460--3474},
  year      = {2016},
  abstract  = {Synergistic applications based on integrated hyperspectral and lidar data are receiving growing interest from the remote-sensing community. A prerequisite for the optimal sensor fusion of hyperspectral and lidar data is an accurate geometric coalignment. The simple unadjusted integration of lidar elevation and hyperspectral reflectance causes a substantial loss of information and does not exploit the full potential of both sensors. This paper presents a novel approach for the geometric coalignment of hyperspectral and lidar airborne data, based on their respective adopted return intensity information. The complete approach incorporates ray tracing and subpixel procedures in order to overcome grid-inherent discretization. It aims at the correction of extrinsic and intrinsic (camera resectioning) parameters of the hyperspectral sensor. In addition to a tie-point-based coregistration, we introduce a ray-tracing-based back projection of the lidar intensities for area-based cost aggregation. The approach consists of three processing steps: first, a coarse automatic tie-point-based boresight alignment; second, coregistration of the hyperspectral data to the lidar intensities; third, a parametric coalignment refinement with area-based cost aggregation. This hybrid approach of combining tie-point features and area-based cost aggregation methods for the parametric coregistration of hyperspectral intensity values to their corresponding lidar intensities results in a root-mean-square error of 1/3 pixel. It indicates that a highly integrated and stringent combination of different coalignment methods leads to an improvement of the multisensor coregistration.},
  language  = {en}
}
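To make the intensity-based illumination correction of BrellSeglGuanteretal.2017 more concrete, the sketch below reduces it to a per-band linear regression between ALS return intensity and HSI brightness over sunlit pixels, which is then used to predict shadow-free brightness in shadowed pixels. This is a minimal illustration under strong assumptions (perfectly co-registered grids, a given shadow mask, no beam-shape ray tracing and no anisotropy terms); the function and array names are hypothetical, not the authors' implementation.

# Minimal sketch, NOT the published method: per-band linear
# cross-calibration of lidar return intensity against HSI brightness,
# used to rescale shadowed pixels. Assumes co-registered grids and a
# precomputed boolean shadow mask; all names are hypothetical.
import numpy as np

def correct_shadows(hsi, lidar_intensity, shadow_mask):
    """hsi: (rows, cols, bands) radiance cube, co-registered to
    lidar_intensity: (rows, cols) active-sensor return intensity.
    shadow_mask: (rows, cols) bool, True where the passive signal is shadowed."""
    corrected = hsi.copy()
    sunlit = ~shadow_mask
    for b in range(hsi.shape[2]):
        band = hsi[:, :, b]
        # Fit a least-squares line lidar -> HSI brightness on sunlit pixels.
        gain, offset = np.polyfit(lidar_intensity[sunlit], band[sunlit], deg=1)
        # Predict illumination-free brightness in shadowed pixels from the
        # (illumination-independent) lidar intensity.
        corrected[:, :, b][shadow_mask] = gain * lidar_intensity[shadow_mask] + offset
    return corrected

Gain and offset are refit per band because the lidar measures at a single wavelength, so the regression must absorb each surface's spectral shape; the published framework replaces this plain least-squares line with a rigorous radiometric cross calibration over the full HSI wavelength domain.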
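Similarly, the area-based cost aggregation step of BrellRogassSegletal.2016 can be illustrated, in heavily simplified form, as minimizing a normalized-cross-correlation cost between the back-projected lidar intensity image and one HSI intensity band. The sketch below optimizes only a 2-D subpixel shift as a stand-in for the full extrinsic and intrinsic (boresight and camera-resectioning) parameter set the paper actually estimates; function and variable names are hypothetical.

# Minimal sketch, NOT the published method: area-based coalignment as a
# 2-D subpixel shift search driven by a normalized-cross-correlation cost.
import numpy as np
from scipy.ndimage import shift as nd_shift
from scipy.optimize import minimize

def ncc_cost(params, hsi_band, lidar_img):
    """1 - normalized cross-correlation between the lidar intensity image
    shifted by (dy, dx) pixels and one HSI intensity band."""
    dy, dx = params
    shifted = nd_shift(lidar_img, (dy, dx), order=1, mode='nearest')
    a = hsi_band - hsi_band.mean()
    b = shifted - shifted.mean()
    return 1.0 - (a * b).sum() / (np.linalg.norm(a) * np.linalg.norm(b))

def coalign(hsi_band, lidar_img):
    # Nelder-Mead needs no gradients and tolerates the mildly noisy cost
    # surface; subpixel accuracy comes from the interpolated shift.
    res = minimize(ncc_cost, x0=np.zeros(2), args=(hsi_band, lidar_img),
                   method='Nelder-Mead')
    return res.x  # estimated (dy, dx) offset in pixels

In the paper this cost aggregation refines a parametric sensor model after a coarse tie-point boresight alignment, rather than estimating a raw image shift, which is how the reported root-mean-square error of 1/3 pixel is reached.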