@article{SaparinThomsenProhaskaetal.2005,
  author   = {Saparin, P. I. and Thomsen, J. S. and Prohaska, Steffen and Zaikin, Alexei and Kurths, J{\"u}rgen and Hege, H. C. and Gowin, W.},
  title    = {Quantification of spatial structure of human proximal tibial bone biopsies using 3D measures of complexity},
  issn     = {0094-5765},
  year     = {2005},
  abstract = {Changes in trabecular bone composition during the development of osteoporosis are used as a model for bone loss under microgravity conditions during space flight. Symbolic dynamics and measures of complexity are proposed and applied to quantitatively assess the structural composition of bone tissue from 3D data sets of human tibia bone biopsies acquired by a micro-CT scanner. To justify the newly proposed approach, the measures of complexity of the bone architecture were compared with the results of traditional 2D bone histomorphometry. The proposed technique is able to quantify the structural loss of bone tissue and may help to diagnose and monitor changes in the bone structure of patients on Earth as well as of space-flying personnel.},
  language = {en}
}

@article{ZaikinKurthsSaparinetal.2005,
  author   = {Zaikin, Alexei and Kurths, J{\"u}rgen and Saparin, Peter and Gowin, W. and Prohaska, Steffen},
  title    = {Modeling bone resorption in 2D CT and 3D $\mu$CT images},
  issn     = {0218-1274},
  year     = {2005},
  abstract = {We study several algorithms to simulate bone mass loss in two-dimensional and three-dimensional computed tomography (CT) bone images. The aim is to extrapolate and predict bone loss, to provide test objects for newly developed structural measures, and to understand the physical mechanisms behind bone alteration. Our bone model approach differs from those already reported in the literature in two respects: first, we work with original bone images obtained by computed tomography; second, we use structural measures of complexity to evaluate bone resorption and to compare it with the data provided by CT. This allows us to test bone resorption algorithms by comparing their results with experimentally found dependencies of the structural measures of complexity, and to show the efficiency of the complexity measures in the analysis of bone models. For two-dimensional images we suggest two algorithms, a threshold algorithm and a virtual slicing algorithm. The threshold algorithm simulates bone resorption on the boundary between bone and marrow, representing the activity of osteoclasts. The virtual slicing algorithm distributes the bone material between several virtually created slices to achieve statistically correct results when the bone-marrow transition is not clearly defined. These algorithms were tested on original 10 mm thick vertebral CT slices and on simulated 10 mm thick slices constructed from ten 1 mm thick slices. For three-dimensional data, we suggest a variation of the threshold algorithm and apply it to bone images. The results of the modeling were compared with CT images using structural measures of complexity in two and three dimensions. This comparison confirmed the credibility of the virtual slicing algorithm for two-dimensional data and of the threshold algorithm for three-dimensional data.},
  language = {en}
}
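The threshold algorithm in the entry above erodes bone at the bone-marrow interface to mimic osteoclast activity. The following Python sketch illustrates one plausible reading of that idea; the function name threshold_resorption, the probabilistic deletion rule, and the parameter p_resorb are illustrative assumptions, since the abstract does not specify the exact update rule.

import numpy as np
from scipy import ndimage

def threshold_resorption(bone, p_resorb=0.05, iterations=10, seed=None):
    """Simulate bone loss by stochastically eroding the bone-marrow
    boundary (a sketch; the paper's exact rule is not given here).

    bone: 2D or 3D boolean array, True = bone, False = marrow.
    p_resorb: probability that a boundary voxel is resorbed per step.
    """
    rng = np.random.default_rng(seed)
    bone = bone.copy()
    for _ in range(iterations):
        # Boundary voxels: bone with at least one marrow neighbor.
        boundary = bone & ~ndimage.binary_erosion(bone)
        # Remove a random subset, mimicking osteoclast activity.
        bone &= ~(boundary & (rng.random(bone.shape) < p_resorb))
    return bone

Tracking bone.mean() (the bone volume fraction) after each iteration yields a resorption curve that could be compared against structural measures of complexity, in the spirit of the paper's evaluation.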
@phdthesis{Prohaska2007,
  author   = {Prohaska, Steffen},
  title    = {Skeleton-based visualization of massive voxel objects with network-like architecture},
  url      = {http://nbn-resolving.de/urn:nbn:de:kobv:517-opus-14888},
  school   = {Universit{\"a}t Potsdam},
  year     = {2007},
  abstract = {This work introduces novel internal and external memory algorithms for computing voxel skeletons of massive voxel objects with complex network-like architecture and for converting these voxel skeletons to piecewise linear geometry, that is, triangle meshes and piecewise straight lines. The presented techniques help to tackle the challenge of visualizing and analyzing 3D images of increasing size and complexity, which are becoming more and more important in, for example, biological and medical research. Section 2.3.1 contributes to the theoretical foundations of thinning algorithms with a discussion of homotopic thinning in the grid cell model. The grid cell model explicitly represents a cell complex built of faces, edges, and vertices shared between voxels. Characterizing pairs of cells to be deleted is much simpler than previous characterizations of simple voxels. The grid cell model resolves topologically unclear voxel configurations at junctions and locked voxel configurations that cause, for example, interior voxels in sets of non-simple voxels. A general conclusion is that the grid cell model is superior to indecomposable voxels for algorithms that need detailed control of topology. Section 2.3.2 introduces a noise-insensitive measure based on the geodesic distance along the boundary to compute two-dimensional skeletons. The measure is able to retain thin object structures if they are geometrically important while ignoring noise on the object's boundary. This combination of properties is not known from any other measure. The measure is also used to guide erosion in a thinning process from the boundary towards lines centered within plate-like structures. Geodesic-distance-based quantities seem to be well suited to robustly identify one- and two-dimensional skeletons. Chapter 6 applies the method to the visualization of bone micro-architecture. Chapter 3 describes a novel geometry generation scheme for representing voxel skeletons, which retracts voxel skeletons to piecewise linear geometry per dual cube. The generated triangle meshes and graphs provide a link to geometry processing and efficient rendering of voxel skeletons. The scheme creates non-closed surfaces with boundaries, which contain fewer triangles than a representation of voxel skeletons using closed surfaces such as small cubes or iso-surfaces. A conclusion is that thinking specifically about voxel skeleton configurations instead of generic voxel configurations helps to deal with the topological implications. The geometry generation is one foundation of the applications presented in Chapter 6. Chapter 5 presents a novel external memory algorithm for distance-ordered homotopic thinning. The presented method extends known algorithms for computing chamfer distance transformations and thinning to execute I/O-efficiently when the input is larger than the available main memory. The applied block-wise decomposition schemes are quite simple, yet it was necessary to carefully analyze the effects of block boundaries to devise globally correct external memory variants of the known algorithms. In general, doing so is superior to naive block-wise processing that ignores boundary effects. Chapter 6 applies the algorithms in a novel method, based on confocal microscopy, for the quantitative study of micro-vascular networks in the field of microcirculation.},
  language = {en}
}
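Chapter 5 of the thesis above extends chamfer distance transformations to run I/O-efficiently on block-decomposed volumes. The external-memory machinery is beyond a short example, but the in-memory primitive being extended is the classic two-pass chamfer transform, sketched below in Python for 2D with the common 3-4 weights; the function name chamfer_3_4 is illustrative and not taken from the thesis.

import numpy as np

INF = 10**9  # effectively infinite chamfer distance

def chamfer_3_4(mask):
    """Two-pass 3-4 chamfer distance transform of a 2D boolean mask.

    Distances run from each True pixel to the nearest False pixel:
    3 per axial step, 4 per diagonal step (roughly 3x Euclidean).
    """
    h, w = mask.shape
    d = np.where(mask, INF, 0).astype(np.int64)
    # Forward pass: relax against the already-visited upper-left neighbors.
    for y in range(h):
        for x in range(w):
            if d[y, x] == 0:
                continue
            if x > 0:
                d[y, x] = min(d[y, x], d[y, x - 1] + 3)
            if y > 0:
                d[y, x] = min(d[y, x], d[y - 1, x] + 3)
                if x > 0:
                    d[y, x] = min(d[y, x], d[y - 1, x - 1] + 4)
                if x < w - 1:
                    d[y, x] = min(d[y, x], d[y - 1, x + 1] + 4)
    # Backward pass: relax against the lower-right neighbors.
    for y in range(h - 1, -1, -1):
        for x in range(w - 1, -1, -1):
            if d[y, x] == 0:
                continue
            if x < w - 1:
                d[y, x] = min(d[y, x], d[y, x + 1] + 3)
            if y < h - 1:
                d[y, x] = min(d[y, x], d[y + 1, x] + 3)
                if x < w - 1:
                    d[y, x] = min(d[y, x], d[y + 1, x + 1] + 4)
                if x > 0:
                    d[y, x] = min(d[y, x], d[y + 1, x - 1] + 4)
    return d

The two raster scans are what makes a block-wise, I/O-efficient variant non-trivial: distance values must propagate correctly across block boundaries, which is exactly the boundary analysis the abstract emphasizes.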
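Distance-ordered homotopic thinning, the other algorithm Chapter 5 makes external-memory capable, deletes simple pixels in order of increasing distance to the background while preserving topology. A minimal in-memory 2D sketch follows, using the Yokoi connectivity number as the simple-pixel test; the thesis itself works in 3D with the grid cell model, so everything here, including the endpoint criterion, is a simplified stand-in rather than the thesis's method.

import numpy as np
from scipy import ndimage

# Circular 8-neighborhood, starting east; axial neighbors at even indices.
_OFFS = [(0, 1), (-1, 1), (-1, 0), (-1, -1),
         (0, -1), (1, -1), (1, 0), (1, 1)]

def _is_simple(img, y, x):
    """Yokoi connectivity number == 1: deleting the pixel preserves
    topology (8-connected foreground, 4-connected background)."""
    n = [int(img[y + dy, x + dx]) for dy, dx in _OFFS]
    c = 0
    for k in (0, 2, 4, 6):
        xb = [1 - n[(k + i) % 8] for i in (0, 1, 2)]
        c += xb[0] - xb[0] * xb[1] * xb[2]
    return c == 1

def _is_endpoint(img, y, x):
    """Curve endpoint (exactly one foreground neighbor), kept so that
    thinning retains line geometry."""
    return int(img[y - 1:y + 2, x - 1:x + 2].sum()) - int(img[y, x]) == 1

def thin(mask):
    """Distance-ordered homotopic thinning of a 2D binary image."""
    img = np.pad(mask.astype(np.uint8), 1)  # zero border simplifies tests
    changed = True
    while changed:
        changed = False
        # Chessboard distance to background orders deletion from the
        # boundary inwards, keeping the skeleton centered.
        dist = ndimage.distance_transform_cdt(img, metric='chessboard')
        ys, xs = np.nonzero(img)
        order = sorted(zip(dist[ys, xs].tolist(), ys.tolist(), xs.tolist()))
        for _, y, x in order:
            if img[y, x] and not _is_endpoint(img, y, x) and _is_simple(img, y, x):
                img[y, x] = 0
                changed = True
    return img[1:-1, 1:-1].astype(bool)

In the thesis this ordering is driven by a chamfer distance transform and executed per block; the sketch instead recomputes the distance field per pass with scipy for brevity.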