
{  
   "types" : {
      "Bookmark" : {
         "pluralLabel" : "Bookmarks"
      },
      "Publication" : {
         "pluralLabel" : "Publications"
      },
      "GoldStandardPublication" : {
         "pluralLabel" : "GoldStandardPublications"
      },
      "GoldStandardBookmark" : {
         "pluralLabel" : "GoldStandardBookmarks"
      },
      "Tag" : {
         "pluralLabel" : "Tags"
      },
      "User" : {
         "pluralLabel" : "Users"
      },
      "Group" : {
         "pluralLabel" : "Groups"
      },
      "Sphere" : {
         "pluralLabel" : "Spheres"
      }
   },
   
   "properties" : {
      "count" : {
         "valueType" : "number"
      },
      "date" : {
         "valueType" : "date"
      },
      "changeDate" : {
         "valueType" : "date"
      },
      "url" : {
         "valueType" : "url"
      },
      "id" : {
         "valueType" : "url"
      },
      "tags" : {
         "valueType" : "item"
      },
      "user" : {
         "valueType" : "item"
      }      
   },
   
   "items" : [
   	  
      {
         "type" : "Publication",
         "id"   : "https://puma.ub.uni-stuttgart.de/bibtex/26809bfc73b7cafc5a109578f775d3a84/mgeiger",         
         "tags" : [
            "(computer","(mathematics),","Computational","GPU,","Ptex,","Rendering","Splines","Terrain,","Three-dimensional","Tools,","algorithms,","common","complete","computer","curve-based","curves,","data","data,","diffusion,","diffusion-based","displays,","extraction,","feature","feature-based","features,","field,","fields,","generation,","graphics),","graphics,","height","input","interactive","large-scale","layers,","manual","mapping,","modeling,","modelling","modelling,","multiple","offline","primitives,","prominent","real-time","rendering","rendering,","representation","representations,","scale","sparse","spline","structure,","structures,","surface","surface,","terrain","texture,","three-dimensional","toolset,","two-dimensional","vertical","visualisation,","volumetric","workflow,"
         ],
         
         "intraHash" : "6809bfc73b7cafc5a109578f775d3a84",
         "interHash" : "eb89ebb1743ac1f23d0413247168fe06",
         "label" : "Feature-based volumetric terrain generation and decoration",
         "user" : "mgeiger",
         "description" : "",
         "date" : "2019-11-08 16:11:00",
         "changeDate" : "2019-11-08 15:14:53",
         "count" : 5,
         "pub-type": "article",
         "journal": "IEEE Transactions on Visualization and Computer Graphics",
         "year": "2019", 
         "url": "", 
         
         "author": [ 
            "Michael Becher","Michael Krone","Guido Reina","Thomas Ertl"
         ],
         "authors": [
         	
            	{"first" : "Michael",	"last" : "Becher"},
            	{"first" : "Michael",	"last" : "Krone"},
            	{"first" : "Guido",	"last" : "Reina"},
            	{"first" : "Thomas",	"last" : "Ertl"}
         ],
         "volume": "25","number": "2","pages": "1283--1296","abstract": "Two-dimensional height fields are the most common data structure used for storing and rendering of terrain in offline rendering and especially real-time computer graphics. By its very nature, a height field cannot store terrain structures with multiple vertical layers such as overhanging cliffs, caves, or arches. This restriction does not apply to volumetric data structures. However, the workflow of manual modelling and editing of volumetric terrain usually is tedious and very time-consuming. Therefore, we propose to use three-dimensional curve-based primitives to efficiently model prominent, large-scale terrain features. We present a technique for volumetric generation of a complete terrain surface from the sparse input data by means of diffusion-based algorithms. By combining an efficient, feature-based toolset with a volumetric terrain representation, the modelling workflow is accelerated and simplified while retaining the full artistic freedom of volumetric terrains. Feature Curves also contain material information that can be complemented with local details by using per-face texture mapping. All stages of our method are GPU-accelerated using compute shaders to ensure interactive editing of terrain. Please note that this paper is an extended version of our previously published work [1].",
         "doi" : "10.1109/TVCG.2017.2762304",
         
         "bibtexKey": "becher_feature-based_2019"

      }
,
      {
         "type" : "Publication",
         "id"   : "https://puma.ub.uni-stuttgart.de/bibtex/2642682ff73a376da21daf23bcf1a50ee/isw-bibliothek",         
         "tags" : [
            "Augmented","Machine","Rendering","Tools","View-dependent","Virtual"
         ],
         
         "intraHash" : "642682ff73a376da21daf23bcf1a50ee",
         "interHash" : "1664c06d9ec91faa4425f101f22abbb2",
         "label" : "View-dependent Virtual and Augmented Reality for Machine Tools",
         "user" : "isw-bibliothek",
         "description" : "",
         "date" : "2017-05-16 17:06:11",
         "changeDate" : "2017-05-16 15:06:56",
         "count" : 1,
         "pub-type": "inproceedings",
         "booktitle": "2016 European Modelling Symposium (EMS)","publisher":"IEEE",
         "year": "2016", 
         "url": "https://doi.org/10.1109%2Fems.2016.035", 
         
         "author": [ 
            "Philipp Sommer","Alexander Verl"
         ],
         "authors": [
         	
            	{"first" : "Philipp",	"last" : "Sommer"},
            	{"first" : "Alexander",	"last" : "Verl"}
         ],
         "abstract": "In recent papers, the concept of an intelligent window for machine tools (iWindow) using virtual and augmented reality in order to replace traditional windows in machine tools was introduced. Such intelligent windows provide additional possibilities for observing the machine interior under difficult visibility conditions, combine and reduce the number of supporting systems and enrich the machine interior with contextual information. For creating a view-dependent virtual and augmented reality individual solution approaches presented in other research were combined, extended and applied to machine tools to form an intelligent window. For the impression of a real window and perspectively correctly overlay virtual objects, the operator\u2019s head position is tracked and the perspective projection matrix is calculated continuously. The necessary equations are presented. In an experimental setup, view-dependent rendering for virtual reality on opaque displays as well as augmented reality on transparent displays were validated. The rendering is performing well and an accurate overlay of virtual and real machine interior can be created. However, several difficulties regarding illumination, transparency and human stereo vision did occur. Possible improvements and solutions are suggested.",
         "doi" : "10.1109/ems.2016.035",
         
         "bibtexKey": "Sommer_2016"

      }
	  
   ]
}
