<?xml version="1.0" encoding="UTF-8"?><?xml-stylesheet type="text/xsl" href="https://digitalproduction.com/wp-content/plugins/xslt/public/template.xsl"?><rss version="2.0"
	xmlns:content="http://purl.org/rss/1.0/modules/content/"
	xmlns:wfw="http://wellformedweb.org/CommentAPI/"
	xmlns:dc="http://purl.org/dc/elements/1.1/"
	xmlns:atom="http://www.w3.org/2005/Atom"
	xmlns:sy="http://purl.org/rss/1.0/modules/syndication/"
	xmlns:slash="http://purl.org/rss/1.0/modules/slash/"
	xmlns:rssFeedStyles="http://www.wordpress.org/ns/xslt#"
>

<channel>
	<title>AR - DIGITAL PRODUCTION</title>
	<atom:link href="https://digitalproduction.com/tag/ar/feed/" rel="self" type="application/rss+xml" />
	<link>https://digitalproduction.com</link>
	<description>Magazine for Digital Media Production</description>
	<lastBuildDate>Fri, 27 Mar 2026 11:43:41 +0000</lastBuildDate>
	<language>en-US</language>
	<sy:updatePeriod>
	hourly	</sy:updatePeriod>
	<sy:updateFrequency>
	1	</sy:updateFrequency>
	
<site xmlns="com-wordpress:feed-additions:1">236729828</site>	<item>
		<title>XR Day 2026 brings XR to Ludwigsburg</title>
		<link>https://digitalproduction.com/2026/03/30/xr-day-2026-brings-xr-to-ludwigsburg/</link>
		
		<dc:creator><![CDATA[Bela Beier]]></dc:creator>
		<pubDate>Mon, 30 Mar 2026 06:00:00 +0000</pubDate>
				<category><![CDATA[News]]></category>
		<category><![CDATA[Animationsinstitut]]></category>
		<category><![CDATA[AR]]></category>
		<category><![CDATA[CyberLÄND]]></category>
		<category><![CDATA[GaussianSplatting]]></category>
		<category><![CDATA[MAIA]]></category>
		<category><![CDATA[MR]]></category>
		<category><![CDATA[TREMENS]]></category>
		<category><![CDATA[VirtualProduction]]></category>
		<category><![CDATA[VR]]></category>
		<category><![CDATA[XR]]></category>
		<category><![CDATA[XR Day]]></category>
		<guid isPermaLink="false">https://digitalproduction.com/?p=263388</guid>

					<description><![CDATA[<div style="margin: 5px 5% 10px 5%;"><img src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2026/03/ledwall_adkworkshop2026_cjonastrottnow1-0c580c78.png?fit=1200%2C675&quality=72&ssl=1" width="1200" height="675" title="" alt="A dancer performs in front of a vibrant digital backdrop featuring glowing pink and blue waves, with an audience silhouetted in shadow, engaging with the lively performance." /></div><div><p>Talks in the morning, headsets in the afternoon. XR Day tours fox vision, AI avatars, and a Minotaur choice with real pipeline flavor.</p>
<p>The post <a href="https://digitalproduction.com/2026/03/30/xr-day-2026-brings-xr-to-ludwigsburg/">XR Day 2026 brings XR to Ludwigsburg</a> first appeared on <a href="https://digitalproduction.com">DIGITAL PRODUCTION</a> and was written by <a href="https://digitalproduction.com/author/qualityjellyfish45275761d0/">Bela Beier</a>. </p></div>]]></description>
										<content:encoded><![CDATA[<div style="margin: 5px 5% 10px 5%;"><img src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2026/03/ledwall_adkworkshop2026_cjonastrottnow1-0c580c78.png?fit=1200%2C675&quality=72&ssl=1" width="1200" height="675" title="" alt="A dancer performs in front of a vibrant digital backdrop featuring glowing pink and blue waves, with an audience silhouetted in shadow, engaging with the lively performance." /></div><div><script type='application/json' class='__iawmlf-post-loop-links'>[{"id":13862,"href":"https:\/\/www.filmakademie.de\/?utm_source=chatgpt.com","archived_href":"","redirect_href":"https:\/\/www.filmakademie.de\/en\/","checks":[],"broken":false,"last_checked":null,"process":"done"},{"id":13863,"href":"https:\/\/www.adk-bw.de","archived_href":"http:\/\/web-wp.archive.org\/web\/20260307190106\/https:\/\/www.adk-bw.de\/","redirect_href":"","checks":[{"date":"2026-03-30 06:03:36","http_code":200},{"date":"2026-04-08 17:27:08","http_code":200},{"date":"2026-04-13 14:36:15","http_code":200},{"date":"2026-04-17 09:12:22","http_code":200},{"date":"2026-04-28 13:58:57","http_code":200}],"broken":false,"last_checked":{"date":"2026-04-28 13:58:57","http_code":200},"process":"done"},{"id":13864,"href":"https:\/\/cyberlaend.eu","archived_href":"http:\/\/web-wp.archive.org\/web\/20250216073033\/https:\/\/www.cyberlaend.eu\/","redirect_href":"https:\/\/cyberlaend.eu\/willkommen","checks":[{"date":"2026-03-30 07:38:00","http_code":200},{"date":"2026-04-08 17:27:20","http_code":200},{"date":"2026-04-13 14:36:19","http_code":200},{"date":"2026-04-17 09:12:23","http_code":200},{"date":"2026-04-28 13:59:07","http_code":200}],"broken":false,"last_checked":{"date":"2026-04-28 13:59:07","http_code":200},"process":"done"},{"id":13865,"href":"https:\/\/www.acameo.de","archived_href":"http:\/\/web-wp.archive.org\/web\/20260212210400\/https:\/\/www.acameo.de\/","redirect_href":"","checks":[{"date":"2026-03-30 
06:04:35","http_code":200},{"date":"2026-04-08 17:27:17","http_code":200},{"date":"2026-04-13 14:36:17","http_code":200},{"date":"2026-04-17 09:12:22","http_code":200},{"date":"2026-04-28 13:59:06","http_code":200}],"broken":false,"last_checked":{"date":"2026-04-28 13:59:06","http_code":200},"process":"done"},{"id":13866,"href":"https:\/\/www.b-rex.de","archived_href":"","redirect_href":"","checks":[],"broken":false,"last_checked":null,"process":"done"},{"id":13867,"href":"https:\/\/www.residenztheater.de","archived_href":"","redirect_href":"","checks":[],"broken":false,"last_checked":null,"process":"done"},{"id":13868,"href":"https:\/\/crew.brussels\/en\/about","archived_href":"http:\/\/web-wp.archive.org\/web\/20260218182611\/https:\/\/crew.brussels\/en\/about","redirect_href":"","checks":[{"date":"2026-03-30 06:04:50","http_code":200},{"date":"2026-04-08 17:27:50","http_code":503},{"date":"2026-04-13 14:36:24","http_code":503}],"broken":false,"last_checked":{"date":"2026-04-13 14:36:24","http_code":503},"process":"done"},{"id":13869,"href":"https:\/\/www.filmakademie.de\/de\/news-aktuelles\/article\/xr-day-2026-immersion-kunst-performance","archived_href":"http:\/\/web-wp.archive.org\/web\/20260330062333\/https:\/\/www.filmakademie.de\/de\/news-aktuelles\/article\/xr-day-2026-immersion-kunst-performance","redirect_href":"","checks":[{"date":"2026-03-30 07:41:37","http_code":200},{"date":"2026-04-08 17:27:52","http_code":503},{"date":"2026-04-13 14:36:20","http_code":200}],"broken":false,"last_checked":{"date":"2026-04-13 
14:36:20","http_code":200},"process":"done"},{"id":13870,"href":"https:\/\/www.filmakademie.de\/de\/ueber-die-fabw\/organisation-abteilungen\/carl-laemmle-institut","archived_href":"","redirect_href":"","checks":[],"broken":false,"last_checked":null,"process":"done"},{"id":13871,"href":"https:\/\/animationsinstitut.de\/de","archived_href":"http:\/\/web-wp.archive.org\/web\/20251207104357\/https:\/\/animationsinstitut.de\/de\/","redirect_href":"","checks":[{"date":"2026-03-30 06:05:34","http_code":200},{"date":"2026-04-08 17:27:58","http_code":200},{"date":"2026-04-13 14:36:21","http_code":200}],"broken":false,"last_checked":{"date":"2026-04-13 14:36:21","http_code":200},"process":"done"},{"id":4596,"href":"https:\/\/www.filmakademie.de","archived_href":"http:\/\/web-wp.archive.org\/web\/20240913012200\/https:\/\/www.filmakademie.de\/","redirect_href":"","checks":[{"date":"2025-12-28 16:36:12","http_code":404},{"date":"2026-01-30 22:50:02","http_code":404},{"date":"2026-02-18 22:38:51","http_code":503},{"date":"2026-03-13 15:12:00","http_code":404},{"date":"2026-03-30 08:08:35","http_code":404},{"date":"2026-04-08 17:28:04","http_code":404},{"date":"2026-04-13 14:36:25","http_code":404}],"broken":true,"last_checked":{"date":"2026-04-13 14:36:25","http_code":404},"process":"done"}]</script>
<p class="wp-block-paragraph">XR Day 2026 runs Friday, April 10, from 9:00 to 18:00 on the shared campus of <a href="https://www.filmakademie.de/?utm_source=chatgpt.com">Filmakademie Baden-Württemberg</a> and <a href="https://www.adk-bw.de/">Akademie für Darstellende Kunst Baden-Württemberg</a>. The day focuses on eXtended Reality as non-linear, user-controlled technology across film, theater, media, art, and industry. The format mixes impulse talks with hands-on experiences for VR, AR, and MR, with an explicit focus on where new application fields emerge.</p>



<p class="wp-block-paragraph"></p>



<h3 id="morning-talks" class="wp-block-heading">Morning talks</h3>



<p class="wp-block-paragraph">A welcome at 9:00 sets the day with Volker Helzle, Ludger Engels, and Ilja Mirsky.</p>



<p class="wp-block-paragraph">At 9:10, WHAT THE FOX puts participants into a nighttime city from the perspective of a red fox. The talk features Gaussian Splatting, with Alex Herrmann and Alec Barth from jyotifilm as speakers, focusing on interdisciplinary teamwork across art, science, and technology.</p>



<p class="wp-block-paragraph">At 9:30, MAIA presents a 3D avatar linked to <a href="https://cyberlaend.eu/" title="">CyberLÄND</a>. Frank Dürr from <a href="https://www.acameo.de/" title="">acameo</a> and CyberLÄND plus Max Schmierer from <a href="https://www.b-rex.de/" title="">b.ReX</a> present how AI supported figures connect immersive experiences, community interaction, and digital business models.</p>



<figure class="wp-block-image"><img data-recalc-dims="1"  decoding="async"  src="https://i0.wp.com/www.filmakademie.de/files/public/14%20Events%20Indoor/ReverseTuring2%281%29.jpg?w=1200&quality=80&ssl=1"  alt="https://www.filmakademie.de/files/public/14%20Events%20Indoor/ReverseTuring2%281%29.jpg" ></figure>



<p class="wp-block-paragraph">At 9:50, REVERSE TURING TEST sets five ticket holders in a train compartment, billed as a group ticket for artificial intelligences, with one human hidden among them. The twist is that AI systems identify the human. </p>



<p class="wp-block-paragraph">At 10:10, Kameraarbeit für Virtual Production frames virtual production as a broadened form of cinematic thinking, where composition, lighting design, scenography, performance, and real time visualization merge. <a href="https://digitalproduction.com/2025/01/05/hdr-without-headaches-dolby-vision-training-by-matthias-bolliger/" title="HDR Without Headaches: Dolby Vision Training by Matthias Bolliger">Matthias Bolliger</a> is the speaker. </p>



<p class="wp-block-paragraph">At 10:30, Breakfall focuses on capturing spontaneous moments. Frederik Stapf and Raphael Tonn explain how improvised movement becomes an animated dance film.</p>



<figure class="wp-block-image"><img data-recalc-dims="1"  decoding="async"  src="https://i0.wp.com/www.filmakademie.de/files/public/14%20Events%20Indoor/schiele.jpg?w=1200&quality=80&ssl=1"  alt="https://www.filmakademie.de/files/public/14%20Events%20Indoor/schiele.jpg" ></figure>



<p class="wp-block-paragraph">At 11:00, EGON SCHIELE. Eine persönliche Begegnung is a realistic VR experience by Gerda Leopold, framed as a journey through Vienna around 1900. The artist looks back on life, speaks with participants, and asks questions.</p>



<p class="wp-block-paragraph">At 11:20, TREMENS combines live performance and virtual reality, inspired by Timon von Athen by William Shakespeare. The XR experience was developed at <a href="https://www.residenztheater.de/" title="">Residenztheater München</a>. An excerpt is presented by the Belgian media art collective <a href="https://crew.brussels/en/about" title="">CREW</a>. The piece  places the audience in flowing architectures and interactive avatars, with the boundary between stage presence and virtual experience described as blurring.</p>



<p class="wp-block-paragraph">At 11:40, <a href="https://www.filmakademie.de/de/news-aktuelles/article/xr-day-2026-immersion-kunst-performance">AniTrailer</a> and <a href="https://www.filmakademie.de/de/news-aktuelles/article/xr-day-2026-immersion-kunst-performance">AniPlay</a> cover student team production over four months, resulting in either an animated trailer or an interactive experience. The speakers listed are Diana Arellano, Clara Deitmar, Sam Matsa, and Lennart Haak, with insight into collaborative workflows, XR technologies, and virtual production approaches.</p>



<p class="wp-block-paragraph">At 12:00, XR in Baden-Württemberg introduces the <a href="https://www.filmakademie.de/de/ueber-die-fabw/organisation-abteilungen/carl-laemmle-institut" title="">Carl Laemmle Institut</a> along with two funded research projects named VISTA and IMRL, both focused on extended reality at the interface between the creative industries and other industrial sectors. </p>



<h3 id="hands-on-experiences" class="wp-block-heading">Hands on experiences</h3>



<p class="wp-block-paragraph">From 14:00 onward, the program switches to XR experiences on site.</p>



<p class="wp-block-paragraph">TREMENS appears as a live performance and VR hybrid with continuously transforming spaces, interactive avatars, and architecture.</p>



<figure class="wp-block-image"><img data-recalc-dims="1"  decoding="async"  src="https://i0.wp.com/www.filmakademie.de/files/public/14%20Events%20Indoor/FateOfTheMinotaur_PromoWithLogo_v02_16by9_1920x1080.jpg?w=1200&quality=80&ssl=1"  alt="https://www.filmakademie.de/files/public/14%20Events%20Indoor/FateOfTheMinotaur_PromoWithLogo_v02_16by9_1920x1080.jpg" ></figure>



<p class="wp-block-paragraph">FATE OF THE MINOTAUR is a location based VR experience developed at <a href="https://animationsinstitut.de/de/" title="">Animationsinstitut</a>. Players take the role of human sacrifices from Athens sent into the labyrinth by Minos, King of Crete, and face a choice: kill the Minotaur or spare the tormented creature. The story supports different immersive levels depending on technical and spatial conditions on site.</p>



<p class="wp-block-paragraph">LICHTUNG invites visitors into a sleeping world that wakes up gradually through interaction.</p>



<p class="wp-block-paragraph">LIMBO is an interactive installation where visitors explore their most beautiful memories through a conversation with an artificial intelligence, while reflecting on how memories shape personal convictions and influence society.</p>



<h3 id="the-takeaway-for-post-vfx-and-realtime-folks" class="wp-block-heading">The takeaway for post, VFX, and realtime folks</h3>



<p class="wp-block-paragraph">This program stitches viewpoint experiments, realtime visualization language, stage driven XR, and interactive installations into one schedule, and it keeps practical constraints visible, like immersion levels adapting to venue and gear for FATE OF THE MINOTAUR.</p>



<p class="wp-block-paragraph"><br /><a href="https://www.filmakademie.de/de/news-aktuelles/article/xr-day-2026-immersion-kunst-performance">https://www.filmakademie.de/de/news-aktuelles/article/xr-day-2026-immersion-kunst-performance</a><br /></p>



<p class="wp-block-paragraph"><br /><a href="https://www.filmakademie.de/">https://www.filmakademie.de/</a></p>



<p class="wp-block-paragraph"><br /><a href="https://www.adk-bw.de/">https://www.adk-bw.de/</a></p>



<p class="wp-block-paragraph"><br /><a href="https://animationsinstitut.de/de/">https://animationsinstitut.de/de/</a><br /><br /><br /><br /><br /><br /><br /><br /><br /></p>



<p class="wp-block-paragraph"></p>



<p class="wp-block-paragraph"></p>



<p class="wp-block-paragraph"></p>



<p class="wp-block-paragraph"></p>



<p class="wp-block-paragraph"></p><p>The post <a href="https://digitalproduction.com/2026/03/30/xr-day-2026-brings-xr-to-ludwigsburg/">XR Day 2026 brings XR to Ludwigsburg</a> first appeared on <a href="https://digitalproduction.com">DIGITAL PRODUCTION</a> and was written by <a href="https://digitalproduction.com/author/qualityjellyfish45275761d0/">Bela Beier</a>. </p></div>]]></content:encoded>
					
		
		
	<enclosure url="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2026/03/ledwall_adkworkshop2026_cjonastrottnow1-0c580c78.png?fit=1590%2C894&#038;quality=72&#038;ssl=1" length="550761" type="image/png" />
<media:content xmlns:media="http://search.yahoo.com/mrss/" url="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2026/03/ledwall_adkworkshop2026_cjonastrottnow1-0c580c78.png?fit=1200%2C675&#038;quality=72&#038;ssl=1" width="1200" height="675" medium="image" type="image/jpeg">
	<media:copyright>DIGITAL PRODUCTION</media:copyright>
	<media:title></media:title>
	<media:description type="html"><![CDATA[A dancer performs in front of a vibrant digital backdrop featuring glowing pink and blue waves, with an audience silhouetted in shadow, engaging with the lively performance.]]></media:description>
</media:content>
<media:thumbnail xmlns:media="http://search.yahoo.com/mrss/" url="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2026/03/ledwall_adkworkshop2026_cjonastrottnow1-0c580c78.png?fit=1200%2C675&#038;quality=72&#038;ssl=1" width="1200" height="675" />
<post-id xmlns="com-wordpress:feed-additions:1">263388</post-id>	</item>
		<item>
		<title>VR beyond the hype</title>
		<link>https://digitalproduction.com/2024/06/28/vr-jenseits-vom-hype/</link>
		
		<dc:creator><![CDATA[Bela Beier]]></dc:creator>
		<pubDate>Fri, 28 Jun 2024 17:49:00 +0000</pubDate>
				<category><![CDATA[Articles]]></category>
		<category><![CDATA[amusement parks]]></category>
		<category><![CDATA[AR]]></category>
		<category><![CDATA[DP2403]]></category>
		<category><![CDATA[Epic Unreal Engine]]></category>
		<category><![CDATA[Event]]></category>
		<category><![CDATA[mack one]]></category>
		<category><![CDATA[subscribers]]></category>
		<category><![CDATA[VR]]></category>
		<guid isPermaLink="false">https://digitalproduction.com/?p=144220</guid>

					<description><![CDATA[<div style="margin: 5px 5% 10px 5%;"><img src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2024/09/image-12.webp?fit=1200%2C675&quality=72&ssl=1" width="1200" height="675" title="" alt="" /></div><div><p>Although the hype surrounding the Apple Vision Pro is still going strong, VR has yet to really gain a foothold at home. However, where it is really picking up speed is in the area of "rides" and free roaming. Amusement parks are where the technology can be fully utilised, regardless of home PC compatibility. We ask ourselves, what does it all look like then?</p>
<p>The post <a href="https://digitalproduction.com/2024/06/28/vr-jenseits-vom-hype/">VR beyond the hype</a> first appeared on <a href="https://digitalproduction.com">DIGITAL PRODUCTION</a> and was written by <a href="https://digitalproduction.com/author/belabeier/">Bela Beier</a>. </p></div>]]></description>
										<content:encoded><![CDATA[<div style="margin: 5px 5% 10px 5%;"><img src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2024/09/image-12.webp?fit=1200%2C675&quality=72&ssl=1" width="1200" height="675" title="" alt="" /></div><div><script type='application/json' class='__iawmlf-post-loop-links'>[{"id":2655,"href":"http:\/\/www.vrcoaster.com","archived_href":"http:\/\/web-wp.archive.org\/web\/20250920224629\/https:\/\/www.vrcoaster.com\/","redirect_href":"","checks":[{"date":"2025-12-28 04:43:14","http_code":200}],"broken":false,"last_checked":{"date":"2025-12-28 04:43:14","http_code":200},"process":"done"}]</script>

<p class="wp-block-paragraph"></p>





<p class="wp-block-paragraph">It turns out that there are studios and companies that specialise in exactly this kind of thing. Unique on this planet is the MACK Group, which is not only a manufacturer of rollercoasters [MACK Rides], but also operates Europa-Park (europapark.de) and is home to several leading companies in the fields of VR, specialised hardware and creative content production. We are talking about the company VR Coaster <a href="http://www.vrcoaster.com" target="_blank" rel="noreferrer noopener">(www.vrcoaster.com)</a> based in Kaiserslautern, the sub-unit MACK Interactive and the group driver MACK One (mackone.eu), which not only develop prototypes and VR experiences for all rides, but also have an Animago judge in their ranks: Alexander Bouquet. He is Managing Director of VR Coaster and Executive Director within MACK One.</p>





<p class="wp-block-paragraph">For this interview, he is joined by Robin Herrmann, COO and Head of Production at VR Coaster, and Marcus Ernst, Senior Product Manager at MACK Interactive.</p>





<p class="wp-block-paragraph"><strong>DP: Before we talk about the “how”: What exactly makes your VR rides stand out?</strong></p>





<p class="wp-block-paragraph">Alexander Bouquet: VR Coaster GmbH & Co. KG has been adding virtual reality to real rides such as rollercoasters and freefall towers for 10 years now. This combination allows you to feel real “airtime” during the VR experience, i.e. weightlessness, G-forces and generally the classic “tingling in the stomach” of a real rollercoaster ride. The first experiments took place in 2014, when Michael Mack let Thomas Wagner and his virtual design students onto the Blue Fire roller coaster with VR goggles and a laptop for the first time.</p>





<p class="wp-block-paragraph">At the time, it was quickly realised that this had created a completely new type of ride that combined a real ride experience with media-based storytelling for the first time. For example, you can experience a flight on a dragon, a breakneck chase on a jet ski or a space battle more immersively than ever before. Shortly afterwards, the two founded VR Coaster GmbH & Co KG together with Mack Rides, the Mack Group’s rollercoaster manufacturer. The VR Coaster portfolio now also includes underwater VR experiences, and the award-winning free-roaming format “YULLBE” was also developed together with MACK One, where guests can walk through fantastic VR worlds on their own.</p>





<figure class="wp-block-image"><img  decoding="async"  src="https://images.creativebase.com/_next/image?url=https://s3.eu-central-1.amazonaws.com/zone.busch.store.image/1a15678f-a1a8-43df-ac3d-f2920da8dbdc.jpg&w=3840&q=100"  alt="" ></figure>





<p class="wp-block-paragraph"><strong>DP: And where can you find them?</strong></p>





<p class="wp-block-paragraph">Alexander Bouquet: This unique experience is not only available at Europa-Park on two rides, but also at many theme parks around the globe. With over 80 equipped rides worldwide, VR Coaster is the industry leader in this field. We also offer underwater VR in our own water park Rulantica and, of course, the YULLBE experience. The latter can be found not only in theme parks and family entertainment centres, but recently also on many cruise ships.</p>





<figure class="wp-block-image"><img  decoding="async"  src="https://images.creativebase.com/_next/image?url=https://s3.eu-central-1.amazonaws.com/zone.busch.store.image/3bdbc899-f8a3-4477-b2ea-9eb8839112c7.jpg&w=3840&q=100"  alt="" ></figure>





<p class="wp-block-paragraph"><strong>DP: How is a ride like this actually organised?</strong></p>





<p class="wp-block-paragraph">Robin Herrmann: The actual VR application runs entirely on the VR goggles themselves. It would simply be impossible to install laptops or computers for each seat in a rollercoaster train – if only because of vibrations, difficult power supply, cables to the goggles or the inconvenience of cleaning the goggles. That is why we have developed a system that runs entirely on mobile hardware and still enables very high visual quality. Only objects in the foreground are shown in real-time graphics (e.g. a cockpit or your own body, etc.), and everything further away is shown in a stereoscopically pre-calculated, high-resolution panoramic image sequence, which also runs at an extremely high frame rate.</p>





<figure class="wp-block-image"><img  decoding="async"  src="https://images.creativebase.com/_next/image?url=https://s3.eu-central-1.amazonaws.com/zone.busch.store.image/fd076268-9501-4dff-8c9e-c7936779ce4f.jpg&w=3840&q=100"  alt="" ></figure>





<p class="wp-block-paragraph">However, in order to experience VR in a moving car, its movement must also be precisely synchronised. For this purpose, a so-called “black box” is mounted on the rollercoaster train, which uses a wheel sensor to track the position of the train on the track and regularly recalibrates itself after each lap. This position is then sent via Bluetooth to the goggles, which can then display a precisely synchronised VR ride. For larger installations, we also use camera tracking in the station to automatically determine the seating position and head orientation of the guests.</p>





<p class="wp-block-paragraph">Marcus Ernst: In the case of coastiality, i.e. VR on roller coasters, it is advisable to install a conveyor belt system on larger rides to transport the used goggles from the exit side to the entrance side for re-issue. There is also the “Roam & Ride” concept, where guests already wear the VR headset in the queue and board the rollercoaster train with their glasses on so that the VR ride starts seamlessly. We are currently building a new award-winning experience that will also introduce a new generation of VR goggles and tracking.</p>





<p class="wp-block-paragraph"><strong>DP: What is the “experience”?</strong></p>





<p class="wp-block-paragraph">Alexander Bouquet: It’s very different. We have something for every age and thrill level, from the wild chase on jet skis of agent Amber Blake, to riding in a lorry through mines with our own Mascots Ed & Edda, to racing with cartoonish dinosaurs and Madame Freudenreich. Incidentally, my 7-year-old daughter looked at me with proud eyes after her first ride when she drove a screw figure during the experience that doesn’t even exist in reality – that’s immersion. There is also the huge advantage that we design the experiences in-house and realise them with the help of VR coasters, MACK interactive or MACK Animation.</p>





<p class="wp-block-paragraph">Marcus Ernst: Let’s continue to take the procedure for VR on rollercoasters as an example. Of course, it’s slightly different for free-roaming VR and swim VR. Our aim is always to “upgrade” an existing rollercoaster ride, to make it even more immersive. In other words, to be able to operate the existing ride for longer. This is economical and sustainable. We refer to the equipping of existing installations that are to be made more profitable as a retrofit. With different VR contents, the same roller coaster looks completely different every time and therefore invites you to take another ride. The aim is always to have little or no impact on the maximum hourly capacity of a ride – the most important criterion for theme parks.</p>





<p class="wp-block-paragraph">Robin Herrmann: In addition to upgrading an existing ride, another USP is to offer an experience that is only possible on a roller coaster or a freefall tower. Large rides with longer periods of weightlessness in particular enable a VR experience that would not be possible on a simulator chair. Technically, our system allows an unlimited number of VR headsets to be synchronised with the ride at the same time, but most installations allow guests to choose whether they want to ride with or without VR. Only some of our customers, such as Universal Studios Japan or Phantasialand, operate their VR rollercoasters exclusively with VR.</p>





<figure class="wp-block-image"><img  decoding="async"  src="https://images.creativebase.com/_next/image?url=https://s3.eu-central-1.amazonaws.com/zone.busch.store.image/eeb892f0-538a-45e1-b97f-b805b50120d0.jpg&w=3840&q=100"  alt="„Attack on Titan XR Ride“ für Universal Studios Japan" ><figcaption class="wp-element-caption">“Attack on Titan XR Ride” for Universal Studios Japan</figcaption></figure>





<p class="wp-block-paragraph"><strong>DP: And what happens if one of the goggles stops working?</strong></p>





<p class="wp-block-paragraph">Robin Herrmann: Goggle failures were particularly common in the early days, when we were still forced to use the mobile phone-based Gear VR. The mobile phones tended to overheat quickly in VR mode and the software was also difficult to control – after all, we were dealing with consumer hardware. However, with integrated glasses optimised for location-based VR, we now have an extremely robust hardware and software basis that has proven to be very reliable and resilient in daily operation and also gives us complete control over the entire software and operating system. The battery level and the status of the VR application are also constantly monitored so that failures can be avoided at the station.</p>





<p class="wp-block-paragraph"><strong>DP: How do you synchronise between visitors?</strong></p>





<p class="wp-block-paragraph">Marcus Ernst: That depends on the type of experience: “Roam and Ride” with a free roaming component and pure “VR Ride” experiences. In Roam and Ride, the players have to be able to see each other at all times, as do the operators, so that they don’t run into each other (in the free roaming part) and can also board the train safely. This takes place in the latest generation of goggles by means of inside-out tracking, whereas previously complex camera systems were required to track people.</p>





<p class="wp-block-paragraph"><strong>DP: You would think that relative tracking would be a horror – coloured light, individual movement and someone always has an ancient mobile phone transmitting in exactly the wrong band?</strong></p>





<p class="wp-block-paragraph">Robin Herrmann: Fortunately, synchronisation on the rollercoaster, on freefall towers or other rides doesn’t work optically, but with sensors on the vehicle and Bluetooth broadcast packets that communicate the vehicle’s position to the goggles. In fact, at the very beginning in 2014, we had real problems with the newly emerging smartwatches, which also sent massive amounts of broadcast packets for the first time. However, we were quickly able to get this under control.</p>





<p class="wp-block-paragraph">We only use camera tracking in the station for particularly large systems in order to determine the seating position and head orientation of guests using infrared markers. But this also works surprisingly well and robustly, even outdoors. Even with inside-out tracking, such as in our YULLBE GO attractions, colourful light and a lot of movement are no longer a problem, as the current generation of glasses can cope surprisingly well with changing or unsettled lighting situations.</p>





<p class="wp-block-paragraph"><strong>DP: And when we talk about timing: How precise does it have to be when you have VR goggles on the rollercoaster?</strong></p>





<p class="wp-block-paragraph">Robin Herrmann: The synchronisation does indeed have to be pretty precise, as otherwise curves would very quickly appear in the wrong place and people would be disorientated. It’s not about latency, but that the basic direction of movement is correct. A rollercoaster train travels at quite different speeds depending on the load, temperature and weather, and it also makes a big difference whether you are sitting at the very front or the very back. In order to synchronise this precisely, the train is equipped with a so-called “black box”, which uses a wheel sensor to determine where the vehicle is currently located on the track. This information is constantly sent to the goggles, which can then generate a precisely synchronised VR ride.</p>





<p class="wp-block-paragraph">This is still the biggest problem with VR in home applications: When you move through virtual environments with quick turns, seasickness immediately sets in. However, as soon as the movements and rotations also take place in the real world and the VR journey is synchronised with them, any feeling of dizziness disappears. In fact, by augmenting real rollercoasters and rides with VR, we have created the only setup that enables dynamic, fast flights through VR worlds without motion sickness.</p>





<p class="wp-block-paragraph">Alexander Bouquet: This is where our patented technology and precise content creation come into play. The interplay between hardware and content is our guarantee for success. I keep seeing new rides or simulators with VR headsets at trade fairs and this is where the wheat is separated from the chaff – 80 per cent of simulators are asynchronous, which causes motion sickness.</p>





<figure class="wp-block-image"><img  decoding="async"  src="https://images.creativebase.com/_next/image?url=https://s3.eu-central-1.amazonaws.com/zone.busch.store.image/8dd35d49-b947-4be0-af37-7868be271fd9.jpg&w=3840&q=100"  alt="" ></figure>





<p class="wp-block-paragraph"><strong>DP: When it comes to VR goggles: Which ones are good?</strong></p>





<p class="wp-block-paragraph">Robin Herrmann: On rollercoasters or rides in general, where 3DOF tracking is sufficient, the G2 or G3 devices from Pico are the best choice. The goggles are very robust and we were also able to modify them perfectly for location-based entertainment operations (e.g. with sun protection against display damage and a hard-wearing cover with a head strap from our own production). We also use these goggles in underwater applications, for which we have developed a completely sealed housing. In the free roaming area, we currently mainly use the HTC Vive Focus 3, as these goggles offer robust inside-out tracking and can recognise learned spaces very quickly and orientate themselves immediately. You can also swap the battery here, which makes operation more efficient.</p>





<p class="wp-block-paragraph"><strong>DP: And how much effort does it take for your developers to switch between the different devices and SDKs?</strong></p>





<p class="wp-block-paragraph">Alexander Bouquet: As we work with Android-based VR headsets in practically all set-ups, it’s never really that difficult. Both Unity and Unreal make the work relatively easy. Only in the more complex YULLBE PRO installation did we initially use Windows-based backpack PCs, but even here we were able to successfully switch to mobile glasses from HTC. What’s more, a lot of real-time graphics only occur in the free roaming setups, while on the rollercoaster or underwater, mainly pre-calculated image sequences are streamed, which is independent of the SDK or VR platform.</p>





<figure class="wp-block-image"><img  decoding="async"  src="https://images.creativebase.com/_next/image?url=https://s3.eu-central-1.amazonaws.com/zone.busch.store.image/a2c31390-e7fd-4843-add1-0bbdb7fbe15a.jpg&w=3840&q=100"  alt="" ></figure>





<p class="wp-block-paragraph"><strong>DP: Let’s talk about the content: How were the assets created?</strong></p>





<p class="wp-block-paragraph">Robin Herrmann: Basically, it’s the same approach as with CG productions in the animation sector. Assets are usually created “from scratch” by our team, or we receive existing 3D models from our clients, for example if it’s an IP for which films or computer games already exist. With the DC Comics IPs in particular, there were already many existing models that we could build on. There are different workflows depending on where the assets are used: Objects that appear as real-time graphics in the foreground need to be more optimised and work with fewer polygons. What is used in the pre-rendered layer, on the other hand, can of course be as complex as you like, even if the panorama rendering is realised in Unity or Unreal.</p>





<p class="wp-block-paragraph"><strong>DP: People say that Unity is much better for this? Why Unreal?</strong></p>





<p class="wp-block-paragraph">Robin Herrmann: We use Unity almost exclusively for the actual VR apps. The pre-rendered part, which is streamed as a high-resolution image sequence during the experience, is usually also rendered with Unity, but we are using Unreal more and more often for this, as you can achieve great results even faster here. In rare cases, we still render in the classic way, for example with V-Ray or Arnold – but due to the extremely high resolution and frame rate, this takes weeks of rendering time, whereas the real-time engines can render even our largest projects in a single day.</p>





<p class="wp-block-paragraph"><strong>DP: What are the restrictions that you have imposed on yourselves to ensure that it always works?</strong></p>



<p class="wp-block-paragraph">Marcus Ernst: The pre-rendered stereoscopic panoramas have a resolution of 6K×6K, and run at an average frame rate of 60 fps. However, the current VR glasses could not display a higher resolution anyway. With real-time geometry in the foreground, you should always stay below 300,000 vertices with current mobile VR hardware and not use overly complex shaders so that the frame rate remains high.</p>





<p class="wp-block-paragraph">Alexander Bouquet: For me, the story is the essential part – emotions are our currency and our restriction here is as follows: If it doesn’t kick, it’s out. Our technology is “military grade”, we produce patented parts to refine the standard headsets and everything is customised to the respective track with its unique layout. These are standards that we orientate ourselves by.</p>





<p class="wp-block-paragraph"><strong>DP: When we look at a development like this: How many iterations were necessary for the different rides until it was “Technically Clean” for you?</strong></p>





<p class="wp-block-paragraph">Robin Herrmann: As far as the visual quality is concerned, a lot can be tested and checked directly on the computer or in the office, so we don’t have to test so many iterations on the actual ride. However, it is important that certain effects are frame-accurate, such as a bump in a curve that translates into a virtual collision with a monster – and this coordination sometimes requires one or two test rides, because you simply can’t feel it on the office chair. In the case of our “Diving Theatre”, our underwater ride with counter-current system and effect jets, it’s the other way round – here the fine-tuning takes place more in the control of the system, which has to be precisely synchronised with the VR dramaturgy.</p>





<figure class="wp-block-image"><img  decoding="async"  src="https://images.creativebase.com/_next/image?url=https://s3.eu-central-1.amazonaws.com/zone.busch.store.image/5916a990-2039-46f4-ab67-271d6f0ae054.jpg&w=3840&q=100"  alt="" ></figure>





<p class="wp-block-paragraph">Alexander Bouquet: Of course, it took many rounds to get from POC to an industry-standard product suitable for mass production. However, we have created a pipeline that enables us to measure all rollercoasters in the world and supply them with hardware and software relatively quickly. This is where our in-house AMS – Attraction Management System – helps us, which we also use to ping every pair of glasses in the world and generate statistics that are relevant for our controlling and accounting.</p>





<p class="wp-block-paragraph"><strong>DP: There are few activities that are more interactive than bumper cars – do you have anything on offer?</strong></p>





<p class="wp-block-paragraph">Robin Herrmann: In principle, we do the same thing here as with a free roaming installation: we use camera-based tracking to follow the movement of passengers and vehicles. This makes it possible to replace the real driving area in the VR world with a much larger area with remotely controlled vehicles. This opens up very exciting possibilities: On the one hand, we greatly enlarge the travelling area and the vehicles, which increases the perceived speed enormously. On the other hand, the vehicles also appear huge and no longer as tiny as in the real world. One of the highlights of the experience is when the driving surface is folded up halfway and you can virtually drive up the wall – it works really well and is totally amazing!</p>





<p class="wp-block-paragraph"><strong>DP: Are there interactions beyond the users themselves? For example, can I drive over the robot spider?</strong></p>





<p class="wp-block-paragraph">Robin Herrmann: We stage a big boss opponent like that, which of course doesn’t exist in the real world, in such a way that it can’t be touched, so it pulls its leg away just before the collision. But of course you can touch all the other vehicles and feel it quite clearly – just like in a real bumper car.</p>





<p class="wp-block-paragraph"><strong>DP: Let’s talk about what happens inside the goggles: What works and why?</strong></p>





<p class="wp-block-paragraph">Robin Herrmann: A key feature is always the enlargement of the virtual track layout to increase the perceived speed and also to simulate much greater heights or steeper drops. In addition, you can virtually “bend up” curves to create more space, similar to bending a paper clip apart: A 90° right turn becomes a 45° right turn and you have a little more room to shape the virtual world. It even works so well that at some points we let people travel backwards briefly in VR, even though the train is travelling forwards as normal – that always amazes the guests the most.</p>





<figure class="wp-block-image"><img  decoding="async"  src="https://images.creativebase.com/_next/image?url=https://s3.eu-central-1.amazonaws.com/zone.busch.store.image/e4112b32-0e2e-4944-9969-93498b593237.jpg&w=3840&q=100"  alt="„The Great Lego Race“, Legoland Florida" ><figcaption class="wp-element-caption">“The Great Lego Race”, Legoland Florida</figcaption></figure>





<figure class="wp-block-image"><img  decoding="async"  src="https://images.creativebase.com/_next/image?url=https://s3.eu-central-1.amazonaws.com/zone.busch.store.image/0f674d24-7854-416a-8079-b7bc5350f406.jpg&w=3840&q=100"  alt="Dank speziellem Sonnenschutz können die VR-Coaster-Brillen auch draußen verwendet werden." ><figcaption class="wp-element-caption">Thanks to special sun protection, the VR coaster goggles can also be used outside.</figcaption></figure>





<p class="wp-block-paragraph">With freefall towers, we also swivel our gaze vertically downwards into the depths as soon as the fall begins. As the guests are weightless from this moment anyway and no “up” or “down” can be felt, this trick also works perfectly. Some real freefall towers create this effect with very elaborate swivelling seats in the real world – we offer this virtually free of charge with a gentle virtual camera pan.</p>





<figure class="wp-block-image"><img  decoding="async"  src="https://images.creativebase.com/_next/image?url=https://s3.eu-central-1.amazonaws.com/zone.busch.store.image/4469be35-c508-406c-9c1c-76627bf88cf0.jpg&w=3840&q=100"  alt="Freeroaming in „Yullbe“" ><figcaption class="wp-element-caption">Free-roaming in “Yullbe”</figcaption></figure>





<p class="wp-block-paragraph">Marcus Ernst: Anything that feels natural is possible in free roaming. Free roaming is the logical development of 360° videos: It feels natural to be able to look around freely, and it feels just as natural to then be able to move freely around the room. It’s always an experience that changes everything when you experience it for the first time. It really feels like Star Trek in the holodeck, because you can move completely freely in a virtual world, which you can’t do at home. What you can do very well is play with the actual dimensions of the room. Example: step length manipulation. Here, 1 metre, which is walked in real life, sometimes becomes only 90 cm in VR, or sometimes 1.20 m. This allows me to make rooms appear larger or smaller than they actually are, as it remains plausible for our brain as long as I don’t overdo it. We have defined a factor of around 1.5 as the maximum, above that everything feels like on the conveyor belts at the airport (every movement becomes extremely fast). But we can easily make a real 80 square metre room feel like a 200 square metre room, and not a single guest has ever come out of the 80 square metre attraction and said “Oh, that room was small”.</p>





<p class="wp-block-paragraph"><strong>DP: What doesn’t work at all?</strong></p>





<p class="wp-block-paragraph">Marcus Ernst: True photorealism is always an issue, at least with free-roaming real-time graphics – the glasses simply can’t do that yet. However, good storytelling is much more important. Due to the Uncanny Valley, we can only fail at recreating reality. Our brain always and immediately recognises whether something is real or fake. This is also the reason why, even today, CG figures still appear somewhat cool and rigid. You have to incorporate imperfections at every point. I’m much more likely to form an emotional attachment to an overdrawn character than to one who tries to look realistic but inevitably always fails.</p>





<p class="wp-block-paragraph">For comparison: the almost realistic animatronics of, for example, Abraham Lincoln at Disney look extremely creepy, whereas the totally overdrawn characters from the films look cute. Horror and jumpscares work extremely well, as VR and MR are immersive media. You can’t escape the impressions unless you close your eyes. For an appealing horror experience, however, the scene has to seem somewhat realistic so that I can relate to it and recognise the danger as such. Of course, suspension horror can and should be used here too, but a monster has to look really intimidating when it jumps towards you, just like in computer games. Carelessly animated assets that are immediately recognisable as such do not create a feeling of fear – rather humour.</p>





<figure class="wp-block-image"><img  decoding="async"  src="https://images.creativebase.com/_next/image?url=https://s3.eu-central-1.amazonaws.com/zone.busch.store.image/938a82e2-9d42-46b8-a017-dfcac498f3a9.jpg&w=3840&q=100"  alt="" ></figure>





<p class="wp-block-paragraph"><strong>DP: And how much “reality” is needed?</strong></p>





<p class="wp-block-paragraph">Alexander Bouquet: We always ask the question when creating the content. We often include a path or a trail as a directional indicator for our guests. It’s more pleasant – especially with agile tracks – when you can see where the journey is about to take you. Then we create a real flow for the passenger.</p>





<p class="wp-block-paragraph"><strong>DP: There are “IPs” in the rides – how did you choose what appears in the ride?</strong></p>





<p class="wp-block-paragraph">Alexander Bouquet: Basically, the IP and the ride have to fit together and be harmonised. We naturally have rides with our internal IPs such as Ed & Edda or Snorri in our portfolio, but strong external IPs are also represented. The new Phantom of the Opera experience by Andrew Lloyd Webber is a strong addition to the park and fits in perfectly with the layout of Eurosat.</p>





<p class="wp-block-paragraph">Robin Herrmann: In fact, our customers, such as Universal or Six Flags, often already owned the rights to well-known IPs and asked us to create the relevant content for them. We then sat at the table with three parties who all had to be satisfied: Warner Bros, DC Comics and Six Flags, all with different priorities. While DC was mainly concerned with visual style and graphic quality, Six Flags was mainly concerned with being ready in time for the start of the season. The content productions for Universal Studios were always particularly complex projects, for example with the “Attack on Titan” IP, which the VR Coaster team realised completely in-house.</p>





<p class="wp-block-paragraph"><strong>DP: What technology are you currently testing?</strong></p>





<p class="wp-block-paragraph">Alexander Bouquet: We have several streams running at the moment, in which we are once again merging new engineering skills from MACK Rides with a new generation of goggles. The XR Maze, including a shooting device and recoil module, is also on the roadmap, right through to <em>psssst</em>, which we use in the park for queue entertainment. It’s going to be a very exciting year. We are also putting the Apple Vision Pro through the wringer and are of course looking to see what we can tease out of it.</p>





<p class="wp-block-paragraph"><strong>DP: Who comes up with the idea of doing something like this?</strong></p>





<p class="wp-block-paragraph">Robin Herrmann: As part of his professorship at Kaiserslautern University of Applied Sciences in 2014, Thomas Wagner was looking for a way to combine VR simulations with real movement and approached rollercoaster manufacturer Mack Rides. Fortunately, Michael Mack, Managing Partner of Europa-Park, immediately allowed him to carry out his first test rides with VR on the Blue Fire and Pegasus roller coasters. The rest is history: This was followed by a successful patent application and the joint founding of VR Coaster GmbH & Co KG.</p>





<p class="wp-block-paragraph">Alexander Bouquet: I am always fascinated by the people I get to work with every day. On the one hand, the crazy Professor Thomas Wagner, who brought VR to rides, the innovation-driven and extremely creative visionary Michael Mack and the person who brings all the rides here – Christian von Elverfeldt as Managing Director of MACK Rides. In addition, there are brilliant engineers, inventors, creatives, master planners, technology geeks and other crazy people with whom it is great fun to develop the thrills of tomorrow. We call ourselves the Emotioneers of Tomorrow.</p>





<p class="wp-block-paragraph"><strong>DP: What kind of background do your developers have?</strong></p>





<p class="wp-block-paragraph">Robin Herrmann: In the creative team, there are classic artists in the fields of 3D modelling, animation, concept art and game development. We also have pure software developers and a hardware department that deals with the development of electronics and engineering, such as the modification of VR goggles.</p>





<p class="wp-block-paragraph">Alexander Bouquet: In terms of developers, we differentiate between digital experience artists and hardware developers or engineers who develop all the haptic parts. This ranges from specially designed weapons including tracking LEDs to a helicopter vibrating plate with 15 tonnes and zero-G forces.</p>





<figure class="wp-block-image"><img  decoding="async"  src="https://images.creativebase.com/_next/image?url=https://s3.eu-central-1.amazonaws.com/zone.busch.store.image/1894667a-c3fd-4024-9524-521adcc978b1.jpg&w=3840&q=100"  alt="„Alpha Mods P.D.“ ist ein rasantes VR-Erlebnis und eine IP der MACK One" ><figcaption class="wp-element-caption">“Alpha Mods P.D.” is a fast-paced VR experience and an IP of MACK One</figcaption></figure>





<p class="wp-block-paragraph"><strong>DP: And how often does each creator have to ride the ride before the audience arrives?</strong></p>





<p class="wp-block-paragraph">Robin Herrmann: Fortunately, not too often now, as we can record the ride and play it back on the computer during the development process. However, a few dozen rides are usually necessary for the final fine-tuning before the Mack family is the first to test the finished ride.</p>





<p class="wp-block-paragraph"><strong>DP: If we look back over the last few years, what were the dead ends?</strong></p>





<p class="wp-block-paragraph">Marcus Ernst: If we had known 4 years ago that the backpack PCs would be phased out, we would have developed for mobile right away or focussed on streaming. And the decision in favour of a full body tracking system is still a very conscious one, but one that is constantly being rechallenged because it is extremely expensive and time-consuming. What we have learnt is how incredibly important it is to know your B2B/B2C target group and where they spend their time. And that’s before you start developing anything. You need to know exactly who your target group is, and where is it? And where not? And then you have to go with your product exactly where they are, right in the middle. And relieve them of all their technology worries, only very few people are interested in technology.</p>





<figure class="wp-block-image"><img  decoding="async"  src="https://images.creativebase.com/_next/image?url=https://s3.eu-central-1.amazonaws.com/zone.busch.store.image/b3321c0c-c818-4ded-b7b3-52f864d266b4.jpg&w=3840&q=100"  alt="" ></figure>





<p class="wp-block-paragraph"><strong>DP: If we look to the near future, what will be the trends?</strong></p>





<p class="wp-block-paragraph">Marcus Ernst: Mixed reality in particular will be extremely exciting, as a location-based experience where guests once again don’t have to worry about the technology themselves. We offer what they would like to have but can’t or don’t want to afford.</p>





<p class="wp-block-paragraph"><strong>DP: Dreams of the future: What will the VR Coaster Showreel look like in 2043?</strong></p>





<p class="wp-block-paragraph">Alexander Bouquet: 2043 – everyone builds their own individualised experience at home or on the way to the theme park – fully customisable content. I ride and feel it with all my senses, including full body tracking on the rollercoaster. Content is customised according to my taste or my body stats. Do I need cheerful music with flowers and forests or heavy metal in the Underworld – the glasses already know, because everything communicates and is networked.</p><p>The post <a href="https://digitalproduction.com/2024/06/28/vr-jenseits-vom-hype/">VR beyond the hype</a> first appeared on <a href="https://digitalproduction.com">DIGITAL PRODUCTION</a> and was written by <a href="https://digitalproduction.com/author/belabeier/">Bela Beier</a>. </p></div>]]></content:encoded>
					
		
		
		<enclosure url="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2024/09/image-12.webp?fit=1920%2C1080&#038;quality=72&#038;ssl=1" length="226212" type="image/webp" />
<media:content xmlns:media="http://search.yahoo.com/mrss/" url="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2024/09/image-12.webp?fit=1200%2C675&#038;quality=72&#038;ssl=1" width="1200" height="675" medium="image" type="image/jpeg">
	<media:copyright>DIGITAL PRODUCTION</media:copyright>
	<media:title></media:title>
	<media:description type="html"><![CDATA[]]></media:description>
</media:content>
<media:thumbnail xmlns:media="http://search.yahoo.com/mrss/" url="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2024/09/image-12.webp?fit=1200%2C675&#038;quality=72&#038;ssl=1" width="1200" height="675" />
<post-id xmlns="com-wordpress:feed-additions:1">144220</post-id>	</item>
		<item>
		<title>3D Audio &#8211; Into the acoustic matrix</title>
		<link>https://digitalproduction.com/2023/11/14/3d-audio-into-the-acoustic-matrix/</link>
		
		<dc:creator><![CDATA[Martin Rieger]]></dc:creator>
		<pubDate>Tue, 14 Nov 2023 10:48:00 +0000</pubDate>
				<category><![CDATA[Articles]]></category>
		<category><![CDATA[3D Audio]]></category>
		<category><![CDATA[Apple]]></category>
		<category><![CDATA[AR]]></category>
		<category><![CDATA[dolby]]></category>
		<category><![CDATA[DP2306]]></category>
		<category><![CDATA[Epic Unreal Engine]]></category>
		<category><![CDATA[fmod]]></category>
		<category><![CDATA[spatial audio]]></category>
		<category><![CDATA[subscribers]]></category>
		<category><![CDATA[VR]]></category>
		<guid isPermaLink="false">https://digitalproduction.com/?p=149573</guid>

					<description><![CDATA[<div style="margin: 5px 5% 10px 5%;"><img src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2024/10/vrtestung_spatial_audio_matrix_Dynamic_neon_blue_photorealistic_4615f98d-f0b8-4589-9aac-fc3390fc6c48.jpg?fit=1080%2C1080&quality=80&ssl=1" width="1080" height="1080" title="" alt="" /></div><div><p>Welcome to the second part, which is all about  3D sound, or rather 3D sound is all about us. In the last article, we answered the questions about how you can enjoy three-dimensional sound and what software and hardware you need.</p>
<p>The post <a href="https://digitalproduction.com/2023/11/14/3d-audio-into-the-acoustic-matrix/">3D Audio – Into the acoustic matrix</a> first appeared on <a href="https://digitalproduction.com">DIGITAL PRODUCTION</a> and was written by <a href="https://digitalproduction.com/author/martinrieger/">Martin Rieger</a>. </p></div>]]></description>
										<content:encoded><![CDATA[<div style="margin: 5px 5% 10px 5%;"><img src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2024/10/vrtestung_spatial_audio_matrix_Dynamic_neon_blue_photorealistic_4615f98d-f0b8-4589-9aac-fc3390fc6c48.jpg?fit=1080%2C1080&quality=80&ssl=1" width="1080" height="1080" title="" alt="" /></div><div><p class="wp-block-paragraph">So far, we’ve only scratched the surface of the question: “Yes, but what am I listening to anyway?” So now it’s all about content. It’s not so easy to say in general terms where 3D audio actually enables good content. Depending on the context, there can be a completely different technology behind it in the form of formats or game engines.</p>





<p class="wp-block-paragraph">That’s why I’ve come up with a structure that I’ll simply call the “3D audio matrix” – or pyramid? Be that as it may, the whole thing is intended to provide a reference for applications and their prime examples, the advantages and disadvantages of 3D sound, and of course an overview of formats and tools with their respective peculiarities. You have to take a few steps to understand this: 3D audio is not just 3D audio. So let’s first go back several dimensions to the origin, i.e. from 3D audio to 2D, 1D, 0D..</p>





<figure class="wp-block-gallery has-nested-images columns-8 is-cropped wp-block-gallery-1 is-layout-flex wp-block-gallery-is-layout-flex">

<figure class="wp-block-image size-thumbnail"><img data-recalc-dims="1" height="150" width="150"  decoding="async"  data-id="149584"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/0DoF_DolbyAtmosMusic_protoolspanner_screenshot-4k-hd-150x150.png?resize=150%2C150&ssl=1"  alt=""  class="wp-image-149584" ></figure>





<figure class="wp-block-image size-thumbnail"><img data-recalc-dims="1" height="150" width="150"  decoding="async"  data-id="149585"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/0DoF_MPEG-H_authoring-tool-4k-hd-150x150.png?resize=150%2C150&ssl=1"  alt=""  class="wp-image-149585" ></figure>





<figure class="wp-block-image size-thumbnail"><img data-recalc-dims="1" height="150" width="150"  decoding="async"  data-id="149586"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/0DoF_MPEG-H_Mhapi-4k-hd-150x150.png?resize=150%2C150&ssl=1"  alt=""  class="wp-image-149586" ></figure>





<figure class="wp-block-image size-thumbnail"><img data-recalc-dims="1" height="150" width="150"  decoding="async"  data-id="149587"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/0DoF_Music_Sony360RA-create-mixing-hd-150x150.png?resize=150%2C150&ssl=1"  alt=""  class="wp-image-149587" ></figure>





<figure class="wp-block-image size-thumbnail"><img data-recalc-dims="1" height="150" width="150"  decoding="async"  data-id="149589"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/3DoF_360Video_SUP_Tools-4k-hd-150x150.png?resize=150%2C150&ssl=1"  alt=""  class="wp-image-149589" ></figure>





<figure class="wp-block-image size-thumbnail"><img data-recalc-dims="1" height="150" width="150"  decoding="async"  data-id="149591"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/3DoF_Ambisonics_7.1.4-Lautsprecher-Setup-4k-hd-150x150.png?resize=150%2C150&ssl=1"  alt=""  class="wp-image-149591" ></figure>





<figure class="wp-block-image size-thumbnail"><img data-recalc-dims="1" height="150" width="150"  decoding="async"  data-id="149593"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/3DoF_AppleApproach-2023-10-02-um-17.53.27-hd-150x150.png?resize=150%2C150&ssl=1"  alt=""  class="wp-image-149593" ></figure>





<figure class="wp-block-image size-thumbnail"><img data-recalc-dims="1" height="150" width="150"  decoding="async"  data-id="149594"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/3DoF_AppleApproach-2023-10-02-um-17.53.35-hd-150x150.png?resize=150%2C150&ssl=1"  alt=""  class="wp-image-149594" ></figure>





<figure class="wp-block-image size-thumbnail"><img data-recalc-dims="1" height="150" width="150"  decoding="async"  data-id="149595"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/3DoF_AppleApproach-2023-10-02-um-17.53.44-hd-150x150.png?resize=150%2C150&ssl=1"  alt=""  class="wp-image-149595" ></figure>





<figure class="wp-block-image size-thumbnail"><img data-recalc-dims="1" height="150" width="150"  decoding="async"  data-id="149596"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/3DoF_AppleApproach-2023-10-02-um-17.57.13-hd-150x150.png?resize=150%2C150&ssl=1"  alt=""  class="wp-image-149596" ></figure>





<figure class="wp-block-image size-thumbnail"><img data-recalc-dims="1" height="150" width="150"  decoding="async"  data-id="149598"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/6DoF_GameEngine_Unity_AudioObjekt-hd-150x150.png?resize=150%2C150&ssl=1"  alt=""  class="wp-image-149598" ></figure>





<figure class="wp-block-image size-thumbnail"><img data-recalc-dims="1" height="150" width="150"  decoding="async"  data-id="149599"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/6DoF_GameEngine_UnityFmod-hd-150x150.png?resize=150%2C150&ssl=1"  alt=""  class="wp-image-149599" ></figure>





<figure class="wp-block-image size-thumbnail"><img data-recalc-dims="1" height="150" width="150"  decoding="async"  data-id="149597"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/6DoF_Middleware_FMod_Screenshot-hd-150x150.png?resize=150%2C150&ssl=1"  alt=""  class="wp-image-149597" ></figure>





<figure class="wp-block-image size-thumbnail"><img data-recalc-dims="1" height="150" width="150"  decoding="async"  data-id="149602"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/wwise-1-snapshot-hd-150x150.png?resize=150%2C150&ssl=1"  alt=""  class="wp-image-149602" ></figure>





<figure class="wp-block-image size-thumbnail"><img data-recalc-dims="1" height="150" width="150"  decoding="async"  data-id="149601"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/wwise-2-snapshot-hd-150x150.png?resize=150%2C150&ssl=1"  alt=""  class="wp-image-149601" ></figure>





<figure class="wp-block-image size-thumbnail"><img data-recalc-dims="1" height="150" width="150"  decoding="async"  data-id="149603"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/wwise-3-snapshot-hd-150x150.png?resize=150%2C150&ssl=1"  alt=""  class="wp-image-149603" ></figure>





<figure class="wp-block-image size-thumbnail"><img data-recalc-dims="1" height="150" width="150"  decoding="async"  data-id="149592"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/3DoF_360Video_EUsavesLives-Filmstill-hd-150x150.png?resize=150%2C150&ssl=1"  alt=""  class="wp-image-149592" ></figure>





<figure class="wp-block-image size-thumbnail"><img data-recalc-dims="1" height="150" width="150"  decoding="async"  data-id="149588"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/3DoF_360Video_SUP_Filmstill-hd-150x150.png?resize=150%2C150&ssl=1"  alt=""  class="wp-image-149588" ></figure>

</figure>





<h2 id="the-3d-audio-matrix-overview" class="wp-block-heading">The 3D audio matrix overview</h2>





<p class="wp-block-paragraph">How, 0D? Admittedly, that’s a bit abstract. How can you visualise it? I’m taking a mathematical approach here. Don’t worry, it won’t be any more difficult than in your first geometry lesson. Let’s imagine a coordinate system in which our head is at the origin.</p>





<p class="wp-block-paragraph">Now let’s combine this with the question: what kind of geometric object do I have?<br />0D: A point in the coordinate system without spatial information, the origin (0|0|0)<br />1D: A line, here I can move from left to right (x|0|0)<br />2D: A plane, now I can also move forwards and backwards (x|y|0)<br />3D: A cube/sphere that adds height information (x|y|z)</p>





<h2 id="audio-formats-that-you-already-know" class="wp-block-heading">Audio formats that you already know</h2>





<p class="wp-block-paragraph">So far so good. Now imagine you want to place an audio object in a room. There are already audio formats that we know from our everyday lives.</p>





<p class="wp-block-paragraph">0D: Mono. No room information can be added.<br />1D: Stereo. You can at least move your sound to the left and right<br />2D: Surround. With 5.1, for example, you can also place sound at the back<br />3D Audio: Here you can also move sound up or down.</p>





<h2 id="but-wait-theres-more-degrees-of-freedom" class="wp-block-heading">But wait, there’s more: degrees of freedom</h2>





<p class="wp-block-paragraph">All overviews in this direction that I know of stop at Dolby Atmos, but beyond that it’s just getting started with 3 or 6 degrees of freedom. OK – what are degrees of freedom? Also known as DoF (Degrees of Freedom), the degree describes the following.</p>





<ul class="wp-block-list">

<li>0DoF: It is not defined where you actually look while consuming the content, except that you look forwards, like in films, you are not supposed to turn around.</li>





<li>3DoF: Here you can also rotate your gaze, as we know it from 360° videos. Also known as head tracking (rotation).</li>





<li>6DoF: This concept is already familiar from 3D games, where the player also moves through a 3D space (translation).</li>

</ul>





<h2 id="headphones-simplify-understanding" class="wp-block-heading">Headphones simplify understanding </h2>





<p class="wp-block-paragraph">It’s as simple as that – or not. Because it’s always a question of which direction I’m looking at my concept from. Here I am referring exclusively to headphone playback. Do some of you remember the localisation and externalisation in the head from the last article? This is a good example of how mono and stereo are always perceived in the head. Even if I add reverb to create a depth gradation, I can still only move an object to the left and right and only create a difference in volume and time (ILD, ITD). But the third factor (HRTF) is missing, with which I can really differentiate between front and back. For me, stereo is therefore one-dimensional (left and right), which is often confused with the two channels.<br />With surround sound via headphones, a binaural attempt is made to generate an impression of the front and rear – with 3D audio, this also includes height information. This is where the aforementioned HRTF comes into play in order to really “let the sound travel out of our head” during the calculation. So we go from an in-head localisation to an externalisation. Although at the end of the day, a two-channel stereo signal is played back. But watch out! It’s not “normal” stereo, but binaural, with HRTF-filtered extension.</p>





<figure class="wp-block-image size-full"><img data-recalc-dims="1"  fetchpriority="high"  decoding="async"  width="1164"  height="1080"  sizes="(max-width: 1200px) 100vw, 1200px"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/3DoF_360Video_SUP_Filmstill-hd.png?resize=1164%2C1080&quality=72&ssl=1"  alt="360° – Sonnenuntergangstour auf dem Chiemsee mit dem Stand-Up-Paddle, zu finden auch auf YouTube. Hier ein Standbild als „Little Planet“ Projektion, weil es ja irgendwie nett aussieht."  class="wp-image-149588" ><figcaption class="wp-element-caption">360° sunset tour on Lake Chiemsee with the stand-up paddle, also available on YouTube.
Here is a still image as a “Little Planet” projection, because it looks kind of nice.</figcaption></figure>





<figure class="wp-block-image size-full"><img data-recalc-dims="1"  decoding="async"  width="1200"  height="764"  sizes="(max-width: 1200px) 100vw, 1200px"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/3DoF_360Video_SUP_Tools-4k-hd.png?resize=1200%2C764&quality=72&ssl=1"  alt="So in etwa sieht dann die Mischung aus, viele Spuren, viele Parameter, das Bild einmal equirectangular, um den Überblick zu behalten und drunter die Ansicht im Video-Player."  class="wp-image-149589" ><figcaption class="wp-element-caption">This is roughly what the mix looks like, many tracks, many parameters, the image once equirectangular to keep the overview 
and below that the view in the video player.</figcaption></figure>





<h2 id="the-walls-of-the-concept-are-shaking-for-loudspeakers" class="wp-block-heading">The walls of the concept are shaking for loudspeakers</h2>





<p class="wp-block-paragraph">It’s not so easy for loudspeakers, because even if I only have one loudspeaker in the room, it’s still “somehow three-dimensional”. Besides, the term stereophony actually says: more than mono, so it would even include 3D audio, but I think that in everyday production everyone thinks of stereo as a “two-channel audio file”. With stereo playback, you place two speakers with yourself as the third point in an equilateral triangle. And as we know, triangles are actually two-dimensional. Nevertheless, I can only move my sound between the two loudspeakers, not beyond them. Even if I add reverb, you get a feeling of depth, but you don’t know whether the room is actually in front or behind you. Nevertheless, two-channel stereo is still a stable reproduction method and, in my opinion, not broken, regardless of what the various hectic marketing newsletters claim.</p>





<h2 id="0-2d-aka-normal-media" class="wp-block-heading">0-2D aka “normal media”</h2>





<p class="wp-block-paragraph">OK, of course I could go on and on about where to find mono, stereo and surround content. But that’s everyday life. We use mono every day for voice messages, we stream music in stereo and if you have the right TV, you can watch films in surround. The more I think about it, the more I realise that surround is not that far removed from 3D audio in this respect. The only thing still missing is the height information – applause. So the quantum leap from stereo to surround actually seems greater than that from surround to 3D. I would also subscribe to this for film, for example, because when streaming, the great 3D sound from the cinema has to be compressed and is then usually a 5.1 that tries to retain a few height elements.</p>





<figure class="wp-block-image size-full"><img data-recalc-dims="1"  decoding="async"  width="1200"  height="675"  sizes="(max-width: 1200px) 100vw, 1200px"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/0DoF_Music_Sony360RA-create-mixing-hd.png?resize=1200%2C675&quality=72&ssl=1"  alt="Das PlugIn Sony 360 Reality Audio ermöglich das Platzieren von bunten Audioobjekten kugelförmig um unseren virtuellen Kopf."  class="wp-image-149587" ><figcaption class="wp-element-caption">The Sony 360 Reality Audio plug-in makes it possible to place colourful audio objects in a spherical shape around our virtual head.</figcaption></figure>





<h2 id="audio-should-be-immersive" class="wp-block-heading">Audio should be immersive</h2>





<p class="wp-block-paragraph">But we remember that immersive audio should ideally be so natural that we don’t even think much about the technology. And 3D audio brings us a lot closer to this impression than surround sound alone.<br />However, there is another factor why 3D audio can bring even more benefits under the bonnet than height information. The surround formats meant here are either quadraphonic 4.0, 5.1 or 7.1. The numbers are channel information, e.g. for 5.1, channels 1 to 6 are: Front Left, Front Right, Centre, LFE (low frequency effect), Left Surround, Right Surround.<br />So if you want height information, you need even more channels, such as 5.1.4. You then have four more speakers on the ceiling. But as you can guess, that’s kind of impractical. And what if I have a 7.1.2 system, how are the channels converted? That’s why audio technology is moving away from channel-based formats and towards so-called NGA, next-generation audio formats.<br /></p>





<figure class="wp-block-image size-full"><img data-recalc-dims="1"  decoding="async"  width="1200"  height="670"  sizes="(max-width: 1200px) 100vw, 1200px"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/0DoF_MPEG-H_authoring-tool-4k-hd.png?resize=1200%2C670&quality=72&ssl=1"  alt="MPEG-H ermöglicht nicht 3D Audio Panning, sondern auch personalisierte Audiowiedergabe wie Mehrsprachigkeit etc."  class="wp-image-149585" ><figcaption class="wp-element-caption">MPEG-H not only enables 3D audio panning, but also personalised audio playback such as multilingualism etc.</figcaption></figure>





<h2 id="object-based-audio" class="wp-block-heading">Object-based audio</h2>





<p class="wp-block-paragraph">To understand why 3D audio can be even better than surround sound, let’s take a brief look at object-based audio. A major advantage of object-based audio is the independence of the channels, as the rendering only takes place at the end user. Systems that want to use Next Generation Audio must therefore have a corresponding decoder integrated. This ensures optimised audio playback at all times.<br />Another exciting possibility in addition to the movement of audio content in 3D space is the personalisation of these audio objects. My favourite example is watching a football match, where I can simply mute the “commentator” audio object. Podcasts would be an equally exciting application. Let’s say we want to listen to a news podcast that is an hour long, but we only have 10 minutes. We tell our smartphone this and the podcast is automatically shortened to the most important 10 minutes using metadata.<br />MPEG-H is able to do this and is already standard in broadcasting in Korea and Brazil. The biggest competitor is AC-4 aka Dolby Atmos, which only allows such personalisation and interaction according to its own specifications.</p>





<figure class="wp-block-image size-full"><img data-recalc-dims="1"  decoding="async"  width="604"  height="1080"  sizes="(max-width: 1200px) 100vw, 1200px"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/0DoF_DolbyAtmosMusic_protoolspanner_screenshot-4k-hd.png?resize=604%2C1080&quality=72&ssl=1"  alt="Der Dolby Atmos Music Panner lässt die Bewegung der Audioobjekte auf das Tempo der Musik abstimmen."  class="wp-image-149584" ><figcaption class="wp-element-caption">The Dolby Atmos Music Panner allows the movement of audio objects to be synchronised with the tempo of the music.</figcaption></figure>





<h2 id="spatial-audio-0dof-with-without-picture" class="wp-block-heading">Spatial Audio 0DOF with/without picture?</h2>





<p class="wp-block-paragraph">Let’s take a look at what 3D audio content is really available now. Most of them should be familiar, because thanks to Dolby Atmos, consumer favourites such as films and music streaming services are currently being supplied. Podcasts too, but in the vast majority of cases they make more sense in stereo – or even mono.</p>





<figure class="wp-block-image size-full"><img data-recalc-dims="1"  decoding="async"  width="1200"  height="818"  sizes="(max-width: 1200px) 100vw, 1200px"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/0DoF_MPEG-H_Mhapi-4k-hd.png?resize=1200%2C818&quality=72&ssl=1"  alt="Viele Positionierungsmöglichkeiten heißt auch viele Parameter, die anhand von Automationen (Keyframes) in die Timeline geschrieben werden müssen."  class="wp-image-149586" ><figcaption class="wp-element-caption">Many positioning options also mean many parameters that have to be written into the timeline using automations (keyframes).</figcaption></figure>





<h2 id="3d-audio-is-better-than-3d-video" class="wp-block-heading">3D audio is better than 3D video?</h2>





<p class="wp-block-paragraph">When I tell people that I do “something with 3D audio”, they immediately ask if I work with “Dolby Atmos”. In fact, Dolby is not that relevant in my immersive audio bubble because it doesn’t enable many things that I need in my daily work. But more on that when it comes to degrees of freedom.<br />Nevertheless, Dolby Atmos (AC-4 is actually the audio format behind the marketing term) is particularly relevant in the film industry. Thousands of Hollywood blockbusters have already been mixed in this format and people are happy to spend a few euros more to enjoy a surround system in the cinema. Sure, the sound experience is more fun when a helicopter suddenly sounds like it’s flying over your head. But at the end of the day, I would argue that all films also work with stereo sound – or mono. You don’t look in all directions, you only look forwards.<br />Even though I like to shoot in the direction of Dolby Atmos, they do a good job of bringing this surround sound into the living room. Many soundbar models now support playback via the TV using streaming apps. And playback via Apple headphones is also really fun, even on an iPad. Although the “Airpods Pro” are in-ear headphones, you have the feeling of being enveloped by the sound and can almost save yourself an expensive home cinema. Now let’s take a look at pure audio enjoyment without visual content: Music streaming has long been part of our everyday lives and is becoming increasingly popular. 3D music streaming is the latest innovation in the industry, adding a third dimension to the listening experience.</p>





<figure class="wp-block-image size-full"><img data-recalc-dims="1"  decoding="async"  width="1200"  height="675"  sizes="(max-width: 1200px) 100vw, 1200px"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/3DoF_360Video_EUsavesLives-Filmstill-hd.png?resize=1200%2C675&quality=72&ssl=1"  alt="Die 360° Video Produktion #EUsavesLives zeigt hautnah den Schultag eines Kenianischen Jungen, (hier als Equirectangular Projektion, die das 360° Bild komplett zeigt, nicht nur den späteren Bildausschnitt im Videoplayer."  class="wp-image-149592" ><figcaption class="wp-element-caption">The 360° video production #EUsavesLives shows the school day of a Kenyan boy up close, (here as an Equirectangular projection, which shows the 360° image in its entirety, not just the later image section in the video player.</figcaption></figure>





<h2 id="3d-music" class="wp-block-heading">3D music</h2>





<p class="wp-block-paragraph">But now Dolby came up with the idea of converting their 3D audio format for music production. Admittedly, I’m a little sceptical here too, because I felt that most songs sounded better in stereo than with the 3D audio formats. In addition to Dolby Atmos Music, Sony is trying to add 360 Reality Audio to the list of 3D formats.<br />However, one advantage is definitely that you have to make fewer compromises when mixing music and have more options for placing the individual audio tracks. This gives some tracks more depth and you can hear the individual instruments better. You have the feeling that the musicians are sitting around you in the studio.<br />We are currently in a major learning phase here, similar to the move from mono to stereo. The first Beatles songs sound interesting by today’s standards. Today we know better how to mix in stereo. The same applies to these 3D music productions. So just see for yourself which streaming platform is already there and listen to it. I think the quality is getting better and better and since Apple Music got involved, there’s been a bit of a gold-rush atmosphere in the audio community.<br />And what is 8D Audio now? Admittedly: maximum confusion to categorise this again, but 8D doesn’t even stand for dimensions, but directions. Let’s just accept the phenomenon and define it for what it is: music circles around our heads in mono and works quite well through headphones. But in the long run it is a bit tiring and monotonous, so 3D music tries to do better. Here you work with individual tracks of the various instruments and place or move them through the room where it suits the composition. It rarely sounds as clearly 3D as 8D audio. So just listen to the examples that sound better or worse on my blog and form your own opinion.</p>





<p class="wp-block-paragraph"></p>





<h2 id="dolby-atmos-podcast" class="wp-block-heading">Dolby Atmos Podcast</h2>





<p class="wp-block-paragraph">The last remaining audio-only format is podcasts, which are now also being tackled by Dolby Atmos. Here, however, my toenails roll up a little, for reasons that go beyond the scope of this article. The short version is that Audible has now jumped on the bandwagon, but AC-4 as a 3D format does not allow certain sound sources to be made non-3D. With audio dramas in particular, this means that the narrator’s voice is suddenly in the 3D scene with the protagonists, which means that the listener can no longer really distinguish who is actually part of the action.<br />But I’m already in dialogue with Dolby about this too, because what’s the point of always complaining :-) But there are already other courageous productions that have mixed with or without Dolby in order to get answers to the questions of how well radio plays work as 3D audio productions. Just listen to it yourself and form your own opinion. Head tracking comes later.<br />Most productions have “the problem” of producing too classically. In other words, recording speakers in the studio, then letting them move around virtually with 3D spatialisers and adding an atmo. But that doesn’t really sound convincing or immersive. May I cite my own productions as a positive example? Then I’ll throw my radio play for BKW Engineering into the ring. A more classically produced one is the “Erdsee-Hörspiel” from WDR. I’m curious about the opinions.</p>





<h2 id="advantages-and-disadvantages-of-3d-sound" class="wp-block-heading">Advantages and disadvantages of 3D sound</h2>





<p class="wp-block-paragraph">Said productions with 3D audio can be fun if they fit the content well. It is also often used as a marketing gimmick to simply offer the listener something new, to have a unique selling point. This is also a disadvantage, as 3D audio is not a seal of quality. However, you can usually rely on your ears to determine whether the production works for you or not, apart from your own taste.<br />That’s why productions are often made in 3D that would probably have worked just as well in stereo. Especially in the music sector, there are genres that have spatialisation but, in contrast to the stereo version, have less pressure. In addition, the conversion to headphones is not yet perfect. The soundtrack often sounds somehow duller than you are used to with stereo directly to the ears.<br />Here, too, we are in a transitional phase. Users first have to get used to this spatial sound again. It usually works better via loudspeaker systems and I have to admit that I had a lot of fun with Dolby Atmos Music in a demo car.<br />However, as you can imagine, with 3D audio mixes there are even more parameters that I can set for a sound. That’s why such mixes are usually more complex, more time-consuming and more expensive than a stereo mix. And, as I said, you usually need a selection of special devices to really get the full benefit.</p>





<figure class="wp-block-image size-full"><img data-recalc-dims="1"  decoding="async"  width="1075"  height="1080"  sizes="(max-width: 1200px) 100vw, 1200px"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/3DoF_Ambisonics_7.1.4-Lautsprecher-Setup-4k-hd.png?resize=1075%2C1080&quality=72&ssl=1"  alt="Der IEM AllRADecoder ermöglicht die Wiedergabe von Ambisonics-Signalen mit einem beliebigen Lautsprecher Setup – je kugelförmiger, desto besser."  class="wp-image-149591" ><figcaption class="wp-element-caption">The IEM AllRADecoder enables the playback of Ambisonics signals with any speaker setup – the more spherical, the better.</figcaption></figure>





<h2 id="formats-and-object-based-audio" class="wp-block-heading">Formats and object-based audio</h2>





<p class="wp-block-paragraph">As mentioned, all formats are somehow based on the assumption that the<br />listener is looking towards the front, where there is usually a centre speaker or screen. You don’t actually want people to look across the room because there is usually a TV picture in front of them while they are listening to 3D audio content.<br />As I said, Dolby Atmos has established itself in the form of AC-4 with a large marketing budget and is spreading from films to music, podcasts and gaming. The alternative is MPEG-H made in Germany, which is particularly suitable for live streaming in the broadcast sector. The competition from Dolby Atmos Music is an adaptation of MPEG-H, called Sony 360 Reality Audio, which should provide a boost for the Sony Music label in particular. Both formats can even be found on Amazon Music, although Dolby already has around four times as many tracks, so the battle seems to have been decided.<br />One format that has not established itself for over 30 years, but is currently experiencing a renaissance, is Ambisonics. This sound field-based format has audio channels, but instead of loudspeakers it maps spatial axes. It all sounds a little unusual and only has a very small sweet spot when played back via loudspeakers. However, this disadvantage does not exist with headphones because you have the perfect playback position directly on your ears. The format can also be easily rotated around the X, Y and Z axis. This is why it has established itself more for the use of 360° videos and thus into the world of three degrees of freedom.</p>





<h2 id="new-audio-freedoms-in-three-degrees" class="wp-block-heading">New audio freedoms in three degrees</h2>





<p class="wp-block-paragraph">With advances in technology, new 3D audio techniques are opening up unprecedented possibilities for media production. 3D audio with three degrees of freedom (3DOF) is one such concept that enables unprecedented immersion and dynamic sound experiences that benefit from the fact that you’re not just staring straight ahead.<br />Below we look at the pros and cons for specialised headphones, applications where it can be used effectively and different formats available when implementing this feature into a production workflow.</p>





<h2 id="360-videos-from-a-sound-perspective" class="wp-block-heading">360° videos from a sound perspective</h2>





<p class="wp-block-paragraph">Probably the best-known representative of this genre are 360° videos. These spherical moving images triggered a real hype half a decade ago. Suddenly you could watch such videos<br />
on the largest video platforms, YouTube and Facebook. Strictly speaking, there are many types of user experience:</p>





<ul class="wp-block-list">

<li>On desktop devices, using the mouse to turn your gaze while looking further forward at the screen.</li>

</ul>





<ul class="wp-block-list">

<li>On smartphones, where you hold your device in front of your nose, not turning your head, but rotating your body on its own axis.</li>

</ul>





<ul class="wp-block-list">

<li>Head-mounted displays (HMDs) are in the top class because the image really does adapt to the movement of your head in real time. Such 360° videos are also available as stereoscopic videos, as 3D videos, if you like, where each eye gets to see its own 360° panorama, creating a more vivid image in the brain. But of course there are also 360° videos with 3D sound. In this context, I also like to call it 360° sound, because you immediately understand that the sound is also spherical like the image.</li>

</ul>





<h2 id="audio-head-tracking" class="wp-block-heading">Audio head tracking</h2>





<p class="wp-block-paragraph">If you now remove the image component, but still want the sound to react to head movements, then we find ourselves in the world of audio head tracking. Apple is already building this technology into ALL Airpods, from the entry-level Airpods to the Airpods Pros and, of course, Airpods Max. However, the use cases are currently limited.<br />Technically, it is now possible to listen to Dolby Atmos Music tracks via Apple Music, but, as I said, these tracks were never mixed with the intention of “listening around” in the music. In addition, Dolby metadata is even bypassed so that this feature can be activated at all. As a result, Dolby Atmos Music tracks sound different on Amazon than on Apple Music – not exactly what you want to hear as an audio engineer. But Apple needs content to be able to use its technology as a selling point.</p>





<figure class="wp-block-gallery has-nested-images columns-1 is-cropped wp-block-gallery-2 is-layout-flex wp-block-gallery-is-layout-flex">

<figure class="wp-block-image size-large"><img data-recalc-dims="1"  decoding="async"  width="1200"  height="615"  data-id="149593"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/3DoF_AppleApproach-2023-10-02-um-17.53.27-hd.png?resize=1200%2C615&quality=72&ssl=1"  alt="Nur Apple hat bis jetzt verstanden, dass man mit diesem Ansatz von drei Kategorien eigentlich für alle immersiven Medienproduktionen gewappnet ist. Daher wird dieses Prinzip auch im Apple RealityKit für die neue Vision Pro so implementiert. Dieses Setup nutze ich auch für Aufnahmen und Post-Produktion für alle immersiven Projekte, egal ob 3 Freiheitsgrade, oder 6 DoF. Und damit können wir uns jetzt zur finalen Stufe aufmachen, sechs Freiheitsgraden."  class="wp-image-149593" ></figure>





<figure class="wp-block-image size-large"><img data-recalc-dims="1"  decoding="async"  width="1200"  height="604"  data-id="149594"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/3DoF_AppleApproach-2023-10-02-um-17.53.35-hd.png?resize=1200%2C604&quality=72&ssl=1"  alt="Nur Apple hat bis jetzt verstanden, dass man mit diesem Ansatz von drei Kategorien eigentlich für alle immersiven Medienproduktionen gewappnet ist. Daher wird dieses Prinzip auch im Apple RealityKit für die neue Vision Pro so implementiert. Dieses Setup nutze ich auch für Aufnahmen und Post-Produktion für alle immersiven Projekte, egal ob 3 Freiheitsgrade, oder 6 DoF. Und damit können wir uns jetzt zur finalen Stufe aufmachen, sechs Freiheitsgraden."  class="wp-image-149594" ></figure>





<figure class="wp-block-image size-large"><img data-recalc-dims="1"  decoding="async"  width="1200"  height="623"  data-id="149595"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/3DoF_AppleApproach-2023-10-02-um-17.53.44-hd.png?resize=1200%2C623&quality=72&ssl=1"  alt="Nur Apple hat bis jetzt verstanden, dass man mit diesem Ansatz von drei Kategorien eigentlich für alle immersiven Medienproduktionen gewappnet ist. Daher wird dieses Prinzip auch im Apple RealityKit für die neue Vision Pro so implementiert. Dieses Setup nutze ich auch für Aufnahmen und Post-Produktion für alle immersiven Projekte, egal ob 3 Freiheitsgrade, oder 6 DoF. Und damit können wir uns jetzt zur finalen Stufe aufmachen, sechs Freiheitsgraden."  class="wp-image-149595" ></figure>





<figure class="wp-block-image size-large"><img data-recalc-dims="1"  decoding="async"  width="1200"  height="614"  data-id="149596"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/3DoF_AppleApproach-2023-10-02-um-17.57.13-hd.png?resize=1200%2C614&quality=72&ssl=1"  alt="Nur Apple hat bis jetzt verstanden, dass man mit diesem Ansatz von drei Kategorien eigentlich für alle immersiven Medienproduktionen gewappnet ist. Daher wird dieses Prinzip auch im Apple RealityKit für die neue Vision Pro so implementiert. Dieses Setup nutze ich auch für Aufnahmen und Post-Produktion für alle immersiven Projekte, egal ob 3 Freiheitsgrade, oder 6 DoF. Und damit können wir uns jetzt zur finalen Stufe aufmachen, sechs Freiheitsgraden."  class="wp-image-149596" ></figure>

<figcaption class="blocks-gallery-caption wp-element-caption">So far, only Apple has realised that this three-category approach is actually suitable for all immersive media productions. That’s why this principle is also implemented in the Apple RealityKit for the new Vision Pro. I also use this setup for recordings and post-production for all immersive projects, regardless of whether they have 3 degrees of freedom or 6 DoF. And now we can move on to the final stage, six degrees of freedom.</figcaption></figure>





<h2 id="3d-sound-is-more-than-entertainment" class="wp-block-heading">3D sound is more than entertainment</h2>





<p class="wp-block-paragraph">But there are applications that actually solve problems with 3D audio and head tracking and are not just a fun factor. The medium is also becoming increasingly relevant for communication. MS Teams has added the “spatial audio” feature to its video calls. The good thing is that you don’t need any additional hardware. Simple headphones are all you need and the microphone signal is automatically spatialised in the cloud for the other users – even without head tracking.<br />In a video conference with several people, things can quickly become chaotic as the current mono system has problems keeping the different voices apart. Our brain has difficulty differentiating between the voices as they all come from the same direction. 3D sound makes the conversation situation natural and actually makes it measurably easier to listen, because it takes the strain off our brain – similar to the cocktail party effect. The 3D audio spatialisation of voices makes it much easier to differentiate between them and noise is less noticeable.</p>





<h2 id="head-localisation-is-being-fought-wrongly" class="wp-block-heading">Head localisation is being fought – wrongly</h2>





<p class="wp-block-paragraph">Films can also be watched with head tracking, but there is a fundamental problem here: the approach is to make the entire sound 3D. However, this means that even elements such as narrative voices or background music are part of the scene where they shouldn’t be. Here is a very brief explanation on the subject of diegesis. 360° videos are the most vivid. Everything you can see should also be three-dimensional in terms of sound, as you are in the scene (diegetically). But it shouldn’t be a narrative voice that can’t be seen (non-diegetic). Otherwise you will hear a ghost coming from somewhere and wonder who is talking to you. But if the voice is played in mono, it doesn’t change as you turn your head, and you immediately understand that a person is talking to you who isn’t even part of the scene.<br />In other words, in the world of 3 degrees of freedom, not everything is just 3D audio so that you have the feeling that it is coming “from outside”. Rather, the ability to combine the soundtrack with mono or stereo signals via headphones is important so that listeners understand which sound is coming from which narrative level. This is not possible with loudspeakers because they are always perceived from the outside, whereas with headphones you can – and should – make use of the localisation in the head.</p>





<h2 id="confused-then-lets-go-round-in-circles-again" class="wp-block-heading">Confused? Then let’s go round in circles again</h2>





<p class="wp-block-paragraph">Apple does a good job of being able to understand whether I’m listening to stereo or multi-channel content. This is all the more interesting when you realise that Bose burnt its fingers on this very subject years ago. In Cupertino, however, they believe in the technology and have already built it into every pair of in-house Airpod headphones. This also helps with the market launch of the Apple Vision Pro, but more on that later. When it comes to audio playback alone, a distinction is made not only between the two options, but a total of five.</p>





<h2 id="input-stereo" class="wp-block-heading">Input: Stereo.</h2>





<p class="wp-block-paragraph">You can hear the sound as normal stereo, nothing special at first. However, if you don’t like this in-head localisation, you can activate “Spatialize Stereo/Stereo to 3D Audio”, which adds a reverb algorithm to the signal to make it sound more “natural”.<br />In addition to this spatialisation, you can also activate head tracking, which makes the signal sound through headphones as if you were listening to it through two speakers in the room (3DoF).</p>





<h2 id="input-multi-channel" class="wp-block-heading">Input: Multi-channel</h2>





<p class="wp-block-paragraph">Mostly Dolby Atmos via film platforms such as Disney or music streaming such as Apple Music. The sound is automatically converted from multi-channel audio to binaural stereo so that the sound sounds as if it is happening around you. This setting makes the most sense if you move around and don’t want the sound to change all the time as you move your head.<br />You can also activate head tracking, which is particularly useful for films if you have a visual reference point or if you want to distinguish objects from the front from those from behind (which is always difficult with binaural sound). I would call this level 3DoF.</p>





<h2 id="freedom-comes-with-pitfalls-advantages-of-head-tracking" class="wp-block-heading">Freedom comes with pitfalls – advantages of head tracking</h2>





<p class="wp-block-paragraph">A big advantage of this technology for 360-degree videos is the fact that you can now hear better when something is happening behind you, for example. Something you wouldn’t be able to see because we only have a limited field of view, but our hearing is always mapped in 360°. I have often seen VR experiences that slap their beautifully designed visual scene with arrows so that people look in the right direction at the right moment. Cleverly placed 3D sound can solve this problem intuitively.<br />It also solves one of the biggest obstacles of binaural audio. You often have the feeling of spatiality and that the sound is happening around you. However, you can rarely distinguish the front from the back. But if you are able to turn your head even slightly, you can immediately understand which sound is where. Head tracking does not mean that you have to move 360° – but you can. This solves the aforementioned problem of narrative levels. With films, you don’t have to ask yourself whether an element is dynamic or not. When we listen to epic music in the cinema, we don’t ask ourselves “where is Hans Zimmer now”. But because you now have this clear separation, you have to question how you actually use voice-overs and music. In most cases, a scene with well-designed sound effects is better than desperately trying to keep people entertained with music and speech. The brain is usually already well supplied with 360° images anyway, so three levels of sound (speech, sound effects, music) are more likely to cause confusion.</p>





<h2 id="disadvantages-of-head-tracking" class="wp-block-heading">Disadvantages of head tracking</h2>





<p class="wp-block-paragraph">As already mentioned, the whole thing is not so easy to implement via loudspeakers. Theoretically, it is also possible to display 360° videos as a projection in planetariums, for example. For surround sound, you are surrounded by loudspeakers. But the speaker’s voice still somehow comes from one direction. What still works well in the cinema suddenly becomes a problem with spherical videos and a series of workarounds and compromises are necessary.<br />Unfortunately, you rarely know when listening whether the mix should be heard with head tracking or not. There are mixes that really fall apart when you have the opportunity to turn your head. While other productions don’t even make sense if you’ve deactivated head tracking.<br />Admittedly, I always talk so cleverly about what you should and shouldn’t do. But the reality is simply that there is usually neither the time, money nor knowledge to make your immersive media production really good. If anything, an Ambisonics microphone is put up and labelled as immersive audio. Only to be mixed in stereo in the end for budget reasons “because nobody can hear it anyway”. It may be that listeners who come into contact with immersive media for the first time don’t necessarily scrutinise the sound. But the more points of contact you have, the greater the desire for sound. All the top podcasts are now produced in a studio, even if they started out as a hobby – there must be some kind of quality reason for this ;-)</p>





<h2 id="formats-for-audio-head-tracking" class="wp-block-heading">Formats for audio head tracking</h2>





<p class="has-text-align-left wp-block-paragraph">I have already mentioned the most famous representatives with Dolby Atmos, 360 Reality Audio (based on MPEG-H) and Ambisonics. However, these are all 3D formats that were not primarily developed for audio head tracking.<br />That’s why I don’t want to go into the technical details here, but rather briefly explain why Apple is once again showing a very good approach here. Even if Dolby likes to describe its format as future-proof, at some point it will reach its limits.<br />As mentioned, in the world of degrees of freedom, it’s not just 3D sounds that are relevant. But also precisely those non-diegetic sounds that are not part of the scene, but make sense for music and voice-overs in 0D. But of course there is more than just black and white thinking, i.e. 0D and 3D. Because there has to be something in between. This is often referred to as a bed. Apple refers to the three factors as:</p>





<ul class="wp-block-list">

<li>3D Audio Objects</li>





<li>Ambience Bed</li>





<li>Head-locked audio</li>

</ul>





<h2 id="the-3-layers-for-immersive-soundtracks" class="wp-block-heading">The 3 layers for immersive soundtracks</h2>





<p class="wp-block-paragraph">We have already looked at 3D Audio Objects, which are the objects that I can place in the room. You usually have the option of setting distance parameters or the size of an object so that it doesn’t stand out from the scene. Let’s take a cheering 3D audio fan in a stadium as an example. Then I would like to have a reverb that gives you the feeling of being in the same place. But if I simply add a reverb to the object, the reverb will only come from this corner. However, sound spreads in all directions, so it would also be heard all around us. Theoretically, I could send the reverb to our head-locked audio track. But then the reverb would not be 3D.<br />This is where the aforementioned bed comes into play. All signals that should be spatial but can be diffuse can be sent here. So if you have not just one fan, but hundreds, you would otherwise have to fill 100 audio tracks with objects. This way, you can simply send the group to the bed and only need a fraction of the audio tracks.</p>





<h2 id="what-do-the-other-formats-do-differently" class="wp-block-heading">What do the other formats do differently?</h2>





<p class="wp-block-paragraph">Dolby Atmos, for example, works with a channel-based 7.1.2 bed and you can add up to 128 mono objects. However, it doesn’t actually have a head-locked stereo track because the format is based on loudspeakers. So for me it is not suitable for podcasts. In principle, the Dolby Atmos renderer offers the option of marking an audio<br />object as “disable binauralisation”. This means that it is not played spatially. However, if you activate head tracking, the Apple renderer bypasses this metadata and only reads out where the object is located in the scene. This means that all Dolby Atmos mixes were never mixed with the intention of head tracking and therefore rarely utilise the advantages of the technology.<br />Ambisonics, on the other hand, has 4, 9, 16 or more channels, depending on the order. So it has a bed, I can even work with head-locked audio, but again it has no objects. Which is why the sound is always a bit diffuse, or I would have to spend a lot of audio channels to get close to the resolution of object-based formats. It therefore supports head-locked audio in mono, but not stereo. However, this optional stereo track is standard with Facebook360 and YouTubeVR, for example. An Ambisonics file is supplied, which rotates depending on the viewing direction. If required, an additional stereo file is supplied that is always played in the same way, no matter where you look in the 360° video. This gives you the best of both worlds and a good compromise between resolution and quality.</p>





<h2 id="6dof-anything-but-dumb" class="wp-block-heading">6DoF – anything but dumb</h2>





<p class="wp-block-paragraph">Let’s move on to the premier class of 6 degrees of freedom. Here you not only have three possible rotations around the X, Y or Z axis – but also three translations to these axes. In less unnecessarily clever terms, this means that you can also move towards or away from the sound, making it louder or quieter, for example.<br />All the applications discussed so far have been based on the fact that the listener is at the centre of the action, in the so-called sweet spot. This is where the optimum listening position is. But now we can move away from this point all at once and you can already guess that this makes sound design even more complex. So that there are no acoustic holes in the 3D scene or you are distracted by too many sound sources.</p>





<figure class="wp-block-image size-full"><img data-recalc-dims="1"  decoding="async"  width="1126"  height="1080"  sizes="(max-width: 1200px) 100vw, 1200px"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/6DoF_GameEngine_Unity_AudioObjekt-hd.png?resize=1126%2C1080&quality=72&ssl=1"  alt="Unity sieht erst einmal recht komplex aus, aber viele 3D Audio Parameter kann man gar nicht für sein Audioobjekt einstellen."  class="wp-image-149598" ><figcaption class="wp-element-caption">Unity looks quite complex at first, but you can’t set many 3D audio parameters for your audio object.</figcaption></figure>





<h2 id="games-more-than-a-gimmick" class="wp-block-heading">Games (more than a gimmick)</h2>





<p class="wp-block-paragraph">Here you will inevitably find yourself in game engines. This is why games are the best-known representative of this category. But not every game uses 3D audio. Once again, the genre question is a legitimate one.<br />A 2D game needs sounds from behind/above/below just as little as a strategy game in which I look down on my people from above like a god. Left/right is perfectly adequate here. 3D audio could be used here at most in the ambience, similar to films, so that you have more of a feeling of being part of the scene. Enveloped by sound is the keyword here again.<br />All games that take place in 3D worlds, especially first-person games, benefit from spatial sound. Fans of shooters have long known that being able to hear the enemies behind you before you get them in front of your virtual “weapon” can make all the difference in the game. That’s why gaming headsets are popular in this respect, so that your ears can tell your eyes where to look as well as possible.<br />However, such surround headsets are usually not even necessary to be able to hear spatially. We remember that our brain can do this with just two cleverly rendered audio channels. In most cases, the game automatically recognises whether you are using speakers or headphones and renders the sound accordingly. Nevertheless, it may well be that such surround headsets are even more customised to the software and, above all, enable communication. The Playstation 5, for example, advertises with the Pulse 3D Audio Engine and its own headset, which are very well matched to each other. Recently also in combination with Dolby Atmos in order to be able to control multi-channel soundbars.</p>





<figure class="wp-block-image size-full"><img data-recalc-dims="1"  decoding="async"  width="1200"  height="534"  sizes="(max-width: 1200px) 100vw, 1200px"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/6DoF_GameEngine_UnityFmod-hd.png?resize=1200%2C534&quality=72&ssl=1"  alt="Daher wird bei einer VR-Produktion wie hier in Unity gerne eine Schnittstelle zu einer Middleware integriert."  class="wp-image-149599" ><figcaption class="wp-element-caption">This is why an interface to middleware is often integrated into VR productions, such as here in Unity.</figcaption></figure>





<h2 id="ar-augmented-reality-audio" class="wp-block-heading">AR (augmented reality audio)</h2>





<p class="wp-block-paragraph">Pokemon AR is often used as best practice for augmented reality applications. Even though I like to say that the game didn’t go viral because it was AR, but because it was Pokemon and had a great multiplayer character. The built-in cameras of smartphones are usually used here. The Lidar scanner, which enables even more precise tracking, is also increasingly being used.<br />AR glasses have not really become socially acceptable yet. Magic Leap or Hololens cost several thousand euros and are even slowly establishing themselves in the industry. Google Glass was way ahead of its time, but the features you would want from such a device are correspondingly limited. That’s why augmented reality is currently even more exciting from an auditory perspective. The technology here is already very advanced and, as already mentioned, headphones usually have at least head tracking built in. Combined with the smartphone, experiences with six degrees of freedom are also possible. Applications could include audio guides in museums, where the paintings or statues are brought to life by sound and tell their life stories, for example.</p>





<h2 id="vr-sound-for-virtual-reality" class="wp-block-heading">VR (sound for virtual reality)</h2>





<p class="wp-block-paragraph">When it comes to VR applications, most people also think of gaming. This is supported by the fact that such projects are almost exclusively developed with game engines such as Unity or Unreal. However, the VR bubble is much more diverse than you might think. Training and simulations in particular are currently in the B2B industry without consumers realising it. The possibilities are virtually unlimited and, in addition to the aforementioned rollercoaster games for which the medium is mostly known, real use cases are establishing themselves that offer added value – such as saving time and money when training employees. Nevertheless, it is not easy to transfer games, apps and the like from 2D screens to VR. The user experience with HMDs and controllers is simply fundamentally different. In most cases, VR attempts to replicate reality and is just a poor digital copy. However, the immersive medium really comes into its own when you do things that you can’t do in real life.</p>





<p class="wp-block-paragraph">A prime example is “Notes on Blindness” (is.gd/notes_on_blindness). A VR experience in which you slip into the character of someone who is slowly going blind. Automatically, even non-sound people pay much more attention to the subtle nuances in the sound. But in the vast majority of cases, sound is usually neglected by developers due to a lack of knowledge and time. That’s why most apps sold as immersive experiences sound quite sterile. We are a long way from AAA budgets and need to give this young medium some time.</p>





<figure class="wp-block-image size-full"><img data-recalc-dims="1"  decoding="async"  width="1200"  height="483"  sizes="(max-width: 1200px) 100vw, 1200px"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/6DoF_Middleware_FMod_Screenshot-hd.png?resize=1200%2C483&quality=72&ssl=1"  alt="Unity kommuniziert dann mit der Middleware FMOD. Diese ermöglicht eine viel präzisere Kontrolle über Interaktivität, hier etwa wie die Musik geloopt wird und wann der Track für die nächste Szene abgespielt wird – in Abhängigkeit vom Tempo des Titels."  class="wp-image-149597" ><figcaption class="wp-element-caption">Unity then communicates with the FMOD middleware.  This enables much more precise control over interactivity, such as how the music is looped and when the track for the next scene is played – depending on the tempo of the title.</figcaption></figure>





<h2 id="spatial-computing-meets-spatial-audio" class="wp-block-heading">Spatial computing meets spatial audio</h2>





<p class="wp-block-paragraph">Let’s not get confused by Apple’s new term. For me, spatial computing is the same as XR (eXtended Reality). You are in virtual reality, extending your reality or somehow in between. The boundaries are no longer so easy to separate when even VR glasses have cameras that look into reality. So if I see reality through an HMD, is that VR or AR?<br />Apple doesn’t make the confusion any easier because they didn’t want to use terms that are already used by other companies. Virtual reality or the metaverse from Meta. Or mixed reality from Microsoft. In the long term, the result will be that we don’t have a VR device and an AR device, but a device that can depict all realities. I won’t comment on how AI and other buzzwords such as blockchain will play into this ;-)<br />For me as a sound engineer, however, it is important to separate which sound is part of which reality. In VR, I want to isolate myself, so I use a surround sound that matches what the display tells me. Whereas in AR, I want the sound to sound as if something is happening in my living room, where I am right now. Apple is also closer to a solution here than other companies because Apple Vision Pro, for example, also introduced ray tracing, which recognises the geometry of our surroundings and renders the sound accordingly.<br />In addition, Apple has already built a good infrastructure for 3D audio with the Airpods. If you want to further optimise the sound, you take pictures of your ears, which generates a personalised HRTF. Our hearing as a 3D model, so to speak. This allows the iPhone, for example, to tune the sound for us even more precisely and the distinction as to whether the sound is with us in VR or AR becomes even clearer.</p>





<h2 id="game-audio-playful-or-gambled-away" class="wp-block-heading">Game audio – playful or gambled away?</h2>





<p class="wp-block-paragraph">Gamers know how important it is not only to see your opponents in time, but to hear them beforehand. For this reason, many people like to spend good money on expensive headsets that supposedly give them an advantage in the game. However, the sound design in AAA games is also very well budgeted and therefore correspondingly complex. A short beep sound is enough for the player to know immediately what is happening in the scene.</p>





<p class="wp-block-paragraph">Communication and collaboration in 3D audio and with 6 degrees of freedom are crucial aspects for the feeling of presence in social VR. I recently had the opportunity to be in Social VR myself and was surprised at how long I was in there at a stretch. Even though the room was virtual, afterwards I had the feeling that the other person was actually in the same room as me. The cognitive load on our brain is lower because the sound is not coming from just one direction, as is the case with video calls, but a natural conversation situation is depicted.</p>





<p class="wp-block-paragraph">But augmented audio can also make our everyday lives easier when we are on the move. Everyone is probably familiar with the problem of travelling on the underground using a map service for smartphones. You arrive<br />somewhere on the surface, but have no idea where to go because the sat nav is confused as to which direction we are travelling in. GPS is simply too imprecise. But if you have a second reference system – in the form of surround headphones that know the direction you are facing – you could simply hear a voice from the direction you need to move in.</p>





<h2 id="game-over-with-these-problems" class="wp-block-heading">Game over with these problems?</h2>





<p class="wp-block-paragraph">We humans have been used to hearing in three dimensions since birth. Now we can finally approximate this impression naturally. It all sounds very simple, but to get back from stereo to the original, an extremely large number of parameters are required. Unfortunately, it is not enough to load an audio object into a game engine and tick the “3D Audio” box. Here is a brief overview of what is required for 3D audio:<br />Before you insert an audio clip into a game engine, you should ask yourself a few questions. Where did you get the sound from and does it fit in with the other sounds in your library or recordings that you may have processed with EQ? What is the purpose of the sound? Will it run in the background or will it serve as a trigger for special actions in the game? The secrets of game audio have evolved in recent years. Usually the result is to have loud sounds in a 3D world, but they never really work together.<br />Since you not only give the sounds parameters as to where they are in the environment, but you also move through the world as a character, there are a variety of parameters that you can give your audio object. As already mentioned, a distinction is made between mono and 3D sound and the size of the sound. Another important parameter is the attenuation curve, which regulates how quickly and how much the volume of the sound decreases in the room. By setting the focus parameters and the air absorption and occlusion factors, you can further determine how the sound spreads in the room.<br />So far so good, but so far the sound still subjectively sticks very close to your face. It has a certain distance, but our brain does not yet know what kind of room we are in. At the moment it is an abstract sound source in an empty room. So it’s time for reverb. An important consideration when creating realistic sound effects using 3D reverb is the size of the room and the material of the walls. 
Here, too, the calculation is usually only approximate. You would actually need real-time ray tracing to be able to realistically simulate initial reflections and reverberation. However, with the right combination of the parameters mentioned above, you can get quite close and don’t need a render farm.</p>





<figure class="wp-block-gallery has-nested-images columns-default is-cropped wp-block-gallery-3 is-layout-flex wp-block-gallery-is-layout-flex">

<figure class="wp-block-image size-large"><img data-recalc-dims="1"  decoding="async"  width="1200"  height="674"  data-id="149602"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/wwise-1-snapshot-hd.png?resize=1200%2C674&quality=72&ssl=1"  alt="Audio-Middleware reagiert auch auf die Geometrie – und kann die entsprechend „akustisch“ interpretieren."  class="wp-image-149602" ></figure>





<figure class="wp-block-image size-large"><img data-recalc-dims="1"  decoding="async"  width="960"  height="1080"  data-id="149603"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/wwise-3-snapshot-hd.png?resize=960%2C1080&quality=72&ssl=1"  alt=""  class="wp-image-149603" ></figure>





<figure class="wp-block-image size-large"><img data-recalc-dims="1"  decoding="async"  width="960"  height="1080"  data-id="149601"  src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2023/11/wwise-2-snapshot-hd.png?resize=960%2C1080&quality=72&ssl=1"  alt=""  class="wp-image-149601" ></figure>

<figcaption class="blocks-gallery-caption wp-element-caption">Audio middleware also reacts to the geometry – and can interpret it “acoustically” accordingly. </figcaption></figure>





<h2 id="if-the-software-wasnt-so-hardware" class="wp-block-heading">If the software wasn’t so hardware..</h2>





<p class="wp-block-paragraph">Hard… you know? Anyway, in this context you usually come across the term “game audio”, which deals with the design of interactive audio content. The three new degrees of freedom create two new problems: You don’t know exactly when the player will actually be where.<br />That’s why audio production doesn’t end up with one long audio file with several channels. Instead, many small assets are delivered. These can be loops such as a forest atmosphere, for example, which is repeated in the background until we are no longer in the forest. The second category is trigger sounds, for example if I want to hit a tree with an axe, a “tin” should also come at the right moment.<br />The only audio format that is still under development and that could depict everything is Fraunhofer’s MPEG-I. However, this will take a few more years, which is why it is mostly found in game engines such as Unity or Unreal Engine. The former is only equipped with very rudimentary 3D audio features. With Epic Games, you can go further. Nevertheless, both platforms quickly reach their limits, which is why it is common to implement middleware for your project. Audiokinetic Wwise and FMOD are popular programmes for this and usually provide everything you need. And if not, you can always write your own scripts. Easier said than done, because here the sound designer has to become more of a developer. With endless possibilities but also complexity.</p>





<h2 id="conclusion-on-the-large-3d-audio-matrix" class="wp-block-heading">Conclusion on the large 3D Audio Matrix</h2>





<p class="wp-block-paragraph">To summarise, 6DoF makes it possible to move freely in space and changes the way we experience sound, initially in a playful way. Even if games have somehow been using this for decades, there is a much greater added value than entertainment. That’s why it doesn’t make sense for me to call 3D audio the stereo killer now, as Dolby likes to propagate. I’m starting with what wouldn’t have worked with stereo, true to the motto “Sound First”. In the sound community VDT (Association of German Sound Engineers) and AES (Audio Engineering Society) there is a lot of talk about “immersive audio”, but it’s mostly just about 3D music. And it feels like nerdy details that users don’t understand anyway.<br />Audio professionals have to ensure that the scene is set to music in such a way that it doesn’t sound empty but also not overloaded. Developers suddenly have to deal with very complex audio parameters that they have to operate on their own more often than I would like. However, the bigger the project, the more budget there is for the respective specialists and games can certainly be taken as a prime example in terms of creativity and technical realisation. It’s an exciting time for the audio world because 3D audio is really celebrating one breakthrough after another in a wide variety of areas. So if you have a project that you need help with, or would like to learn more about this area with my video course, just get in touch!</p>





<p class="wp-block-paragraph"></p><p>The post <a href="https://digitalproduction.com/2023/11/14/3d-audio-into-the-acoustic-matrix/">3D Audio – Into the acoustic matrix</a> first appeared on <a href="https://digitalproduction.com">DIGITAL PRODUCTION</a> and was written by <a href="https://digitalproduction.com/author/martinrieger/">Martin Rieger</a>. </p></div>]]></content:encoded>
					
		
		
		<enclosure url="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2024/10/vrtestung_spatial_audio_matrix_Dynamic_neon_blue_photorealistic_4615f98d-f0b8-4589-9aac-fc3390fc6c48.jpg?fit=2880%2C2880&#038;quality=80&#038;ssl=1" length="189537" type="image/jpg" />
<media:content xmlns:media="http://search.yahoo.com/mrss/" url="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2024/10/vrtestung_spatial_audio_matrix_Dynamic_neon_blue_photorealistic_4615f98d-f0b8-4589-9aac-fc3390fc6c48.jpg?fit=1080%2C1080&#038;quality=80&#038;ssl=1" width="1080" height="1080" medium="image" type="image/jpeg">
	<media:copyright>DIGITAL PRODUCTION</media:copyright>
	<media:title></media:title>
	<media:description type="html"><![CDATA[]]></media:description>
</media:content>
<media:thumbnail xmlns:media="http://search.yahoo.com/mrss/" url="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2024/10/vrtestung_spatial_audio_matrix_Dynamic_neon_blue_photorealistic_4615f98d-f0b8-4589-9aac-fc3390fc6c48.jpg?fit=1080%2C1080&#038;quality=80&#038;ssl=1" width="1080" height="1080" />
<post-id xmlns="com-wordpress:feed-additions:1">149573</post-id>	</item>
		<item>
		<title>Augmented Reality-App &#124; Houdini &#038; Blender</title>
		<link>https://digitalproduction.com/2020/11/17/augmented-reality-app-houdini-blender/</link>
		
		<dc:creator><![CDATA[Patrick Poti]]></dc:creator>
		<pubDate>Tue, 17 Nov 2020 11:00:32 +0000</pubDate>
				<category><![CDATA[News]]></category>
		<category><![CDATA[App]]></category>
		<category><![CDATA[AR]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[Blender]]></category>
		<category><![CDATA[Blender Tutorial]]></category>
		<category><![CDATA[Cinema 4D]]></category>
		<category><![CDATA[Houdini]]></category>
		<category><![CDATA[Maxon]]></category>
		<guid isPermaLink="false">https://www.digitalproduction.com/?p=87091</guid>

					<description><![CDATA[<div style="margin: 5px 5% 10px 5%;"><img src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2020/11/Augmented-Reality-App_Houdini-Blender_Banner.jpg?fit=1099%2C545&quality=80&ssl=1" width="1099" height="545" title="" alt="" /></div><div><p>The Adonis of AR apps. Byplay for Houdini, Blender and Cinema 4D.</p>
<p>The post <a href="https://digitalproduction.com/2020/11/17/augmented-reality-app-houdini-blender/">Augmented Reality-App | Houdini & Blender</a> first appeared on <a href="https://digitalproduction.com">DIGITAL PRODUCTION</a> and was written by <a href="https://digitalproduction.com/author/patrick-poti/">Patrick Poti</a>. </p></div>]]></description>
										<content:encoded><![CDATA[<div style="margin: 5px 5% 10px 5%;"><img src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2020/11/Augmented-Reality-App_Houdini-Blender_Banner.jpg?fit=1099%2C545&quality=80&ssl=1" width="1099" height="545" title="" alt="" /></div><div><p><script type='application/json' class='__iawmlf-post-loop-links'>[{"id":5135,"href":"https:\/\/www.instagram.com\/p\/B-X3Gk0qPU3\/?utm_source=ig_embed&utm_campaign=embed_video_watch_again","archived_href":"","redirect_href":"","checks":[],"broken":false,"last_checked":null,"process":"done"},{"id":5136,"href":"https:\/\/byplay.io","archived_href":"http:\/\/web-wp.archive.org\/web\/20250424003240\/https:\/\/www.byplay.io\/","redirect_href":"","checks":[{"date":"2025-12-28 21:44:35","http_code":206},{"date":"2026-01-14 10:58:17","http_code":206},{"date":"2026-01-25 05:31:57","http_code":206},{"date":"2026-01-31 07:53:13","http_code":206},{"date":"2026-02-15 04:53:29","http_code":206},{"date":"2026-02-25 03:27:40","http_code":206},{"date":"2026-03-20 09:38:03","http_code":206}],"broken":false,"last_checked":{"date":"2026-03-20 09:38:03","http_code":206},"process":"done"}]</script></p>
<h2 id="amphibian-app">Amphibian app</h2>
<p>A cross between a mobile and desktop app: the <strong>Byplay</strong> tool. In the first step, you use the mobile app, record a video and let the app do the augmented reality tracking. In the second step, you upload the video & tracking material to the cloud – where your material is processed and saved.</p>
<p>The third and final step is done with the desktop app; it loads the processed video from the cloud and makes it available to you for further VFX-like processing. The result of your work can – but doesn’t have to – be <strong><a href="https://www.instagram.com/p/B-X3Gk0qPU3/?utm_source=ig_embed&utm_campaign=embed_video_watch_again">cold weather fronts over small devices</a></strong>.</p>
<h2 id="further-information">Further information</h2>
<p>For a product presentation of Byplay, click on the video below. For everything else, <strong><a href="https://byplay.io/">take a look around Byplay (online).</a></strong></p>
<p><strong>Byplay Camera Houdini demo</strong><br />
<iframe class="youtube-player" width="1200" height="675" src="https://www.youtube.com/embed/bN7io_dXa9o?version=3&rel=1&showsearch=0&showinfo=1&iv_load_policy=1&fs=1&hl=en-US&autohide=2&wmode=transparent" allowfullscreen="true" style="border:0;" sandbox="allow-scripts allow-same-origin allow-popups allow-presentation allow-popups-to-escape-sandbox"></iframe></p><p>The post <a href="https://digitalproduction.com/2020/11/17/augmented-reality-app-houdini-blender/">Augmented Reality-App | Houdini & Blender</a> first appeared on <a href="https://digitalproduction.com">DIGITAL PRODUCTION</a> and was written by <a href="https://digitalproduction.com/author/patrick-poti/">Patrick Poti</a>. </p></div>]]></content:encoded>
					
		
		
		<enclosure url="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2020/11/Augmented-Reality-App_Houdini-Blender_Banner.jpg?fit=1099%2C545&#038;quality=80&#038;ssl=1" length="69179" type="image/jpeg" />
<media:content xmlns:media="http://search.yahoo.com/mrss/" url="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2020/11/Augmented-Reality-App_Houdini-Blender_Banner.jpg?fit=1099%2C545&#038;quality=80&#038;ssl=1" width="1099" height="545" medium="image" type="image/jpeg">
	<media:copyright>DIGITAL PRODUCTION</media:copyright>
	<media:title></media:title>
	<media:description type="html"><![CDATA[]]></media:description>
</media:content>
<media:thumbnail xmlns:media="http://search.yahoo.com/mrss/" url="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2020/11/Augmented-Reality-App_Houdini-Blender_Banner.jpg?fit=1099%2C545&#038;quality=80&#038;ssl=1" width="1099" height="545" />
<post-id xmlns="com-wordpress:feed-additions:1">87091</post-id>	</item>
		<item>
		<title>Foundry SIGGRAPH contributions online</title>
		<link>https://digitalproduction.com/2017/08/22/foundry-siggraph-beitraege-online/</link>
		
		<dc:creator><![CDATA[Bela Beier]]></dc:creator>
		<pubDate>Tue, 22 Aug 2017 11:00:00 +0000</pubDate>
				<category><![CDATA[News]]></category>
		<category><![CDATA[AR]]></category>
		<category><![CDATA[Cloud]]></category>
		<category><![CDATA[Demo]]></category>
		<category><![CDATA[Nuke]]></category>
		<category><![CDATA[Showreel]]></category>
		<category><![CDATA[siggraph]]></category>
		<category><![CDATA[Update]]></category>
		<category><![CDATA[Virtual Reality]]></category>
		<category><![CDATA[VR]]></category>
		<guid isPermaLink="false">https://www.digitalproduction.com/?p=61866</guid>

					<description><![CDATA[<div style="margin: 5px 5% 10px 5%;"><img src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2017/08/maxresdefault-25.jpg?fit=1200%2C675&quality=80&ssl=1" width="1200" height="675" title="" alt="" /></div><div><p>From the new cloud pipeline "Elara" to the features in Nuke 11: The Siggraph presentations from Foundry are now online - the new showreel is also available!</p>
<p>The post <a href="https://digitalproduction.com/2017/08/22/foundry-siggraph-beitraege-online/">Foundry SIGGRAPH contributions online</a> first appeared on <a href="https://digitalproduction.com">DIGITAL PRODUCTION</a> and was written by <a href="https://digitalproduction.com/author/belabeier/">Bela Beier</a>. </p></div>]]></description>
										<content:encoded><![CDATA[<div style="margin: 5px 5% 10px 5%;"><img src="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2017/08/maxresdefault-25.jpg?fit=1200%2C675&quality=80&ssl=1" width="1200" height="675" title="" alt="" /></div><div><p><script type='application/json' class='__iawmlf-post-loop-links'>[{"id":5586,"href":"https:\/\/www.digitalproduction.com\/event\/siggraph-2017","archived_href":"http:\/\/web-wp.archive.org\/web\/20240625191948\/https:\/\/www.digitalproduction.com\/event\/siggraph-2017","redirect_href":"","checks":[{"date":"2025-12-29 02:09:46","http_code":404},{"date":"2026-01-04 16:28:14","http_code":404},{"date":"2026-02-06 07:28:19","http_code":404},{"date":"2026-02-23 14:36:55","http_code":404},{"date":"2026-03-10 07:17:45","http_code":404}],"broken":true,"last_checked":{"date":"2026-03-10 07:17:45","http_code":404},"process":"done"},{"id":5587,"href":"http:\/\/community.foundry.com","archived_href":"http:\/\/web-wp.archive.org\/web\/20251115093305\/https:\/\/community.foundry.com\/","redirect_href":"","checks":[{"date":"2025-12-29 02:09:51","http_code":200},{"date":"2026-01-05 10:12:28","http_code":200},{"date":"2026-02-06 07:28:18","http_code":200},{"date":"2026-02-23 14:36:54","http_code":200},{"date":"2026-03-10 07:17:45","http_code":200}],"broken":false,"last_checked":{"date":"2026-03-10 07:17:45","http_code":200},"process":"done"},{"id":5588,"href":"https:\/\/www.youtube.com\/watch?list=PLi2GhhsPL-RoYnBaq6sPU9MDtHt9RAUNY&v=GucggT8exxY","archived_href":"","redirect_href":"","checks":[],"broken":false,"last_checked":null,"process":"done"},{"id":5589,"href":"https:\/\/www.digitalproduction.com\/2017\/07\/31\/nuke-11-0-ist-da","archived_href":"http:\/\/web-wp.archive.org\/web\/20220703031821\/https:\/\/www.digitalproduction.com\/2017\/07\/31\/nuke-11-0-ist-da\/","redirect_href":"","checks":[{"date":"2025-12-29 02:10:03","http_code":200},{"date":"2026-01-05 10:12:34","http_code":200},{"date":"2026-02-20 
16:30:07","http_code":200},{"date":"2026-03-10 19:09:43","http_code":200},{"date":"2026-03-16 08:56:32","http_code":503}],"broken":false,"last_checked":{"date":"2026-03-16 08:56:32","http_code":503},"process":"done"},{"id":5590,"href":"https:\/\/www.youtube.com\/user\/TheFoundryChannel\/videos","archived_href":"http:\/\/web-wp.archive.org\/web\/20250411133203\/https:\/\/www.youtube.com\/user\/TheFoundryChannel\/videos","redirect_href":"","checks":[{"date":"2025-12-29 02:10:04","http_code":200},{"date":"2026-01-05 10:12:28","http_code":200},{"date":"2026-02-20 16:30:03","http_code":200},{"date":"2026-03-10 19:09:40","http_code":200},{"date":"2026-03-16 08:56:28","http_code":200}],"broken":false,"last_checked":{"date":"2026-03-16 08:56:28","http_code":200},"process":"done"}]</script><a href="https://www.digitalproduction.com/event/siggraph-2017/" target="_blank" rel="noopener noreferrer"> Siggraph 2017</a> was a few days ago, but content from the conference is still being published. <a href="http://community.foundry.com/" target="_blank" rel="noopener noreferrer">Foundry</a> has now also uploaded the presentations from Siggraph.</p>
<h2 id="elara-cloud-pipeline">Elara – Cloud Pipeline</h2>
<p>With the <a href="https://www.youtube.com/watch?list=PLi2GhhsPL-RoYnBaq6sPU9MDtHt9RAUNY&v=GucggT8exxY" target="_blank" rel="noopener noreferrer">announcement at NAB 2017</a>, Foundry has set high expectations for the new pipeline infrastructure. <strong>Elara</strong> is a service for post-production that centralises the entire VFX pipeline in the cloud. Users should have fast and flexible access to organisation, VFX tools, storage and cloud rendering functions via a web browser. This presentation will introduce the features of Elara and show an example project from Inverse Normal. A limited release is expected at the end of 2017, with a full release in early 2018. Exact dates for this are not yet known. You can find more information about Elara here.</p>
<p><iframe class="youtube-player" width="1200" height="675" src="https://www.youtube.com/embed/Eppuj_MwZXQ?version=3&rel=1&showsearch=0&showinfo=1&iv_load_policy=1&fs=1&hl=en-US&autohide=2&wmode=transparent" allowfullscreen="true" style="border:0;" sandbox="allow-scripts allow-same-origin allow-popups allow-presentation allow-popups-to-escape-sandbox"></iframe></p>
<h2 id="nuke-11-feature-demo">Nuke 11 – Feature Demo</h2>
<p>Several new features have been added with the <a href="https://www.digitalproduction.com/2017/07/31/nuke-11-0-ist-da/" target="_blank" rel="noopener noreferrer">release of Nuke and Hiero 11</a>. These include LiveGroups, updates for the VFX Reference Platform, new Lens Distortion, Smartvector and more. Juan Salazar presents the new features in this live demo. Further information on the new release <a href="https://www.digitalproduction.com/2017/07/31/nuke-11-0-ist-da/" target="_blank" rel="noopener noreferrer">can be found here</a>.</p>
<p><iframe class="youtube-player" width="1200" height="675" src="https://www.youtube.com/embed/5j4oBcMOZ_E?version=3&rel=1&showsearch=0&showinfo=1&iv_load_policy=1&fs=1&hl=en-US&autohide=2&wmode=transparent" allowfullscreen="true" style="border:0;" sandbox="allow-scripts allow-same-origin allow-popups allow-presentation allow-popups-to-escape-sandbox"></iframe></p>
<p>More presentations from Foundry, including AR/VR Research, CARA VR and more can be <a href="https://www.youtube.com/user/TheFoundryChannel/videos" target="_blank" rel="noopener noreferrer">found here on the Youtube channel</a>.</p><p>The post <a href="https://digitalproduction.com/2017/08/22/foundry-siggraph-beitraege-online/">Foundry SIGGRAPH contributions online</a> first appeared on <a href="https://digitalproduction.com">DIGITAL PRODUCTION</a> and was written by <a href="https://digitalproduction.com/author/belabeier/">Bela Beier</a>. </p></div>]]></content:encoded>
					
		
		
		<enclosure url="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2017/08/maxresdefault-25.jpg?fit=1280%2C720&#038;quality=80&#038;ssl=1" length="47095" type="image/jpeg" />
<media:content xmlns:media="http://search.yahoo.com/mrss/" url="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2017/08/maxresdefault-25.jpg?fit=1200%2C675&#038;quality=80&#038;ssl=1" width="1200" height="675" medium="image" type="image/jpeg">
	<media:copyright>DIGITAL PRODUCTION</media:copyright>
	<media:title></media:title>
	<media:description type="html"><![CDATA[]]></media:description>
</media:content>
<media:thumbnail xmlns:media="http://search.yahoo.com/mrss/" url="https://i0.wp.com/digitalproduction.com/wp-content/uploads/2017/08/maxresdefault-25.jpg?fit=1200%2C675&#038;quality=80&#038;ssl=1" width="1200" height="675" />
<post-id xmlns="com-wordpress:feed-additions:1">61866</post-id>	</item>
	</channel>
</rss>
