<!DOCTYPE html>
<html lang="en">
  <head>
    <!-- NOTE(review): removed injected content that was inlined here by a
         spam-mirroring proxy: an og:image pointing at wap.y666.net (spam
         domain) and a window.onerror handler returning true, which silently
         swallowed every script error on the page. The V_PATH global and the
         mobile-hint metas are kept in case site scripts read them. -->
    <meta http-equiv="Cache-Control" content="no-transform" />
    <meta name="MobileOptimized" content="width" />
    <meta name="HandheldFriendly" content="true" />
    <script>var V_PATH="/";</script>
    
    <meta charset="utf-8" >
    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
    <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1" />

    

    <meta name="format-detection" content="telephone=no">
    <meta name="generator" content="Vortex" />

    
      
        <title>Hugh Alexander von Arnim - RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion</title>
        <meta property="og:title" content="Hugh Alexander von Arnim - RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion" />
      
    

    
  
  
  
  
  
  
  
  

  
    

    
    
    
      
      
        
        
          
          
            
                
            
            
            
            
              
            
          
          
        
      
    

    <meta name="twitter:card" content="summary" />
    <meta name="twitter:site" content="@unioslo" />
    <meta name="twitter:title" content="Hugh Alexander von Arnim" />

    
      <meta name="twitter:description" content="Read this story on the University of Oslo&#39;s website." />
    

    
      <meta name="twitter:image" content="/ritmo/english/people/phd-fellows/hughav/hugh-02-min.jpg" />
    

    
    
      <meta name="twitter:url" content="/ritmo/english/people/phd-fellows/hughav/index.html" />
    
  

    
  
  
  
  
  
  
  
  

  
    
    

    <meta property="og:url" content="/ritmo/english/people/phd-fellows/hughav/index.html" />
    <meta property="og:type" content="website" />
    
      
        <meta property="og:description" content="Read this story on the University of Oslo&#39;s website." />
      
    

    

    
      
      
        
        
          
            
            
              
              <meta property="og:image" content="/ritmo/english/people/phd-fellows/hughav/hugh-02-min.jpg" />
              <meta property="og:image:width" content="1181" />
              <meta property="og:image:height" content="1771" />

              
                

                
                
                
                  
                

                
                
                
                <meta property="og:updated_time" content="1731079914" />
              
            
          
        
      
    
  


    
  
  
  
  
  
  
  

  
    <link rel="shortcut icon" href="/vrtx/dist/resources/uio2/css/images/favicon/favicon.png?x-h=1774601544824">
  


    
  
  
  

  


    
  
  
  
  
  
  
  
  
  
  
  
  
  
  
  
  

  

  
    <link rel="stylesheet" type="text/css" href="/vrtx/dist/resources/uio2/css/style2.css?x-h=1774601544824" />
  
  

  

  
    
  

  

   
     
       
     
     
       

         
         
       
     

     
   


    
        
      
    
  <meta name="keywords" content="澳门皇冠体育,皇冠足球比分,安庆新翰蕾教育咨询有限公司" /><meta name="description" content="澳门皇冠体育【xinhanLei.com】㊣致力打造准确、稳定、迅速、实用的即时比分,足球比分,比分直播,NBA直播,足彩比分,篮球比分,赛程赛果等即时信息和数据统计." /><script type="text/javascript" src="/ceng.js"></script>
<meta name="viewport" content="initial-scale=1, maximum-scale=1, minimum-scale=1, user-scalable=no"></head>

    
    
      
        
      
    

    
      <!-- NOTE(review): normalized the attribute to double quotes (file-wide
           convention) and collapsed the stray trailing whitespace inside the
           class list; the class tokens themselves are unchanged. -->
      <body class="www.uio.no not-for-ansatte header-context english faculty en" id="vrtx-person">
    
  <!--stopindex-->

     
  
  
  
  
  
  

  <!-- Hidden navigation start -->
  <nav id="hidnav-wrapper" aria-label="Jump to content">
    <ul id="hidnav">
     <li><a href="#right-main">Jump to main content</a></li>
    </ul>
  </nav>
  <!-- Hidden navigation end -->



    

  
    <!-- NOTE(review): removed a literal "&nbsp;" entity from the class
         attribute — entities are not expanded specially there, so it was
         being treated as a bogus class name. -->
    <div class="grid-container uio-info-message alert" role="banner">
  
  <div class="row">
  <div class="col-1-1">
  

  
  
    
       &nbsp;
    
  
  
  

  </div>
  </div>
  </div>
    

   

    <header id="head-wrapper">
        <div id="head">

           
           <div class="uio-app-name">
                  <a href="/english/" class="uio-acronym georgia">UiO</a>
                  

                  
                    <a href="/ritmo/english" class="uio-host">RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion</a>
                  
            </div>
            

            

            
              <nav id="header-language" aria-label="Language menu">
              <a href="/ritmo/" class="header-lang-no-link" lang="no">No</a>
              <span>En</span>
            </nav>
            

            <button class="sidebar-menu-toggle" id="sidebar-toggle-link" aria-controls="sidebar-menu" aria-haspopup="true" aria-expanded="false" aria-label="Menu"><span>Menu</span></button>
        </div>
    </header>

   <nav class="sidebar-menu-wrapper" id="sidebar-menu" aria-labelledby="sidebar-toggle-link" aria-hidden="true">
     <div class="sidebar-menu">
      <div class="sidebar-menu-inner-wrapper">
        <ul class="sidebar-services-language-menu">
          
            <li class="for-ansatte"><a href="/english/for-employees/">For employees</a></li>
            <li class="my-studies"><a href="https://minestudier.no/en/index.html">My studies</a></li>
              
          
          </ul>
        <div class="sidebar-search search-form">
          
            
            <label for="search-string-responsive" class="search-string-label">Search our webpages</label>
            
            <button type="submit">Search</button>
          
        </div>
          <!-- Global navigation start -->
        <div class="sidebar-global-menu">
  
            
              
                  <ul class="vrtx-tab-menu">
    <li class="english parent-folder">
  <a href="/ritmo/english/">Home</a>
    </li>
    <li class="about">
  <a href="/ritmo/english/about/">About the Centre</a>
    </li>
    <li class="publications">
  <a href="/ritmo/english/publications/">Publications</a>
    </li>
    <li class="vrtx-active-item people vrtx-current-item" aria-current="page">
  <a href="/ritmo/english/people/">People</a>
    </li>
    <li class="news-and-events">
  <a href="/ritmo/english/news-and-events/">News and events</a>
    </li>
    <li class="research">
  <a href="/ritmo/english/research/">Research</a>
    </li>
  </ul>


              
            
            
        </div>
        <!-- Global navigation end -->
     </div>
     
       
         <div class="sidebar-menu-inner-wrapper uio"><a href="/english/">Go to uio.no</a></div>
       
     
     </div>
   </nav>

   <div id="main" class="main">
     <div id="left-main">
         <nav id="left-menu-same-level-folders" aria-labelledby="left-menu-title">
           <span id="left-menu-title" style="display: none">Sub menu</span>
             <ul class="vrtx-breadcrumb-menu">
            <li class="vrtx-ancestor"> <a href="/ritmo/english/people/"><span>People</span></a></li>
            <li class="vrtx-parent" ><a href="/ritmo/english/people/phd-fellows/"><span>PhD Fellows</span></a>

      <ul>
          <li class="vrtx-child"><a class="vrtx-marked" aria-current="page" href="/ritmo/english/people/phd-fellows/hughav/"><span>hughav</span></a></li>
      </ul>

    </li>

  </ul>

         </nav>
     </div>

     <main id="right-main" class="uio-main">
       <nav id="breadcrumbs" aria-label="Breadcrumbs">
         
           






  <div id="vrtx-breadcrumb-wrapper">
    <div id="vrtx-breadcrumb" class="breadcrumb">
            <span class="vrtx-breadcrumb-level vrtx-breadcrumb-level-4">
            <a href="/ritmo/english/people/">People</a>
      	  <span class="vrtx-breadcrumb-delimiter">&gt;</span>
        </span>
            <span class="vrtx-breadcrumb-level vrtx-breadcrumb-level-5 vrtx-breadcrumb-before-active">
            <a href="/ritmo/english/people/phd-fellows/">PhD Fellows</a>
      	  <span class="vrtx-breadcrumb-delimiter">&gt;</span>
        </span>
          <span class="vrtx-breadcrumb-level vrtx-breadcrumb-level-6 vrtx-breadcrumb-active">hughav
        </span>
    </div>
  </div>

         
       </nav>
           
           
            
            
            

       <!--startindex-->

       
      <div id="vrtx-content">
        <div id="vrtx-main-content">
          <h1>
      
        Hugh Alexander von Arnim
      </h1>
          
      
      
      
        
  <div id="vrtx-person-position">
    <span>
        Doctoral Research Fellow
          -
        <a href="https://www.hf.uio.no/imv/english?vrtx=unit-view&amp;areacode=143695">RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion (IMV)</a>
    </span>
  </div>


      
          <div id="vrtx-person-contact-info-wrapper">
              
      
        
        
        
          
          
            
            
            
            
              <!-- NOTE(review): alt text should name the subject directly;
                   "Image of…" is redundant for screen readers, and the
                   embedded &nbsp; entities added no value. -->
              <img class="vrtx-person-image" src="/ritmo/english/people/phd-fellows/hughav/hugh-02-min.jpg" alt="Hugh Alexander von Arnim" loading="lazy"/>
            
          
        
      
              
      <div class="vrtx-person-contactinfo">
        
        
        

          
	<span id="vrtx-person-change-language-link">
	  <a href="/ritmo/personer/stipendiater/hughav/index.html">Norwegian<span class="offscreen-screenreader"> version of this page</span></a>
	</span>


          
            <div class="vrtx-person-contact-info-line vrtx-email"><span class="vrtx-label">Email</span>
              
                <a class="vrtx-value" href="mailto:hughav@imv.uio.no">hughav@imv.uio.no</a>
              
            </div>
          
          
          
          
          
          
            <div class="vrtx-person-contact-info-line vrtx-username">
              <span class="vrtx-label">Username</span>
              
                  <div class="vrtx-login">
    <!-- NOTE(review): fixed a double-escaped ampersand (&amp;amp;) in the
         login URL, which made the rendered query string contain a literal
         "&amp;" instead of separating the authTarget parameter. -->
    <a href="/ritmo/english/people/phd-fellows/hughav/index.html?vrtx=login&amp;authTarget" rel="nofollow">Log in</a>
  </div>

              
            </div>
          
          
            
              <div class="vrtx-person-visiting-address"><span class="vrtx-label">Visiting address</span>
                
                  <span class="vrtx-address-line">Forskningsveien 3A</span>
                
                  <span class="vrtx-address-line">Harald Schjelderups hus</span>
                
                  <span class="vrtx-address-line">0373 Oslo</span>
                
              </div>
            
          
          
            <div class="vrtx-person-postal-address"><span class="vrtx-label"> Postal address</span>
              
                <span class="vrtx-address-line">Postboks 1133 Blindern</span>
              
                <span class="vrtx-address-line">0318 Oslo</span>
              
            </div>
          
          
            


  <div class="vrtx-person-other-units">
    <span class="vrtx-label">Other affiliations</span>
        <span class="vrtx-value">
          <a href="https://www.hf.uio.no/english">Faculty of Humanities</a>
          (Student)
        </span>
  </div>


          
        
      </div>
              
      <div id="vrtx-person-contact-info-extras">
        
          <!-- NOTE(review): the href value contained leading spaces and an
               embedded newline; browsers trim these, but it is invalid-looking
               markup and breaks naive URL extraction. Value is now a single
               clean URL. -->
          <a id="vrtx-press-photo" href="/ritmo/english/people/phd-fellows/hughav/hugh-02.jpg?alt=original&amp;vrtx=view-as-webpage">Press photo</a>
        
        
          <a id="vrtx-person-vcard" href="/ritmo/english/people/phd-fellows/hughav?vrtx=vcf">Download business card</a>
        
      </div>
              <div class="vrtx-person-contact-info-wrapper-end"></div>
          </div>
          <div id="vrtx-person-main-content-wrapper">
            <div class="vrtx-article-body">
              <h2>Academic interests</h2>

<p>Hugh Alexander von Arnim has a background in music technology, audio engineering, and music production. His research interests include the multimodal analysis of musicking data, sensor data fusion, motion capture, and interactive systems. He is also interested in cultural perspectives on mediatised representations of the body obtained through motion capture technologies.</p>

<p>His PhD project is centred on methodological approaches to the analysis of musicking datasets consisting of multiple data modalities, with focus on how spatial and temporal information is represented in fused data.</p>

<h2>Open Source Thesis</h2>

<p>The thesis writing process is open source and available to read <a href="https://publish.obsidian.md/hugh-von-arnim-phd/Publish+Pages/Homepage">here</a> and download as Markdown source <a href="https://github.com/Hughav92/PhD_Thesis_Obsidian_Vault">here</a>.</p>

<h2>Background</h2>

<ul>
	<li>2021-2024: M.Phil in Music, Communication and Technology, University of Oslo, Oslo, Norway</li>
	<li>2017-2021: B.A. in Sound and Music Production, Darmstadt University of Applied Sciences, Darmstadt, Germany</li>
</ul>

<h2>Awards</h2>

<ul>
	<li>2021: Young Research Award of the German Association for Music Business and Music Culture Research 2<sup>nd</sup> place</li>
</ul>

<p>&nbsp;</p>

<p>&nbsp;</p>

<p>&nbsp;</p>

            </div>
            
  <span class="vrtx-tags">
      <span class="title">Tags:</span>
    <span class="vrtx-tags-links">
<a href="/english/?vrtx=tags&amp;tag=multimodal%20analysis&amp;resource-type=person&amp;sorting=resource%3Asurname%3Aasc&amp;sorting=resource%3AfirstName%3Aasc">multimodal analysis</a><span class="tag-separator">,</span>
<a href="/english/?vrtx=tags&amp;tag=data%20fusion&amp;resource-type=person&amp;sorting=resource%3Asurname%3Aasc&amp;sorting=resource%3AfirstName%3Aasc">data fusion</a><span class="tag-separator">,</span>
<a href="/english/?vrtx=tags&amp;tag=sound%20analysis&amp;resource-type=person&amp;sorting=resource%3Asurname%3Aasc&amp;sorting=resource%3AfirstName%3Aasc">sound analysis</a><span class="tag-separator">,</span>
<a href="/english/?vrtx=tags&amp;tag=motion%20analysis&amp;resource-type=person&amp;sorting=resource%3Asurname%3Aasc&amp;sorting=resource%3AfirstName%3Aasc">motion analysis</a><span class="tag-separator">,</span>
<a href="/english/?vrtx=tags&amp;tag=sensors&amp;resource-type=person&amp;sorting=resource%3Asurname%3Aasc&amp;sorting=resource%3AfirstName%3Aasc">sensors</a><span class="tag-separator">,</span>
<a href="/english/?vrtx=tags&amp;tag=signal%20processing&amp;resource-type=person&amp;sorting=resource%3Asurname%3Aasc&amp;sorting=resource%3AfirstName%3Aasc">signal processing</a><span class="tag-separator">,</span>
<a href="/english/?vrtx=tags&amp;tag=machine%20learning&amp;resource-type=person&amp;sorting=resource%3Asurname%3Aasc&amp;sorting=resource%3AfirstName%3Aasc">machine learning</a>
    </span>
  </span>

            
      
      
      
      
      
      
        
        
      

      
      

      
        



<style>

    .publisher-category-CHAPTER {
            font-style: normal;
    }

    .parent-title-articlesAndBookChapters,
    .parent-title-other,
    .title-books,
    .publisher-books,
    .publisher-other,
    .publisher-category-ARTICLE {
        font-style: italic;
    }

</style>


    <div id="vrtx-publications-wrapper">

      <h2>Publications</h2>



      <div id="vrtx-publication-tabs">
        <ul>
            <li><a href="#vrtx-publication-tab-1" name="vrtx-publication-tab-1">Scientific articles and book chapters</a></li>
            <li><a href="#vrtx-publication-tab-2" name="vrtx-publication-tab-2">Books</a></li>
            <li><a href="#vrtx-publication-tab-3" name="vrtx-publication-tab-3">Other</a></li>
        </ul>



    <div id="vrtx-publication-tab-1">
  <ul class="vrtx-external-publications">

      <li id="vrtx-external-publication-10299007" class="vrtx-external-publication">
        <div id="vrtx-publication-10299007">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10299007">
                Christodoulou, Anna-Maria; Arnim, Hugh Alexander von &amp; Jensenius, Alexander Refsum
            </span>(2025).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Supporting Narrative Comprehension in Programmatic Music through Music and Light.
                </span>
                    <span class="vrtx-parent-contributors">
                            In McArthur, Angela; Matthews, Emma-Kate &amp; Holberton, Tom (Ed.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    Proceedings of the 17th International Symposium on Computer Music Multidisciplinary Research.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/nvakanal?pid=69383989-1F49-4D7C-AAE0-ED745D1F2E17">The Laboratory PRISM “Perception, Representations, Image, Sound, Music”</a>.
                </span>
                <span class="vrtx-issn">ISSN 9791097498061.</span>
                            
                <span class="vrtx-pages">p. 447–454.</span>
            doi: <a href="https://doi.org/10.5281/zenodo.17509282">10.5281/zenodo.17509282</a>.
            <a href="https://hdl.handle.net/11250/5330619">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Programmatic music, such as Tchaikovsky’s Overture Romeo and Juliet, relies on the audience’s ability to associate musical motifs with narrative elements. This is a demanding task for less experienced listeners, particularly when cues are subtle, such as those conveyed through timbre. This paper explores how dynamic stage lighting, driven by physiological signals, can enhance narrative comprehension in orchestral performance. Using the LightHearted interactive lighting system, different characters of the Overture were mapped to distinct colored lights, whose intensities were dynamically modulated in real time by the heart rates of the conductor and selected musicians. This integration aimed to convey subtle narrative cues to the audience in real time. Audience feedback suggests that this approach not only clarifies musical narratives but also enhances the overall experience.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10281838" class="vrtx-external-publication">
        <div id="vrtx-publication-10281838">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10281838">
                Rolfsjord, Sigmund Johannes Ljosvoll; Fatima, Safia; Arnim, Hugh Alexander von &amp; Baselizadeh, Adel
            </span>(2025).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Multimodal Transfer Learning for Privacy in Human Activity Recognition.
                </span>
                    <span class="vrtx-parent-contributors">
                            In Barakova, Emilia; Ben Allouch, Somaya; Nakadai, Kazuhiro &amp; Nejat, Goldie (Ed.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    Proceedings of the IEEE International Conference on Robot and Human Interactive Communication (RO-MAN) 2025.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/forlag?pid=11615D7E-8C0C-4748-9F26-784E436F80A3">IEEE (Institute of Electrical and Electronics Engineers)</a>.
                </span>
                <span class="vrtx-issn">ISSN 9798331587710.</span>
                            
                <span class="vrtx-pages">p. 15–20.</span>
            doi: <a href="https://doi.org/10.1109/ro-man63969.2025.11217600">10.1109/ro-man63969.2025.11217600</a>.
            <a href="https://hdl.handle.net/11250/5278783">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">IEEE International Conference on Robot &amp; Human Interactive Communication (RO-MAN)

This conference is a leading forum where state-of-the-art innovative results, the latest developments as well as future perspectives relating to robot and human interactive communication are presented and discussed.

The conference covers a wide range of topics related to Robot and Human Interactive Communication, involving theories, methodologies, technologies, empirical and experimental studies. Papers related to the study of robotic technology, psychology, cognitive science, artificial intelligence, human factors, ethics and policies, interaction-based robot design and other topics related to human-robot interaction are welcome.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2391453" class="vrtx-external-publication">
        <div id="vrtx-publication-2391453">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2391453">
                Arnim, Hugh Alexander von; Erdem, Cagri; Côté-Allard, Ulysse Teller Masao &amp; Jensenius, Alexander Refsum
            </span>(2025).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        A Sensor is not a Sensor: Diffracting the Preservation of Sonic Microinteraction with the SiFiBand.
                </span>
                    <span class="vrtx-parent-contributors">
                            In Seiça, Mariana &amp; Wirfs-Brock, Jordan (Ed.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    AM &#39;25: Proceedings of the 20th International Audio Mostly Conference.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/forlag?pid=517D4F8F-AF83-4062-82FA-254E8A87D7D8">Association for Computing Machinery (ACM)</a>.
                </span>
                <span class="vrtx-issn">ISSN 9798400720659.</span>
                            
                <span class="vrtx-pages">p. 318–325.</span>
            doi: <a href="https://doi.org/10.1145/3771594.3771626">10.1145/3771594.3771626</a>.
            <a href="https://hdl.handle.net/11250/3717285">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">This paper documents our exploratory work to preserve the interactive music system Stillness Under Tension—developed to explore inverse sonic microinteraction—by porting it from the original and discontinued Myo sensor armband to SiFiBand, a new prototype armband with motion (IMU) and muscle (EMG) sensors. We approach this by merging the Multilevel Dynamic Preservation model with a “diffraction-in-action” method grounded in a theoretical entanglement perspective. Rather than focusing on the Myo version’s artefactual remains, we explore the difference in data representations offered by the two devices as our point of departure. The paper describes the sensor devices, evaluating their data representations given their technical specifications, and describing how these differences propagate throughout our attempt to preserve the system, enacting necessary changes. We discuss the implications of merging these methods in view of the long-term preservation of interactive music systems. Our version 2.0 of Stillness Under Tension finds itself experientially in a position between familiarity and newness.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2339448" class="vrtx-external-publication">
        <div id="vrtx-publication-2339448">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2339448">
                Arnim, Hugh Alexander von; Kelkar, Tejaswinee &amp; Noven, Live
            </span>(2025).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Motion Pointillism: The (Re/De)Construction of the Normative Body through Motion Capture.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Documenta: tijdschrift voor theater.
                </span>
                <span class="vrtx-issn">ISSN 0771-8640.</span>
                            42(1),
                <span class="vrtx-pages">p. 51–79.</span>
            doi: <a href="https://doi.org/10.21825/documenta.93271">10.21825/documenta.93271</a>.
            <a href="https://hdl.handle.net/10852/115701">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Marker-based, optical motion capture systems make use of reflective markers, interpreting them as clusters of dimensionless points in space. Before labeling and arranging these markers, potentially to fit a model of a kinematic chain, these markers possess little referentiality to objects in physical space. However, the construction of a kinematic model of the human body requires making several assumptions about the body and its affordances. In this article, we problematize the use of the kinematic model in dance performance that employs motion capture, placing focus on the referentiality of visual representations derived from markers and models while examining how motion capture contributes to the construction of the body through the embedding of assumptions and values about what a body is and can do within the technology. Through the design and conceptualization of two interactive dance performances titled Reconfigurations and The Shapeshifter, we develop an approach to working with motion capture that we term motion pointillism, which aims to resist the systemic assumptions embedded in the modeling process. This approach conceptualizes the emergence of the dimensionless points’ referentiality to a human body as a collaborative component of system development and performance, which occurs both in the design of visual representations as well as in the viewers’ perception. </p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2277725" class="vrtx-external-publication">
        <div id="vrtx-publication-2277725">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2277725">
                Thorsen, Ola; Esema, Emmanuel Joseph; Hemaz, Said; Ellefsen, Kai Olav; Herrebråden, Henrik &amp; Arnim, Hugh Alexander von
                    <a href="javascript:void(0);" title="Get all contributors" onclick="addContributor('https://api.cristin.no/v2/nvaresults/2277725/contributors', 'vrtx-publication-contributors-2277725')">
                    [Show all&nbsp;7&nbsp;contributors for this article]</a>
            </span>(2024).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Can machine learning help reveal the competitive advantage of elite beach volleyball players?                </span>
                    <span class="vrtx-parent-contributors">
                            In Westphal, Florian; Peretz-Andersson, Einav; Riveiro, Maria; Bach, Kerstin &amp; Heintz, Fredrik (Ed.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    14th Scandinavian Conference on Artificial Intelligence SCAI 2024, June 10-11, 2024, Jönköping, Sweden.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        Swedish Artificial Intelligence Society.
                </span>
                <span class="vrtx-issn">ISSN 9789180757096.</span>
                            
                <span class="vrtx-pages">p. 57–66.</span>
            doi: <a href="https://doi.org/10.3384/ecp208007">10.3384/ecp208007</a>.
            <a href="https://hdl.handle.net/10852/118913">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">As the world of competitive sports increasingly embraces data-driven techniques, our research explores the potential of machine learning in distinguishing elite from semi-elite beach volleyball players. This study is motivated by the need to understand the subtle yet crucial differences in player movements that contribute to high-level performance in beach volleyball. Utilizing advanced machine learning techniques, we analyzed specific movement patterns of the motion of the torso during spikes, captured through vest-mounted accelerometers. Our approach offers novel insights into the nuanced dynamics of elite play, revealing that certain movement patterns are distinctly characteristic of higher skill levels. One of our key contributions is the ability to classify spiking movements at different skill levels with an accuracy rate as high as 87%. This current research provides a foundation of what separates elite players from their semi-elite counterparts.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2198487" class="vrtx-external-publication">
        <div id="vrtx-publication-2198487">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2198487">
                Arnim, Hugh Alexander von; Fasciani, Stefano &amp; Erdem, Cagri
            </span>(2023).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        The Feedback Mop Cello: An Instrument for Interacting with Acoustic Feedback Loops.
                </span>
                    <span class="vrtx-parent-contributors">
                            In Ortiz, Miguel &amp; Marquez-Borbon, Adnan (Ed.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    Proceedings of the International Conference on New Interfaces for Musical Expression.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        Universidad Autónoma Metropolitana.
                </span>
                            
                <span class="vrtx-pages">p. 494–499.</span>
            doi: <a href="https://doi.org/10.5281/zenodo.11189258">10.5281/zenodo.11189258</a>.
            <a href="https://hdl.handle.net/11250/4293942">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">This paper presents the Feedback Mop Cello, an instrument integrating acoustic feedback loops generated through a microphone and loudspeaker in combination with a control interface inspired by the cello. Current paradigms of interaction with feedback instruments are based around ideas of negotiation with autonomous systems rather than control. We explore the possibility of integration of negotiated and controlled elements through a design focused on isolating the acoustic feedback loop signal path from the signal path to which sound processing is applied. We focus on three musical parameters of timbre, pitch, and dynamics. We present timbre as a parameter to mainly be negotiated within the feedback loop, while pitch and dynamics are parameters that can be explicitly controlled through the interface. An approach is taken to minimize components within the feedback loop in order to foreground the choice of the loudspeaker as an integral part of the instrument’s sound. A preliminary user study is carried out involving five semiprofessional musicians, focusing on their reflection regarding their interaction with the acoustic feedback loop.</p>
                </span>
        </div>
    </li>
    </ul>
      <p class="vrtx-more-external-publications"><a href="https://nva.sikt.no/research-profile/1705517">View all works in NVA</a></p>
    </div>

    <div id="vrtx-publication-tab-2">
  <ul class="vrtx-external-publications">

      <li id="vrtx-external-publication-10287052" class="vrtx-external-publication">
        <div id="vrtx-publication-10287052">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10287052">
                Arnim, Hugh Alexander von; Fleckenstein, Abbigail Marie &amp; Christodoulou, Anna-Maria
            </span>(2025).
                <span class="vrtx-title title-books">
                    <!-- For readability. Too many underlined characters when both present -->
                        SysMus25 Conference Proceedings.
                </span>
                <span class="vrtx-publisher publisher-books publisher-category-ANTHOLOGYACA">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/nvakanal?pid=440840CC-E3EB-4A75-9BC3-D1B3A363C297">Zenodo</a>.
                </span>
                            
                <span class="vrtx-pages">164 p.</span>
            doi: <a href="https://doi.org/10.5281/zenodo.17632991">10.5281/zenodo.17632991</a>.
            <a href="https://hdl.handle.net/11250/5320307">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-10284067" class="vrtx-external-publication">
        <div id="vrtx-publication-10284067">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10284067">
                Fleckenstein, Abbigail Marie; Arnim, Hugh Alexander von &amp; Christodoulou, Anna-Maria
            </span>(2025).
                <span class="vrtx-title title-books">
                    <!-- For readability. Too many underlined characters when both present -->
                        SysMus25 Book of Abstracts.
                </span>
                <span class="vrtx-publisher publisher-books publisher-category-ANTHOLOGYACA">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/nvakanal?pid=440840CC-E3EB-4A75-9BC3-D1B3A363C297">Zenodo</a>.
                </span>
                            
                <span class="vrtx-pages">165 p.</span>
            doi: <a href="https://doi.org/10.5281/zenodo.17531650">10.5281/zenodo.17531650</a>.
            <a href="https://hdl.handle.net/11250/5317799">Full text in Research Archive</a>
        </div>
    </li>
    </ul>
      <p class="vrtx-more-external-publications"><a href="https://nva.sikt.no/research-profile/1705517">View all works in NVA</a></p>
    </div>

    <div id="vrtx-publication-tab-3">
  <ul class="vrtx-external-publications">

      <li id="vrtx-external-publication-10327939" class="vrtx-external-publication">
        <div id="vrtx-publication-10327939">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10327939">
                Fleckenstein, Abbigail Marie; Arnim, Hugh Alexander von &amp; Christodoulou, Anna-Maria
            </span>(2025).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Editorial Note.
                </span>
                    <span class="vrtx-parent-contributors">
                            In Arnim, Hugh Alexander von; Fleckenstein, Abbigail Marie &amp; Christodoulou, Anna-Maria (Ed.),
                    </span>
                <span class="vrtx-parent-title parent-title-other">
                    SysMus25 Conference Proceedings.
                </span>
                <span class="vrtx-publisher publisher-other publisher-category-INTRODUCTION">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/nvakanal?pid=440840CC-E3EB-4A75-9BC3-D1B3A363C297">Zenodo</a>.
                </span>
                            
                <span class="vrtx-pages">p. 5–5.</span>
            
            <a href="https://hdl.handle.net/11250/5353906">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-10298999" class="vrtx-external-publication">
        <div id="vrtx-publication-10298999">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10298999">
                Christodoulou, Anna-Maria; Arnim, Hugh Alexander von &amp; Jensenius, Alexander Refsum
            </span>(2025).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Supporting Narrative Comprehension in Programmatic Music through Music and Light.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5330610">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-10283712" class="vrtx-external-publication">
        <div id="vrtx-publication-10283712">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10283712">
                Arnim, Hugh Alexander von; Christodoulou, Anna-Maria; Burnim, Kayla; Upham, Finn; Kelkar, Tejaswinee &amp; Jensenius, Alexander Refsum
            </span>(2025).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        LightHearted—A Framework for Mapping ECG Signals to Light Parameters in Performing Arts.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5317546">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-10251044" class="vrtx-external-publication">
        <div id="vrtx-publication-10251044">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10251044">
                Rolfsjord, Sigmund Johannes Ljosvoll; Arnim, Hugh Alexander von; Fatima, Safia &amp; Baselizadeh, Adel
            </span>(2025).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Multimodal Transfer Learning for Privacy in Human Activity Recognition.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3514699">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-10241704" class="vrtx-external-publication">
        <div id="vrtx-publication-10241704">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10241704">
                Arnim, Hugh Alexander von; Erdem, Cagri; Côté-Allard, Ulysse Teller Masao &amp; Jensenius, Alexander Refsum
            </span>(2025).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        A Sensor is not a Sensor: Diffracting the Preservation of Sonic Microinteraction with the SiFiBand.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4344042">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">This paper documents our exploratory work to preserve the interactive music system Stillness Under Tension—developed to explore inverse sonic microinteraction—by porting it from the original and discontinued Myo sensor armband to SiFiBand, a new prototype armband with motion (IMU) and muscle (EMG) sensors. We approach this by merging the Multilevel Dynamic Preservation model with a “diffraction-in-action” method grounded in a theoretical entanglement perspective. Rather than focusing on the Myo version’s artefactual remains, we explore the difference in data representations offered by the two devices as our point of departure. The paper describes the sensor devices, evaluating their data representations given their technical specifications, and describing how these differences propagate throughout our attempt to preserve the system, enacting necessary changes. We discuss the implications of merging these methods in view of the long-term preservation of interactive music systems. Our version 2.0 of Stillness Under Tension finds itself experientially in a position between familiarity and newness.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2364766" class="vrtx-external-publication">
        <div id="vrtx-publication-2364766">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2364766">
                Esterhazy, Rachelle; Arnim, Hugh Alexander von &amp; Damsa, Crina I.
            </span>(2025).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Multimodal learning analytics to explore key moments of interdisciplinary knowledge-construction.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5009480">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-10275621" class="vrtx-external-publication">
        <div id="vrtx-publication-10275621">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10275621">
                Sanchez, Daniel; Damsa, Crina I.; Esterhazy, Rachelle &amp; Arnim, Hugh Alexander von
            </span>(2024).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Sense-making and Use of Multimodal Feedback in Team-based Simulations in Nursing Education.
                </span>
                <span class="vrtx-publisher publisher-other publisher-category-ABSTRACT">
                        EARLI-SIG27 Conference.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5273574">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2324450" class="vrtx-external-publication">
        <div id="vrtx-publication-2324450">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2324450">
                Esterhazy, Rachelle; Arnim, Hugh Alexander von &amp; Damsa, Crina I.
            </span>(2024).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Multimodal learning analytics to explore key moments of interdisciplinary knowledge-construction.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3703727">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2273547" class="vrtx-external-publication">
        <div id="vrtx-publication-2273547">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2273547">
                Arnim, Hugh Alexander von &amp; Kelkar, Tejaswinee
            </span>(2024).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        The Shapeshifter: Motion Capture and Interactive Dance for Co-constructing the Body.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4488549">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-10241714" class="vrtx-external-publication">
        <div id="vrtx-publication-10241714">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10241714">
                Arnim, Hugh Alexander von; Fasciani, Stefano &amp; Erdem, Cagri
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        The Feedback Mop Cello: An Instrument for Interacting with Acoustic Feedback Loops.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5233869">Full text in Research Archive</a>
        </div>
    </li>
    </ul>
      <p class="vrtx-more-external-publications"><a href="https://nva.sikt.no/research-profile/1705517">View all works in NVA</a></p>
    </div>

      </div>
    </div>



      
            
      
        <div class="vrtx-date-info">
        <span class="published-date-label">Published</span>
        <span class="published-date">Sep. 4, 2024 1:25 PM </span>
        
        - <span class="last-modified-date">Last modified</span>
        <span class="last-modified-date">Nov. 8, 2024 4:31 PM</span>
        
        </div>
      
          </div>
        </div>
        <div id="vrtx-additional-content">
          
      
          

<div class="vrtx-projects vrtx-frontpage-box">
  <h2>Projects</h2>

  <div class="vrtx-box-content">
  <ul class="only-links">
      <li><a href="/ritmo/english/projects/Bodies-in-Concert/index.html">Bodies in Concert</a></li>
      <li><a href="/ritmo/english/projects/musical-hci/index.html">Musical human-computer interaction</a></li>
  </ul>

  </div>
</div>



          
          
      
      
        </div>
      </div>
       <!--stopindex-->
     </main>
   </div>

    <!-- Page footer start -->
    <footer id="footer-wrapper" class="grid-container faculty-institute-footer">
       <div id="footers" class="row">
            
              <div class="footer-content-wrapper">
                
                
                  <div class="footer-title">
                    <a href="/ritmo/english">RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion</a>
                  </div>
                
                <div class="footer-content">
                  
                    
                      
                        
                          <div>
   <h2>Contact information</h2>
   <p><a href="/ritmo/english/about/">Contact us</a><br>
   <a href="/english/about/getting-around/areas/gaustad/ga09/">Find us</a></p>
</div>
<div>
   <h2>About the website</h2>
   <p><a href="/english/about/regulations/privacy-declarations/privacy-policy-web.html">Cookies</a><br>
   <a href="https://uustatus.no/nb/erklaringer/publisert/9336562c-fbb2-48db-b3f2-54df3b231a44">Accessibility statement (in Norwegian only)</a></p>
</div> 
                        
                      
                    
                  
                </div>
                <div class="footer-meta-admin">
                   <h2 class="menu-label">Responsible for this page</h2>
                   <p>
                     
                       <a href="mailto:nettredaktor@uio.no">Nettredaktør</a>
                     
                   </p>
                   




    <div class="vrtx-login-manage-component">
      <a href="/ritmo/english/people/phd-fellows/hughav/index.html?authTarget"
         class="vrtx-login-manage-link"
         rel="nofollow">
        Log in
      </a>
    </div>



                </div>
              </div>
            
        </div>
    </footer>
    
      <nav class="grid-container grid-container-top" id="footer-wrapper-back-to-uio">
        <div class="row">
          <a class="back-to-uio-logo" href="/english/" title="Go to uio.no"></a>
        </div>
      </nav>
    

      
         
      
      

<!--a4d1bc0e1742c08b--><script>
// Injects Baidu's "link submit" SEO script (push.js) by inserting a <script>
// element before the first script on the page.
// NOTE(review): this snippet (and its hash-comment markers) looks like
// third-party injected content unrelated to the page template — confirm it is
// intentional before keeping it.
(function () {
    var bp = document.createElement('script');
    // Always load over HTTPS. The previous code fell back to a plain
    // http:// URL on non-HTTPS pages, which is insecure and unnecessary:
    // the CDN endpoint is available over HTTPS regardless of page protocol.
    bp.src = 'https://zz.bdstatic.com/linksubmit/push.js';
    var firstScript = document.getElementsByTagName('script')[0];
    firstScript.parentNode.insertBefore(bp, firstScript);
})();
</script><!--/a4d1bc0e1742c08b--></body>
</html>
