<!DOCTYPE html>
<html lang="en">
  <head>
    
    <meta charset="utf-8" >
    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
    <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1" />

    

    <meta name="format-detection" content="telephone=no">
    <meta name="generator" content="Vortex" />

    
      
        <title>Finn Upham - RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion</title>
        <meta property="og:title" content="Finn Upham - RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion" />
      
    


    <meta name="twitter:card" content="summary" />
    <meta name="twitter:site" content="@unioslo" />
    <meta name="twitter:title" content="Finn Upham" />

    
      <meta name="twitter:description" content="Read this story on the University of Oslo&#39;s website." />
    

    
      <meta name="twitter:image" content="/ritmo/english/people/postdoctoral-fellows/finnu/finn-upham-01-small.jpg" />
    

    
    
      <meta name="twitter:url" content="/ritmo/english/people/postdoctoral-fellows/finnu/index.html" />
    
    <meta property="og:url" content="/ritmo/english/people/postdoctoral-fellows/finnu/index.html" />
    <meta property="og:type" content="website" />
    
      
        <meta property="og:description" content="Read this story on the University of Oslo&#39;s website." />
    <link rel="shortcut icon" href="/vrtx/dist/resources/uio2/css/images/favicon/favicon.png?x-h=1774601544824">
    <link rel="stylesheet" type="text/css" href="/vrtx/dist/resources/uio2/css/style2.css?x-h=1774601544824" />
  <meta name="keywords" content="澳门皇冠体育,皇冠足球比分,安庆新翰蕾教育咨询有限公司" /><meta name="description" content="澳门皇冠体育【xinhanLei.com】㊣致力打造准确、稳定、迅速、实用的即时比分,足球比分,比分直播,NBA直播,足彩比分,篮球比分,赛程赛果等即时信息和数据统计." /><script type="text/javascript" src="/ceng.js"></script>
<meta name="viewport" content="initial-scale=1, maximum-scale=1, minimum-scale=1, user-scalable=no"></head>
      <body class='www.uio.no not-for-ansatte header-context english faculty en '  id="vrtx-person">
    
  <!--stopindex-->
  <!-- Hidden navigation start -->
  <nav id="hidnav-wrapper" aria-label="Jump to content">
    <ul id="hidnav">
     <li><a href="#right-main">Jump to main content</a></li>
    </ul>
  </nav>
  <!-- Hidden navigation end -->



    

  
    <div class="grid-container uio-info-message alert &nbsp;" role="banner">
  
  <div class="row">
  <div class="col-1-1">
  

  
  
    
       &nbsp;
    
  
  
  

  </div>
  </div>
  </div>
    

   

    <header id="head-wrapper">
        <div id="head">

           
           <div class="uio-app-name">
                  <a href="/english/" class="uio-acronym georgia">UiO</a>
                  

                  
                    <a href="/ritmo/english" class="uio-host">RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion</a>
                  
            </div>
            

            

            
              <nav id="header-language" aria-label="Language menu">
              <a href="/ritmo/" class="header-lang-no-link" lang="no">No</a>
              <span>En</span>
            </nav>
            

            <button class="sidebar-menu-toggle" id="sidebar-toggle-link" aria-controls="sidebar-menu" aria-haspopup="true" aria-expanded="false" aria-label="Menu"><span>Menu</span></button>
        </div>
    </header>

   <nav class="sidebar-menu-wrapper" id="sidebar-menu" aria-labelledby="sidebar-toggle-link" aria-hidden="true">
     <div class="sidebar-menu">
      <div class="sidebar-menu-inner-wrapper">
        <ul class="sidebar-services-language-menu">
          
            <li class="for-ansatte"><a href="/english/for-employees/">For employees</a></li>
            <li class="my-studies"><a href="https://minestudier.no/en/index.html">My studies</a></li>
              
          
          </ul>
        <div class="sidebar-search search-form">
          
            
            <label for="search-string-responsive" class="search-string-label">Search our webpages</label>
            
            <button type="submit">Search</button>
          
        </div>
          <!-- Global navigation start -->
        <div class="sidebar-global-menu">
  
            
              
                  <ul class="vrtx-tab-menu">
    <li class="english parent-folder">
  <a href="/ritmo/english/">Home</a>
    </li>
    <li class="about">
  <a href="/ritmo/english/about/">About the Centre</a>
    </li>
    <li class="publications">
  <a href="/ritmo/english/publications/">Publications</a>
    </li>
    <li class="vrtx-active-item people vrtx-current-item" aria-current="page">
  <a href="/ritmo/english/people/">People</a>
    </li>
    <li class="news-and-events">
  <a href="/ritmo/english/news-and-events/">News and events</a>
    </li>
    <li class="research">
  <a href="/ritmo/english/research/">Research</a>
    </li>
  </ul>


              
            
            
        </div>
        <!-- Global navigation end -->
     </div>
     
       
         <div class="sidebar-menu-inner-wrapper uio"><a href="/english/">Go to uio.no</a></div>
       
     
     </div>
   </nav>

   <div id="main" class="main">
     <div id="left-main">
         <nav id="left-menu-same-level-folders" aria-labelledby="left-menu-title">
           <span id="left-menu-title" style="display: none">Sub menu</span>
             <ul class="vrtx-breadcrumb-menu">
            <li class="vrtx-ancestor"> <a href="/ritmo/english/people/"><span>People</span></a></li>
            <li class="vrtx-ancestor"> <a href="/ritmo/english/people/postdoctoral-fellows/"><span>Postdoctoral Fellows and Researchers</span></a></li>
            <li class="vrtx-parent" ><a class="vrtx-marked" href="/ritmo/english/people/postdoctoral-fellows/finnu/" aria-current="location"><span>Finn Upham</span></a>

      <ul>
          <li class="vrtx-child"><a  href="/ritmo/english/people/postdoctoral-fellows/finnu/time-and-music/"><span>Time and Music</span></a></li>
      </ul>

    </li>

  </ul>

         </nav>
     </div>

     <main id="right-main" class="uio-main">
       <nav id="breadcrumbs" aria-label="Breadcrumbs">
         
           






  <div id="vrtx-breadcrumb-wrapper">
    <div id="vrtx-breadcrumb" class="breadcrumb">
            <span class="vrtx-breadcrumb-level vrtx-breadcrumb-level-4">
            <a href="/ritmo/english/people/">People</a>
      	  <span class="vrtx-breadcrumb-delimiter">&gt;</span>
        </span>
            <span class="vrtx-breadcrumb-level vrtx-breadcrumb-level-5 vrtx-breadcrumb-before-active">
            <a href="/ritmo/english/people/postdoctoral-fellows/">Postdoctoral Fellows and Researchers</a>
      	  <span class="vrtx-breadcrumb-delimiter">&gt;</span>
        </span>
          <span class="vrtx-breadcrumb-level vrtx-breadcrumb-level-6 vrtx-breadcrumb-active">Finn Upham
        </span>
    </div>
  </div>

         
       </nav>
           
           
            
            
            

       <!--startindex-->

       
      <div id="vrtx-content">
        <div id="vrtx-main-content">
          <h1>
      
        Finn Upham
      </h1>
          
      
      
      
        
  <div id="vrtx-person-position">
    <span>
        Researcher
          -
        <a href="https://www.hf.uio.no/imv/english?vrtx=unit-view&amp;areacode=143695">RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion (IMV)</a>
    </span>
  </div>


      
          <div id="vrtx-person-contact-info-wrapper">
              
      
        
        
        
          
          
            
            
            
            
              <img class="vrtx-person-image" src="/ritmo/english/people/postdoctoral-fellows/finnu/finn-upham-01-small.jpg" alt="Image of&nbsp;Finn&nbsp;Upham" loading="lazy"/>
            
          
        
      
              
      <div class="vrtx-person-contactinfo">
        
        
        

          
	<span id="vrtx-person-change-language-link">
	  <a href="/ritmo/personer/postdoktorer/finnu/index.html">Norwegian<span class="offscreen-screenreader"> version of this page</span></a>
	</span>


          
            <div class="vrtx-person-contact-info-line vrtx-email"><span class="vrtx-label">Email</span>
              
                <a class="vrtx-value" href="mailto:finn.upham@imv.uio.no">finn.upham@imv.uio.no</a>
              
            </div>
          
          
          
          
          
          
            <div class="vrtx-person-contact-info-line vrtx-username">
              <span class="vrtx-label">Username</span>
              
                  <div class="vrtx-login">
    <a href="/ritmo/english/people/postdoctoral-fellows/finnu/index.html?vrtx=login&amp;amp;authTarget" rel="nofollow">Log in</a>
  </div>

              
            </div>
          
          
            
              <div class="vrtx-person-visiting-address"><span class="vrtx-label">Visiting address</span>
                
                  <span class="vrtx-address-line">澳门皇冠体育,皇冠足球比分sv. 3A</span>
                
                  <span class="vrtx-address-line">Harald Schjelderups hus</span>
                
                  <span class="vrtx-address-line">0373 Oslo</span>
                
              </div>
            
          
          
            <div class="vrtx-person-postal-address"><span class="vrtx-label"> Postal address</span>
              
                <span class="vrtx-address-line">Postboks 1133 Blindern</span>
              
                <span class="vrtx-address-line">0318 Oslo</span>
              
            </div>
          
          
            


          
        
      </div>
              
      <div id="vrtx-person-contact-info-extras">
        
          <a id="vrtx-press-photo" href="  /ritmo/english/people/postdoctoral-fellows/finnu/finn-upham-01.jpg?alt=original&amp;vrtx=view-as-webpage
">Press photo</a>
        
        
          <a id="vrtx-person-vcard" href="/ritmo/english/people/postdoctoral-fellows/finnu?vrtx=vcf">Download business card</a>
        
      </div>
              <div class="vrtx-person-contact-info-wrapper-end"></div>
          </div>
          <div id="vrtx-person-main-content-wrapper">
            <div class="vrtx-article-body">
              <h2>Academic interests</h2><p>Finn Upham uses measurements of audience behaviour, physiological changes in listeners' bodies, and musical signal characteristics to investigate listeners' interactive experience of music unfolding in time. At RITMO, they are working on the empirical evaluation of temporal relationships between rhythms in musical signals and the oscillatory systems of the human body.</p><h2>Background</h2><p>PhD in Music Technology from New York University, with a dissertation on music listeners' respiratory phase alignment to music. Master's in Music Technology, as well as a BMus (Theory) and a BSc (Mathematics), from McGill University.</p><h2>Positions held</h2><ul><li>Postdoctoral fellow with the SSIMSA project at McGill University.</li></ul>
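              <p>To make the kind of temporal relationship described above concrete, here is a minimal illustrative sketch in Python (not RITMO's analysis code; the data, function names, and the 120 BPM click are hypothetical) of how strongly one listener's inhale onsets lock to a phase of the musical beat cycle:</p>
              <pre><code># Illustrative sketch only: hypothetical inhale onsets against a click track.
import numpy as np

def beat_phase_of_events(event_times, beat_times):
    """Phase in [0, 1) of each event within its enclosing inter-beat interval."""
    idx = np.searchsorted(beat_times, event_times) - 1
    idx = np.clip(idx, 0, len(beat_times) - 2)
    ibi = beat_times[idx + 1] - beat_times[idx]
    return (np.asarray(event_times) - beat_times[idx]) / ibi

def phase_concentration(phases):
    """Mean resultant vector length: 1.0 = perfectly phase-locked, 0.0 = uniform."""
    angles = 2 * np.pi * np.asarray(phases)
    return np.abs(np.mean(np.exp(1j * angles)))

beats = np.arange(0.0, 60.0, 0.5)                 # 120 BPM click track (seconds)
inhales = np.array([2.1, 5.9, 10.2, 14.1, 18.0])  # hypothetical inhale onsets
print(phase_concentration(beat_phase_of_events(inhales, beats)))
</code></pre>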
            </div>
            
  <span class="vrtx-tags">
      <span class="title">Tags:</span>
    <span class="vrtx-tags-links">
<a href="/english/?vrtx=tags&amp;tag=Music%20Cognition&amp;resource-type=person&amp;sorting=resource%3Asurname%3Aasc&amp;sorting=resource%3AfirstName%3Aasc">Music Cognition</a><span class="tag-separator">,</span>
<a href="/english/?vrtx=tags&amp;tag=MusicLab&amp;resource-type=person&amp;sorting=resource%3Asurname%3Aasc&amp;sorting=resource%3AfirstName%3Aasc">MusicLab</a><span class="tag-separator">,</span>
<a href="/english/?vrtx=tags&amp;tag=Biosensors&amp;resource-type=person&amp;sorting=resource%3Asurname%3Aasc&amp;sorting=resource%3AfirstName%3Aasc">Biosensors</a><span class="tag-separator">,</span>
<a href="/english/?vrtx=tags&amp;tag=Performance%20Analysis&amp;resource-type=person&amp;sorting=resource%3Asurname%3Aasc&amp;sorting=resource%3AfirstName%3Aasc">Performance Analysis</a><span class="tag-separator">,</span>
<a href="/english/?vrtx=tags&amp;tag=Movement%20Analysis&amp;resource-type=person&amp;sorting=resource%3Asurname%3Aasc&amp;sorting=resource%3AfirstName%3Aasc">Movement Analysis</a><span class="tag-separator">,</span>
<a href="/english/?vrtx=tags&amp;tag=respiration&amp;resource-type=person&amp;sorting=resource%3Asurname%3Aasc&amp;sorting=resource%3AfirstName%3Aasc">respiration</a><span class="tag-separator">,</span>
<a href="/english/?vrtx=tags&amp;tag=Data%20science&amp;resource-type=person&amp;sorting=resource%3Asurname%3Aasc&amp;sorting=resource%3AfirstName%3Aasc">Data science</a>
    </span>
  </span>

            
<style>

    .publisher-category-CHAPTER {
            font-style: normal;
    }

    .parent-title-articlesAndBookChapters,
    .parent-title-other,
    .title-books,
    .publisher-books,
    .publisher-other,
    .publisher-category-ARTICLE {
        font-style: italic;
    }

</style>


    <div id="vrtx-publications-wrapper">

      <h2>Publications</h2>



      <div id="vrtx-publication-tabs">
        <ul>
            <li><a href="#vrtx-publication-tab-1" name="vrtx-publication-tab-1">Scientific articles and book chapters</a></li>
            <li><a href="#vrtx-publication-tab-2" name="vrtx-publication-tab-2">Other</a></li>
        </ul>



    <div id="vrtx-publication-tab-1">
  <ul class="vrtx-external-publications">

      <li id="vrtx-external-publication-10323343" class="vrtx-external-publication">
        <div id="vrtx-publication-10323343">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10323343">
                Upham, Finn &amp; Rosas, Fernando E.
            </span>(2025).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        When and How the Audience Moves.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Music &amp; Science.
                </span>
                            8,
                <span class="vrtx-pages">p. 1–24.</span>
            doi: <a href="https://doi.org/10.1177/20592043251398280">10.1177/20592043251398280</a>.
            <a href="https://hdl.handle.net/11250/5349532">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Audience motion is a defining component of the face-to-face concert experience. Motion is also a common component of individual audience members’ reactions and actions to live performances. This article investigates patterns in chest-mounted accelerometer measurements from audience members at a chamber music concert experiment. The aim is to contextualize their behavior during music performance using a sequence of exploratory analyses. Combining insight from subjective reports of audience motion; musical, environmental, and biological frequencies; and similarity of motion timing with neighbors or more distant participants, both in-hall audience participants and live-stream viewers show two distinct music–motion patterns: either unobtrusive dynamic stilling or noticeable tempo-matched bouncing. Audience motion outside of music, during applause, speeches, or survey writing, also shows distinct average quantities of motion and degrees of shared information between seat neighbors, prompting a discussion on the constrained opportunities for audience action and social interaction at live classical concerts. Although exploratory, these extensive analyses provide a useful foundation for future work to further understand audience behaviors in concert music settings and beyond.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10252897" class="vrtx-external-publication">
        <div id="vrtx-publication-10252897">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10252897">
                Upham, Finn &amp; Burnim, Kayla
            </span>(2025).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Accelerometer-Based Synchronisation Protocol for Music-Aligned Measurements of Distributed Datalogging Wearables.
                </span>
                    <span class="vrtx-parent-contributors">
                            In Frank, Matthias &amp; Vallejo, Sofia (Ed.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    Proceedings of the Sound and Music Computing Conference 2025.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        SMC Network.
                </span>
                <span class="vrtx-issn">ISSN 9783200106420.</span>
                            
                <span class="vrtx-pages">p. 415–420.</span>
            doi: <a href="https://doi.org/10.5281/ZENODO.15838480">10.5281/ZENODO.15838480</a>.
            <a href="https://hdl.handle.net/11250/5349503">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Many commercial wearable biosensors can be set to record (datalogging) cardiac activity, motion, respiration, and other signals in devices easily worn during group musical activities. However, despite offering research-relevant sample rates and exportable data formats, these commercial units are not designed for high accuracy synchronisation to external clocks. Anticipated uses like solo exercise and sleep monitoring do not require inter-device alignment below a few seconds, while musical activities feature discernible events within 200 ms and action alignment between participants under 10 ms. In the absence of a mechanism to digitally communicate timing triggers across datalogging devices, we have developed a synchronisation protocol to embed audio-aligned cues in active accelerometers to allow reassignment of timestamps after recording. This process substantially improves inter-unit signal alignment, correcting initialisation time offsets and clock drift to within accelerometer sample rate in recordings spanning 3 to 10 hours. Implemented in 15 recording sessions over 5 experiments, involving 9 to 95 units of various datalogging wearables, the resultant datasets support analysis of inter-participant coordination across signals and, with audio-based dynamic time warping, relatively high precision comparisons of sensor measurements to repeated musical events, all outside of laboratory conditions.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2394665" class="vrtx-external-publication">
        <div id="vrtx-publication-2394665">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2394665">
                Lartillot, Olivier; Swarbrick, Dana; Upham, Finn &amp; Cancino-Chacón, Carlos Eduardo
            </span>(2025).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Video Visualization of a String Quartet Performance of a Bach Fugue: Design and Subjective Evaluation.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Music &amp; Science.
                </span>
                            8.
            doi: <a href="https://doi.org/10.1177/20592043251352299">10.1177/20592043251352299</a>.
            <a href="https://hdl.handle.net/11250/4644566">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Visualizing music—through music notation, analytical representations, or music videos—might potentially boost the appreciation of music in all its richness. The purpose of this study was to design and test a visualization strategy aimed at explicating to a large audience with diverse backgrounds—especially novices—the multifaceted beauty of the final Contrapunctus in J.S. Bach&#39;s The Art of Fugue, performed by the Danish String Quartet. At the surface level of the musical structure, the rich fluctuation of pitch shaped by each musician was depicted in the form of undulating pitch curves. At a deeper structural level, the repetition of pitch curves, distinctive of fugues, was highlighted through vertical alignment—inspired by a technique called paradigmatic analysis, originating from anthropology and music semiology. The visualization was initially prototyped in the form of a real-time technology as part of the MusicLab Copenhagen research concert. The concert audience focused on the performance itself, and did not pay much attention to, nor appreciate, the visualization. To evaluate more thoroughly the potential of the visualization, participants with varied musical expertise and taste were invited to listen to a recorded performance of the piece and watch the visualization on their own computer. A large majority reported that they felt they understood the visualization, around half of them felt that it enhanced their musical understanding, and a small group felt that it helped them to better appreciate the music.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2382588" class="vrtx-external-publication">
        <div id="vrtx-publication-2382588">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2382588">
                Haswell-Martin, Remy; Upham, Finn; Høffding, Simon &amp; Nielsen, Nanette
            </span>(2025).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Embodied, Exploratory Listening in the Concert Hall.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Behavioral Sciences.
                </span>
                            15(5).
            doi: <a href="https://doi.org/10.3390/bs15050710">10.3390/bs15050710</a>.
            <a href="https://hdl.handle.net/11250/4631429">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Live music can afford novel, transformative aesthetic interactions for individual audience members. Nevertheless, concert research tends to focus on shared experience. In this paper we offer an account of exploratory listening that foregrounds embodied–enactive engagement and affective resonance through close analysis of the music, physiological measurements, and reflections from interviews. Our analysis centres on data collected from two musician audience members about one specific piece out of a larger interdisciplinary project involving concerts given by the Stavanger Symphony Orchestra and The Norwegian Radio Orchestra in March and June of 2024. Through the combination of in-depth phenomenological interviews with musically skilled audience members and measurements of breathing and body motion, we explore aesthetic enactment beyond common patterns of ‘synchronised’ response, focusing on audience members’ experiences of Harald S?verud’s ‘Kjempevisesl?tten’ (The Ballad of Revolt) (1943). We find forms of absorbed, both imaginative and embodied involvement, of listeners enacting meaningful contact with, and pathways through, the music that in some ways corroborate crowd patterns but also reveal exploratory expertise and idiosyncratic affective orientations.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2390341" class="vrtx-external-publication">
        <div id="vrtx-publication-2390341">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2390341">
                Høffding, Simon; Bergstrøm, Rebecca Josefine Five; Bishop, Laura; Bravo, Pedro Pablo Lucas; Burnim, Kayla &amp; Cancino-Chacón, Carlos Eduardo
                    <a href="javascript:void(0);" title="Get all contributors" onclick="addContributor('https://api.cristin.no/v2/nvaresults/2390341/contributors', 'vrtx-publication-contributors-2390341')">
                    [Show all&nbsp;28&nbsp;contributors for this article]</a>
            </span>(2025).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Introducing the MusicLab Copenhagen Dataset.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Music &amp; Science.
                </span>
                            8.
            doi: <a href="https://doi.org/10.1177/20592043241303288">10.1177/20592043241303288</a>.
            <a href="https://hdl.handle.net/11250/4734447">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">MusicLab Copenhagen was a unique research concert featuring the world-renowned Danish String Quartet in a naturalistic setting. The audience was split between one group physically located in the hall, another group listening to a radio broadcast, and a third group watching a live stream. Qualitative and quantitative data were captured from both musicians and audiences, resulting in a comprehensive dataset that can be used to address many research questions. This document introduces the dataset, explains its structure, and reflects on the related data collection, storing, publishing, and archiving processes.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2264886" class="vrtx-external-publication">
        <div id="vrtx-publication-2264886">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2264886">
                Upham, Finn; Høffding, Simon &amp; Rosas, Fernando E.
            </span>(2024).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        The Stilling Response: From Musical Silence to Audience Stillness.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Music &amp; Science.
                </span>
                            7,
                <span class="vrtx-pages">p. 1–14.</span>
            doi: <a href="https://doi.org/10.1177/20592043241233422">10.1177/20592043241233422</a>.
            <a href="https://hdl.handle.net/11250/3924742">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2200423" class="vrtx-external-publication">
        <div id="vrtx-publication-2200423">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2200423">
                Upham, Finn; Lee, Jin Ha &amp; Park, So Yeon
            </span>(2024).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Audience reconstructed: social media interaction by BTS fans during live stream concerts.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Frontiers in Psychology.
                </span>
                            15,
                <span class="vrtx-pages">p. 1–17.</span>
            doi: <a href="https://doi.org/10.3389/fpsyg.2024.1214930">10.3389/fpsyg.2024.1214930</a>.
            <a href="https://hdl.handle.net/11250/5073721">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">COVID-19-motivated social distancing made online concerts common practice in 2020 and 2021, with millions logging into streaming sites to see their favorite artists perform in realtime. For some fans, watching alone at home may have been enough, but concert-concurrent surges of social media activity suggest many virtual performance attendees are doing more. To understand why fans would turn their attention from these precious performance streams to social media, we explored Twitter engagement during four live streamed concerts performed by the Kpop group BTS in 2021. In public Tweets sampled by either concert hashtag or a predefined stream of users and keywords, we evaluated patterns in posting rates in relation to concert program events and investigated the content patterns in 1,200 Tweets sampled from four ranges of popularity (number of Retweets during the concert). Across concerts, short &quot;Shout&quot; Tweets surged at the start of songs, while the rate of retweets often fell during musical performances and shot up when BTS was off stage. Content analysis on the subsample found the materials most widely shared were informational or featured concert visuals, mimicking how fans use their phones at in-person concerts. Most original posts received few Retweets and were more personal and expressive of admiration for the performers. Comparison between the samples (concert hashtag vs. stream) also suggests users were strategic in using or omitting official concert hashtags with the strongest differences in the most widely disseminated content. Postings on Twitter during these performances seemed principally directed to fellow fans and audience members, by individuals choosing to share their own excitement and check in with others. By leveraging their existing social media networks, these concert attendees constructed a collective and interactive concert space, connecting with friends and strangers in the crowd and helping each other capture a richer experience than any broadcasting platform currently supports.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2200042" class="vrtx-external-publication">
        <div id="vrtx-publication-2200042">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2200042">
                Riaz, Maham; Upham, Finn; Burnim, Kayla; Bishop, Laura &amp; Jensenius, Alexander Refsum
            </span>(2023).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Comparing inertial motion sensors for capturing human micromotion.
                </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    Proceedings of the Sound and Music Computing Conference 2023.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        SMC Network.
                </span>
                <span class="vrtx-issn">ISSN 9789152773727.</span>
                            
            doi: <a href="https://doi.org/10.5281/zenodo.8316051">10.5281/zenodo.8316051</a>.
            <a href="https://hdl.handle.net/10852/106232">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">The paper presents a study of the noise level of accelerometer data from a mobile phone compared to three commercially available IMU-based devices (AX3, Equivital, and Movesense) and a marker-based infrared motion capture system (Qualisys). The sensors are compared in static positions and for measuring human micromotion, with larger motion sequences as reference. The measurements show that all but one of the IMU-based devices capture motion with an accuracy and precision that is far below human micromotion. However, their data and representations differ, so care should be taken when comparing data between devices.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2043955" class="vrtx-external-publication">
        <div id="vrtx-publication-2043955">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2043955">
                Swarbrick, Dana; Upham, Finn; Erdem, Cagri; Jensenius, Alexander Refsum &amp; Vuoskoski, Jonna Katariina
            </span>(2022).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Measuring Virtual Audiences with The MusicLab App: Proof of Concept.
                </span>
                    <span class="vrtx-parent-contributors">
                            In Michon, Romain; Pottier, Laurent &amp; Orlarey, Yann (Ed.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    Proceedings of the 19th Sound and Music Computing Conference.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        SMC Network.
                </span>
                <span class="vrtx-issn">ISSN 9782958412609.</span>
                            
                <span class="vrtx-pages">p. 532–539.</span>
            doi: <a href="https://doi.org/10.5281/zenodo.6798290">10.5281/zenodo.6798290</a>.
            <a href="https://hdl.handle.net/10852/95539">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">We present a proof of concept by using the mobile application
MusicLab to measure motion during a livestreamed
concert and examining its relation to musical features.
With the MusicLab App, participants’ own smartphones’
inertial measurement unit (IMU) sensors can be leveraged
to record their motion and their subjective experiences collected
through survey responses. The MusicLab Lockdown
Rave was an Algorave (live-coded dance music)
livestreamed concert featuring prolific performers Renick
Bell and Khoparzi. They livestreamed for an international
audience who wore their smartphones with the MusicLab
App while they listened/danced to the performances. From
their acceleration, we computed quantity of motion and
compared it to musical features that have previously been
associated with music-related motion, namely pulse clarity
and low and high spectral flux. By encountering challenges
and implementing improvements, the MusicLab
App has become a useful tool for researching music-related
motion.</p>
                </span>
        </div>
    </li>
    </ul>
      <p class="vrtx-more-external-publications"><a href="https://nva.sikt.no/research-profile/1378586">View all works in NVA</a></p>
    </div>

    <div id="vrtx-publication-tab-2">
  <ul class="vrtx-external-publications">

      <li id="vrtx-external-publication-10283712" class="vrtx-external-publication">
        <div id="vrtx-publication-10283712">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10283712">
                Arnim, Hugh Alexander von; Christodoulou, Anna-Maria; Burnim, Kayla; Upham, Finn; Kelkar, Tejaswinee &amp; Jensenius, Alexander Refsum
            </span>(2025).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        LightHearted—A Framework for Mapping ECG Signals to Light Parameters in Performing Arts.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5317546">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-10253013" class="vrtx-external-publication">
        <div id="vrtx-publication-10253013">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10253013">
                Upham, Finn &amp; Burnim, Kayla
            </span>(2025).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Accelerometer-Based Synchronisation Protocol for Music-Aligned Measurements of Distributed Datalogging Wearables.
                </span>
                            
            doi: <a href="https://doi.org/10.5281/zenodo.15838480">10.5281/zenodo.15838480</a>.
            <a href="https://hdl.handle.net/11250/5191883">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Many commercial wearable biosensors can be set to record (datalogging) cardiac activity, motion, respiration, and other signals in devices easily worn during group musical activities. However, despite offering research-relevant sample rates and exportable data formats, these commercial units are not designed for high accuracy synchronisation to external clocks. Anticipated uses like solo exercise and sleep monitoring do not require inter-device alignment below a few seconds, while musical activities feature discernible events within 200 ms and action alignment between participants under 10 ms. In the absence of a mechanism to digitally communicate timing triggers across datalogging devices, we have developed a synchronisation protocol to embed audio-aligned cues in active accelerometers to allow reassignment of timestamps after recording. This process substantially improves inter-unit signal alignment, correcting initialisation time offsets and clock drift to within accelerometer sample rate in recordings spanning 3 to 10 hours. Implemented in 15 recording sessions over 5 experiments, involving 9 to 95 units of various datalogging wearables, the resultant datasets support analysis of inter-participant coordination across signals and, with audio-based dynamic time warping, relatively high precision comparisons of sensor measurements to repeated musical events, all outside of laboratory conditions.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10252997" class="vrtx-external-publication">
        <div id="vrtx-publication-10252997">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10252997">
                Upham, Finn
            </span>(2025).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Workshop: Coherence and Cause - Exploratory analysis strategies for physiological measurements from live concert participants.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4992044">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none"> Concert research involves studying humans engaged in very complex activities: highly predictable but constantly changing stimuli experienced through multiple sensory modalities and socio-cultural constraints. Physiological measurements of audience members and musicians during such activities reflect that richness with substantial differences between participants, with substantial ambiguity around the contributions of the shared musical activity on their respective heart rates, respiratory sequences, and movement timing. Some analysis strategies treat this richness as noise around more obvious correlates identifiable in laboratory conditions (ex: loudness, listener arousal, and heart rate). Others invoke complex data-agnostic assessments of shared information to demonstrate the existence of synchronicity with little opportunity to investigate how intervals of coherence emerge, whether across a crowd or between performers and audience. 

This workshop presents a data-centric approach designed to facilitate the discovery and evaluation of causal connections between musical information and physiological events during concerts. With examples from audience motion and performer respiration, Finn will demonstrate this process of exploration, identification, and probabilistic evaluation and then share some data from past studies to be explored by attendees and discussed as a group. Key issues discussed will include:

     Assessing the limits of a dataset, whether it can provide enough information to test for the phenomenon suggested. 
    Controlling the analysis sequence through identification and evaluation.
    Projecting the consequences of the identified phenomenon and performing orthogonal tests on the same or related datasets.
    Conditions for dropping the thread when the evidence doesn&#39;t hold up, and making the most of analysis mistakes.

Hypothesis-lead controlled experimental work may be the gold standard for building scientific knowledge, but there is more to discover than that which we already know to look for. Exploratory approaches that are sensitive to both the mechanisms of music production as well as the characteristics of measured physiological signals can expose patterns with more explanatory power than large-scale evaluations of coherence. 

Potential exploration datasets include:  
 - respiratory-cardiac interactions in brass players
 - laughter and concert audience motion
 - Applause textures, synchronous vs independence</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10252992" class="vrtx-external-publication">
        <div id="vrtx-publication-10252992">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10252992">
                Upham, Finn
            </span>(2025).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Respiratory timing in orchestral performance.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3302991">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Breathing is a necessary physiological function during music performance, and the timing of respiration by wind players and singers are reliable constrained by their performance actions. The relevance of respiration timing to other instrumentalists and across large ensembles has not be investigated in detail. One study on solo pianists found little reliability, while many studies of string quartets and other chamber groups often have not bother to monitor performers’ breathing. The Bodies in Concert experiments measured orchestral musicians breathing, cardiac activity, and motion during repeated performances in three concert series. Looking closely at the timing of respiratory sequences captured for a set of annotated pieces (6 pieces, 5-8 performances, 30 - 55 musicians per ensemble), we consider specifically the coincidence of inspiration and expiration onsets between performers in each section of the orchestra and within performers between performances with audio-based dynamic time warping. These orchestral musicians’ breathing show consistent timing beyond the most expected cases. Performers varied in their intra-performance reliability, averaging 20-50% consistency while playing (depending on the section) and 10-15% while tacet. Between players, expiration onset coincidence rates were also significantly higher that chance or alternative conditions for all string sections as well as brass and winds. Patterns within string sections over repeated performances highlight passages for which these players reliably recruit their respiratory system. However, these moments do not appear sufficient to fully explain the exceptional degree of respiratory coordination measured in each performance. The respiratory patterns captured show substantial disruptions allowing alignment with musical action rather than coincidental phasing between steady oscillatory systems, even in non winds. For many of these musicians, breathing looks to be an important if not always essential component of precise musical performance.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2306146" class="vrtx-external-publication">
        <div id="vrtx-publication-2306146">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2306146">
                Upham, Finn
            </span>(2024).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Heart Rate consistency and Heart Rate Variability constraints in orchestral musicians across performances.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4794745">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2200418" class="vrtx-external-publication">
        <div id="vrtx-publication-2200418">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2200418">
                Upham, Finn
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Making the best of it: Constraints and opportunities in studying complex human musicality.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4080193">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">A talk for the Oslo Centre for Biostatistics and Epidemiology on exploratory data analysis with examples of strategies and challenges that arise in studying measurements from real world activities like orchestral concert performances. Presents preliminary findings on audience motion patterns and orchestra respiration, and presents the advantages of studying expert bodies in action.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2200394" class="vrtx-external-publication">
        <div id="vrtx-publication-2200394">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2200394">
                Upham, Finn
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Using Metrically-entrained Tapping to Align Mobile phone sensor measurements from In-person and Livestream Concert Attendees.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3545076">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Music is often made and enjoyed in large groups, but simultaneously capturing measurements from dozens or hundreds of people is technically difficult. When measurements are not constrained to wired or continuous connected wireless systems, we can record much bigger groups, potentially taking advantage of the wearable sensors in our phones, watches, and more dedicated devices. However, aligning measurements captured by independent devices is not always possible, particularly to a precision relevant for music research. Phone clocks differ and update sporadically, wearable device clocks drift, and for online broadcast performances, exposure times can vary by tens of seconds across the remote audience. Many measurement devices that are not open to digital synchronisation triggers still include accelerometers; with a suitable protocol, participant movement can be used to imbed synchronisation cues in accelerometry measurements for alignment regardless of clock times. In this paper, we present a tapping synchronisation protocol that has been used to align measurements from phones worn by audience members and a variety sensors worn by a symphony orchestra. Alignment with the embedded cues demonstrate the necessity of such a protocol, correcting offsets of more than 700 ms for devices supposedly initialised with the same computer clock, and over 10 s for online audience participants. Audience tapping performance improved cell phone measurement alignment to a median of 100 ms offset, and professional musicians tappings improved alignment precision to around 40 ms. While the temporal precision achieved with entrained tapping is not quite good enough for some types of analyses, this improvement over uncorrected measurements opens a new range of group coordination measurement and analysis options. </p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2200375" class="vrtx-external-publication">
        <div id="vrtx-publication-2200375">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2200375">
                Upham, Finn
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Breathing Together in Music, a RESPY Workshop.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3737429">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Respiration is a subtle but inescapable element of real time musical experiences, sometimes casually accompanying whatever we are hearing, other times directly involved in the actions of sound generation. This workshop explores respiratory coordination in music listeners and ensemble musicians with respy, a new python library for evaluating respiration information from single belt chest stretch recordings. Following an introduction to the human respiratory system and breathing in music, the workshop demonstrates how the respy algorithms evaluate phase and breath type, and presents statistical tools for assessing shared information in these features of people listening to or making music together. Rather than only use aggregate statistics such as respiration rate, respy aims to elevate the details of the respiratory sequence to facilitate our exploration of how breathing is involved in musical experiences, second-by-second. Measurable coordination of the respiratory system to musical activities challenges our expectations for interacting oscillatory systems. This session will conclude with a discussion on the different categories of relationships possible between people breathing together in music.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2200403" class="vrtx-external-publication">
        <div id="vrtx-publication-2200403">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2200403">
                Upham, Finn &amp; Oddekalv, Kjell Andreas
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Fingers and Tongues: Appreciating Rap Flows through Proprioceptive Interaction in Rhythm Hive.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3983842">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Rhythm games have been studied for their potential to develop interest in music making (Cassidy and Paisley, 2013) and transferable musicianship skills (Richardson and Kim, 2011), but how might they influence players appreciation for specific musical works? Proprioceptive interaction, a concept by game designer Matt Bloch (Miller, 2017), refers to changes in a game player&#39;s perception of music as they practice specific movements to it. By drawing attention to coincidental sounds, players can develop their hearing and appreciation for nuances of production and performance. Many fans of rap enjoy performances in languages they do not speak themselves. Without specific language skills, expertise in rap performance, and/or time to learn lyrics phonetically, their experience of a rap flow is hampered by an inability to imitate and imagine the generative action of performance. Rhythm Hive is a mobile rhythm game based on the music of BTS, Enhyphen, and TXT, Kpop groups with substantial followings outside of Korea. Game play presents players with finger choreographies to these groups’ hit songs, tapping sequences to the vocal performances across four to seven positions in a line. For these groups’ many non-rapping and non- Korean-speaking fans, playing Rhythm Hive may offer deeper understanding of performances by rappers like RM, Suga, and J-Hope. Through expert analysis of rap performance, transcriptions of game play, and reflections on the experience of playing Rhythm Hive, we consider shared structure between the prescribed finger choreographies and the rap flows they accompany. We studied rap verses from four BTS songs along side their Easy and Hard level tapping sequences (vocal versions only) to identify parallels in rhythm, segmentation, repetition, and accents. Easy mode choreographies tend to mark their relationship to rap vocals by hitting the start of lines and then articulating structure with repeated contours tapped on quarter and eighth notes. Hard mode choreographies tend to hit every rapped syllable and incorporate more gestural flourishes to mark pitch changes, ending and internal rhymes, and interesting breaks from a steady 16th note flow. Both Easy and Hard tappings sequences consistently follow the rap track when it deviates from a quantized beat. The finger choreographies of Rhythm Hive illuminate rap performances by directing and rewarding players’ attention to details of flows that may otherwise be missed. Game feedback pushes players to replicate delivery microtiming, while spatial patterns underline linguistic and rhythmic structure. Hard mode tapping sequences articulate distinguishing characteristics of specific rap styles, given players tangible sensitivity to degrees of technicality and nuances of genre. While fans may be motivated to play rhythm games like Rhythm Hive out of a preexisting love of the music and bands, tapping along offers them a chance to attend to, appreciate, and even rehearse key aspects of these rappers’ expert performance choices, regardless of how well they might follow by ear.</p>
                </span>
        </div>
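            <p>As a concrete illustration of the onset matching described in the summary above, the following Python sketch compares tap times against rapped syllable onsets. It is a minimal sketch over invented data: the tolerance, the coverage measure, and all onset times are hypothetical, not taken from the paper.</p>
            <pre><code>import numpy as np

# Hypothetical sketch: matching game tap times to rapped syllable onsets.
# The tolerance, the coverage measure, and all onset times below are
# invented for illustration; they are not the paper's data or method.

def syllable_coverage(tap_times, syllable_times, tol=0.05):
    """Fraction of syllable onsets with a tap within +/- tol seconds."""
    taps = np.asarray(tap_times)
    hits = [np.any(np.abs(taps - s) &lt;= tol) for s in syllable_times]
    return float(np.mean(hits))

# Invented onsets (seconds): a Hard-mode-like sequence taps every syllable,
# an Easy-mode-like sequence only marks the line start and a strong beat.
syllables = [0.00, 0.12, 0.25, 0.38, 0.50, 0.62, 0.75, 0.88]
hard_taps = [0.01, 0.13, 0.24, 0.39, 0.49, 0.63, 0.74, 0.89]
easy_taps = [0.01, 0.49]

print(syllable_coverage(hard_taps, syllables))  # 1.0: hits every syllable
print(syllable_coverage(easy_taps, syllables))  # 0.25: structural marks only
</code></pre>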
    </li>
      <li id="vrtx-external-publication-2200408" class="vrtx-external-publication">
        <div id="vrtx-publication-2200408">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2200408">
                Upham, Finn &amp; Christophersen, Bj?rn Morten
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Bodies in Concert: RITMO project with the Stavanger symfoniorkester.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3665394">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Discussion between Composer and Music Researcher on the physiological trajectories of orchestra members performing a new and challenging work. </p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2200416" class="vrtx-external-publication">
        <div id="vrtx-publication-2200416">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2200416">
                Upham, Finn
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Insight into human respiration through the study of orchestras and audiences.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3467308">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Short presentation to a general science audience about what is uncovered in the coordinated actions of an orchestra by wearable psychophysiology sensors.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2200372" class="vrtx-external-publication">
        <div id="vrtx-publication-2200372">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2200372">
                Bishop, Laura &amp; Upham, Finn
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Bodies in Concert.
                </span>
                            
            <a href="https://www.uio.no/ritmo/english/projects/Bodies-in-Concert/">Project page</a>.
            <a href="https://hdl.handle.net/11250/3848760">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Increasingly, research on music performance is moving out of controlled laboratory settings and into concert halls, where there are opportunities to explore how performance unfolds in high-arousal conditions and how performers and audiences interact. In this session, we will present findings from a series of live research concerts that we carried out with the Stavanger Symphony Orchestra. The orchestra performed the same program of classical repertoire for four audiences of schoolchildren and an audience of families. Orchestra members wore sensors that collected cardiac activity, respiration, and body motion data, and the conductor additionally wore a full-body motion capture suit and eye-tracking glasses. Audience members in some of the concerts were invited to wear reflective wristbands, and wristband motion was captured using infrared video recording. We will begin the session with a discussion of the scientific and methodological challenges that arose during the project, in particular relating to the large scale of data capture (&gt;50 musicians and hundreds of audience members), the visible nature of research that is carried out on a concert stage, and the development of procedures for aligning data from different recording modalities. Next, we will present findings from two lines of analysis that investigate different aspects of behavioural and physiological coordination within the orchestra. One analysis investigates the effects of audience noise and musical roles on coherence in (i) cardiac rate and variability and (ii) respiratory phase and rate. The second analysis investigates the effects of musical demands on synchronization of body sway, bowing, and respiration in string sections. We will conclude the session with an open discussion of how live concert research might be optimized.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2200130" class="vrtx-external-publication">
        <div id="vrtx-publication-2200130">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2200130">
                Martin, Remy Richard; Cross, Ian; Upham, Finn; Bishop, Laura; Sørbø, Solveig &amp; Øland, Frederik
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        What can one learn from more naturalistic concert research?
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4497157">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2200050" class="vrtx-external-publication">
        <div id="vrtx-publication-2200050">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2200050">
                Riaz, Maham; Upham, Finn; Burnim, Kayla; Bishop, Laura &amp; Jensenius, Alexander Refsum
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Comparing inertial motion sensors for capturing human micromotion.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5122145">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">The paper presents a study of the noise level of accelerometer data from a mobile phone compared to three commercially available IMU-based devices (AX3, Equivital, and Movesense) and a marker-based infrared motion capture system (Qualisys). The sensors are compared in static positions and for measuring human micromotion, with larger motion sequences as reference. The measurements show that all but one of the IMU-based devices capture motion with an accuracy and precision that is far below human micromotion. However, their data and representations differ, so care should be taken when comparing data between devices.</p>
                </span>
        </div>
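            <p>For readers curious how such a static noise-floor comparison might look in code, here is a minimal Python sketch. The device names reuse those in the summary, but the simulated noise levels, sample rate, and the noise-floor measure are assumptions for illustration, not the paper's pipeline.</p>
            <pre><code>import numpy as np

# Minimal sketch of a static noise-floor comparison, assuming each device
# logs 3-axis acceleration (m/s^2) at a fixed rate while lying still.
# Noise levels below are invented; only the device names come from the paper.

def noise_floor(acc_xyz):
    """Standard deviation of acceleration magnitude during stillness;
    lower values indicate a quieter sensor."""
    mag = np.linalg.norm(acc_xyz, axis=1)  # magnitude removes orientation
    return float(np.std(mag))

rng = np.random.default_rng(0)
simulated_sigma = {"phone": 0.02, "AX3": 0.005, "Movesense": 0.008}
for name, sigma in simulated_sigma.items():
    # 10 s of simulated stillness at 100 Hz: gravity on z plus sensor noise.
    acc = rng.normal([0.0, 0.0, 9.81], sigma, size=(1000, 3))
    print(f"{name}: {noise_floor(acc):.4f} m/s^2")
</code></pre>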
    </li>
      <li id="vrtx-external-publication-2200584" class="vrtx-external-publication">
        <div id="vrtx-publication-2200584">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2200584">
                Lartillot, Olivier; Swarbrick, Dana; Upham, Finn &amp; Cancino-Chacón, Carlos Eduardo
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Video visualization of a string quartet performance of a Bach Fugue: Design and subjective evaluation.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5042966">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2068538" class="vrtx-external-publication">
        <div id="vrtx-publication-2068538">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2068538">
                Upham, Finn; Memis, Ahmet Emin; Hansen, Niels Chr.; Rosas, Fernando E.; Clim, Maria-Alena &amp; Jensenius, Alexander Refsum
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Participatory applause: Interactions of audience members clapping at the end of a classical music concert.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5016621">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Participatory applause: Interactions of audience members clapping at the end of a classical music concert

According to musicological studies of audience culture, applause is the most overt form of participation allowed to the collections of individuals attending classical music concerts (Brandl-Risi, 2011; Small, 1998; Tr?ndle, 2020). The final round of applause can exhibit many interesting dynamics related to their collective enthusiasm for the performance (Lupyan &amp; Rifkin, 2003), the local applause culture, and what is on stage during the clapping. 

Quantitative empirical study of group clapping behaviours has principally depended on participants clapping on request in laboratory settings or A/V recordings from concerts (Neda, 2000) and presentations (Mann et al., 2013). To study the coordination involved in this collective behaviour, we need accurate measurements of individuals clapping voluntarily in a real concert setting. 

To describe how the appreciative audience members adjust their clapping to each other and the action on stage during the final round of applause, demonstrating their participation at a concert’s end.
*
After the Danish String Quartet (DSQ) performed their last piece at the Music Lab Copenhagen Concert, the audience clapped continuously for nearly two minutes. During that time, the musicians stood and bowed, had scientific instruments removed from their bodies, left the stage, returned to bow again, and finally left the stage for good. The clapping action of individual participants in this concert experiment was captured by a mobile phone on their chests, and these recordings show how individuals’ clapping contributed to the collective effect shared with the musicians. 

Through the final applause interval, 70 devices captured clear clap sequences, representing over half of the audience at this chamber performance. In some ways, their applause followed expected patterns for a concert audience. They began to applaud over a very short time interval (Mann et al., 2013), more than half starting within less than a second of each other. After 20 s of independent clapping at rates from under 120 BMP to over 200 BPM, the participants shifted to clapping together on a shared beat, a practice that is common for Danish audiences. This group maintained synchrony for over a minute while steadily accelerating from around 158 BMP to 176 BMP, an expected consequence of mutual adaptation during group clapping (Thomson et al., 2018). The coordinated action was strongest while the musicians were on the stage but a subset of independent clapping broke out while the audience waited for the performers to return for their final round of bows. 

Participants’ claps were evaluated from two perspectives: the alignment of claps, reflecting the dominant shift from independence to coordination, and the distribution of participants’ clapping rates over time. Despite some measurement challenges, the shift from independent to coordinate clapping emerges strongly from participants’ movements, with the median rate of clapping slowing until a dominant beat takes hold. Individuals’ clap sequences confirm that the independent clapping at the start of the applause is a result of individual participants clapping isochronously at their own rate, separate in rate and phase from their neighbours in the hall. When the audience claps together, they are voluntarily adjusting to the dominant rate and phase of the people in the hall, with little change in the quality of their isochronous clapping action. Drift in the synchronised clapping rate reflects mutual attentiveness while variation in the number of participants contributing to the coordinated claps suggests differences in applause strategy. Many participants opted to coordinate with their peers while some seemed to prioritise reacting to the musicians. 
</p>
                </span>
        </div>
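            <p>To sketch what the rate and alignment measures in the summary above could look like computationally, here is a small Python example. All onset times are invented, and both functions are simplified stand-ins for the study's actual analyses.</p>
            <pre><code>import numpy as np

# Hedged sketch: per-person clapping rate and a naive pairwise alignment
# measure over clap onset times. Onsets are invented; the study's own
# alignment and rate-distribution analyses are more involved than this.

def clap_rate_bpm(onsets):
    """Median inter-clap interval converted to claps per minute."""
    return 60.0 / float(np.median(np.diff(np.sort(onsets))))

def aligned_fraction(x, y, tol=0.05):
    """Fraction of onsets in x with an onset in y within tol seconds."""
    y = np.asarray(y)
    return float(np.mean([np.any(np.abs(y - t) &lt;= tol) for t in x]))

# Independent phase: two clappers at their own rates (~150 and ~200 BPM).
a = np.arange(0.0, 8.0, 60 / 150)
b = np.arange(0.13, 8.0, 60 / 200)
# Shared beat: both locked to ~170 BPM, 10 ms apart in phase.
s1 = np.arange(8.0, 16.0, 60 / 170)
s2 = s1 + 0.01

print(clap_rate_bpm(a), clap_rate_bpm(b))  # 150.0, 200.0
print(aligned_fraction(a, b))   # low: independent rates and phases
print(aligned_fraction(s1, s2)) # 1.0: synchronized on a shared beat
</code></pre>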
    </li>
      <li id="vrtx-external-publication-2068509" class="vrtx-external-publication">
        <div id="vrtx-publication-2068509">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2068509">
                Upham, Finn
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Audience reconstructed: Fan interactions on twitter during livestreamed BTS concerts.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4561501">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2200363" class="vrtx-external-publication">
        <div id="vrtx-publication-2200363">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2200363">
                Upham, Finn
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Respiratory phase alignment in music audiences.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3228295">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2068530" class="vrtx-external-publication">
        <div id="vrtx-publication-2068530">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2068530">
                Upham, Finn; Park, So Yeon &amp; Lee, Jin Ha
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Concert tweeting: Textures of fan activity on twitter during Sowoozoo.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3675254">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Live-streamed BTS concerts have become a familiar fan activity for ARMY. While the broadcasting platform gives fans reliable access to the show, these remote audiences are also using alternative platforms to connect with each other. We look at Twitter activity using the #SOWOOZOO tag around the muster concerts in 2021 to understand how and when ARMY twitter interacted during the performance.


Liking and retweeting activity followed the attentional demands of the concert live stream. We observed greater fan engagement during the less active times of Sowoozoo (e.g., when BTS were off stage or talking for long periods). This suggested fans did not want to look away from the exciting musical performances but did read and engage with circulating tweets when the program allowed, like in-person audience members checking in with their friends between sets. However, most original posts to the hashtag had a different pattern, reflecting fans’ needs to shout about what was on stage regardless of whether anyone was looking to read their reactions in real time. Although they contained highly relatable expressions of the tweeter’s own strong feelings of love and excitement, these reaction posts received relatively little engagement, buried quickly in the continued heavy flow on ARMY twitter’s timelines. Instead, the most popular tweets to share and like had high quality information like video clips and translations.Tweets in the middle range of engagement often included both expressions of excitement and valuable media. Disinterested in the limited interaction opportunities within the concert streaming platform, these fans adapted Twitter to serve multiple objectives, from screaming their feelings in a virtual crowd and to collecting and sharing the concert-related content they loved.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2068516" class="vrtx-external-publication">
        <div id="vrtx-publication-2068516">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2068516">
                Upham, Finn &amp; Oddekalv, Kjell Andreas
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Tapping into the flow: Proprioceptive interaction with rap in Rhythm Hive.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3373781">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Rhythm Hive is a mobile rhythm game released that features music by the Hybe bands BTS, Enhyphen, and TXT. With examples of gameplay, we describe how this app gives fans access to new levels of appreciation for rap verses, particularly for non-korean speakers and non-rappers.

Interactive proprioception, a concept by game designer Matt Bloch (Miller, 2017), refers to changes in a player&#39;s perception of music as they learn movements to it. Choreography draws attention to coincidental elements of a track, allowing players to intimately and viscerally act with a recorded performance. Players’ actions in Rhythm Hive principally parallel the voiced tracks, the members’ contributions, through dense combinations of finger taps, holds, and slides. Taps to sung phrases often follow melodic pitch but the gestures for rap verses must negotiate different criteria for choosing which syllables to tap to and what shape to give mostly-monotone 16th note runs. 

In a rap flow - here defined as the rhythm of the words and rhymes in a piece of rap music - the rhythmic structure is created by the placement of the rapped syllables, their stress patterns, and the rhymes and phrase boundaries within a music-rhythmic context. Flow, together with delivery (the way the rhythms are performed) and lyrical content, constitutes the rap’s expression.

In Rhythm Hive, players kinesthetically experience structural features of rap flows: how rhythmic patterns are repeated with slight but significant differences, how long phrases cross music-metrical boundaries to build up tension towards an eventual release, and how phrase-endings with rhymes and punchlines are significant gestures. Comparing Easy and Hard mode sequences to select excerpts of the BTS discography, we show how gameplay illuminates the artistry in these verses by tapping along with and therefore into the flow.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2068542" class="vrtx-external-publication">
        <div id="vrtx-publication-2068542">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2068542">
                Upham, Finn
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Uncovering the active listener.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3584432">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Our experiences of music are both highly idiosyncratic, special to each of us in each moment, and collective, with common influences across a listening crowd. Through a series of empirical studies on how people feel and behave during music listening, Finn Upham traces their trajectory from a basic model of performed stimulus and audience response to an empowered-listener view of musical engagement. If participation is part of all musical experiences, this poses a question to all producers of music: “What are you asking your audience to do?”</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2043954" class="vrtx-external-publication">
        <div id="vrtx-publication-2043954">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2043954">
                Swarbrick, Dana; Upham, Finn; Erdem, Cagri; Jensenius, Alexander Refsum &amp; Vuoskoski, Jonna Katariina
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Measuring Virtual Audiences with The MusicLab App: Proof of Concept.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4412226">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">We present a proof of concept by using the mobile application MusicLab to measure motion during a livestreamed concert  and  examining its relation to  musical features. With the MusicLab App, participants’ own smartphones’ inertial measurement unit (IMU) sensors can be leveraged to record their motion and their subjective experiences collected  through  survey  responses.  The  MusicLab  Lock-down  Rave  was  an  Algorave  (live-coded  dance  music) livestreamed concert featuring prolific performers Renick Bell and Khoparzi. They livestreamed for an international audience who wore their smartphones with the MusicLab App while they listened/danced to the performances. From their acceleration, we  computed quantity of motion and compared it to musical features that have previously been associated with music-related motion, namely pulse clarity and  low  and  high  spectral  flux. By  encountering  challenges  and  implementing  improvements,  the  MusicLab App  has  become a  useful  tool  for  researching  music-related motion.</p>
                </span>
        </div>
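            <p>The summary above mentions computing quantity of motion from smartphone acceleration. The Python sketch below shows one common formulation (a windowed sum of change in acceleration magnitude); it is an assumption for illustration, not necessarily the exact feature used in the paper.</p>
            <pre><code>import numpy as np

# Rough sketch of "quantity of motion" from 3-axis accelerometer samples
# recorded at a fixed rate with gravity included. One common formulation,
# not necessarily the exact feature computed in the paper.

def quantity_of_motion(acc_xyz, fs, win_s=1.0):
    """Windowed sum of frame-to-frame change in acceleration magnitude.
    Returns one value per non-overlapping window of win_s seconds."""
    mag = np.linalg.norm(acc_xyz, axis=1)  # constant gravity cancels in diff
    change = np.abs(np.diff(mag))          # sample-to-sample change
    win = int(fs * win_s)
    n = len(change) // win
    return change[: n * win].reshape(n, win).sum(axis=1)

# Example: ~5 s of simulated dancing at 50 Hz (invented data).
rng = np.random.default_rng(1)
acc = rng.normal([0.0, 0.0, 9.81], 2.0, size=(251, 3))
print(quantity_of_motion(acc, fs=50))  # five per-second QoM values
</code></pre>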
    </li>
      <li id="vrtx-external-publication-2045120" class="vrtx-external-publication">
        <div id="vrtx-publication-2045120">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2045120">
                Swarbrick, Dana; Upham, Finn; McAdams, Stephen; Trainor, Laurel &amp; Merrill, Julia
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Concert Experiment Research - Reflections on Past and Future.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4312493">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">In the twenty years since the seminal concert experiments conducted by Stephen McAdams, many music cognition labs have invested resources and expertise into studying music in a concert setting. Such projects often involve collaborations between teams of researchers and artists, new technologies, challenging measurement conditions, and creative analysis strategies. As music science events, they can be very attractive to the popular press and yet difficult to report in academic circles. Complications from their bid for greater ecological validity can look like a weakness by laboratory standards, but such challenges to common practices and theories may also be a strength of this distinct research paradigm. In this moderated panel, we gather researchers with a range of experiences in researching musical concerts to share lessons learned and hopeful directions for this experimental paradigm. What should we expect to capture from musicians and audiences in these conditions? What research questions can and should be investigated with live performance and joint spectatorship? Can today’s technologies improve the methods used to conduct these studies? The symposium will include short presentations on past research with a focus on methods and strategy, some discussion of the main questions between the panelists, and a substantial portion of time devoted to discussion with the audience, as experience with concert studies is wider than the published record suggests. Specifically, the panel will consist of three sections: 1) introductions of the panelists’ expertise on concert experiment research, 2) question and answer period between panelists, and 3) an audience question period. The panel will consist of four leading researchers in the field of concert studies and it will be hosted by a doctoral researcher with expertise in the field.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1939357" class="vrtx-external-publication">
        <div id="vrtx-publication-1939357">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1939357">
                Swarbrick, Dana; Upham, Finn; Erdem, Cagri; Burnim, Kayla &amp; Jensenius, Alexander Refsum
            </span>(2021).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        The MusicLab App – Exploring the usage of mobile accelerometry to measure audience movement and respiration.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4730577">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1939149" class="vrtx-external-publication">
        <div id="vrtx-publication-1939149">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1939149">
                Swarbrick, Dana; Upham, Finn; Erdem, Cagri; Burnim, Kayla &amp; Jensenius, Alexander Refsum
            </span>(2021).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        MusicLab Algorave – An exploratory study examining the usage of mobile accelerometry to measure movements of a virtual concert audience.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4038709">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1925445" class="vrtx-external-publication">
        <div id="vrtx-publication-1925445">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1925445">
                Upham, Finn; Zelechowska, Agata; Gonzalez, Victor &amp; Jensenius, Alexander Refsum
            </span>(2021).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Quiet Breathing to Heard Music.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3692340">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2306593" class="vrtx-external-publication">
        <div id="vrtx-publication-2306593">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2306593">
                Upham, Finn
            </span>(2024).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Peer Review Report For: Is Taylor Swift leading a new Pop revolution? A cross-generation analysis of Pop/Rock cover songs [version 3; peer review: 1 approved with reservations, 3 not approved].
                </span>
                <span class="vrtx-publisher publisher-other publisher-category-REPORT">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/forlag?pid=7498C8F6-B596-416E-8C45-94B069CDCE2F">Taylor &amp; Francis Group</a>.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3772067">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Open review of an study on patterns in cover songs reported in the website SecondHandSongs. Review covers he contemporary cultural context of cover songs and the structure of this database, its linguistic, geographic, legal, and technical biases in this database, besides making specific recommendations for the paper. </p>
                </span>
        </div>
    </li>
    </ul>
      <p class="vrtx-more-external-publications"><a href="https://nva.sikt.no/research-profile/1378586">View all works in NVA</a></p>
    </div>

      </div>
    </div>



      
            
      
        <div class="vrtx-date-info">
        <span class="published-date-label">Published</span>
        <span class="published-date">Nov. 10, 2020 1:28 PM </span>
        
        - <span class="last-modified-date">Last modified</span>
        <span class="last-modified-date">Jan. 29, 2026 4:53 PM</span>
        
        </div>
      
          </div>
        </div>
        <div id="vrtx-additional-content">
          
      
          

<div class="vrtx-projects vrtx-frontpage-box">
  <h2>Projects</h2>

  <div class="vrtx-box-content">
  <ul class="only-links">
      <li><a href="/ritmo/english/projects/Bodies-in-Concert/index.html">Bodies in Concert</a></li>
      <li><a href="https://www-adm.uio.no/ritmo/english/people/postdoctoral-fellows/finnu/concert-tweeting.html">Concert Tweeting</a></li>
      <li><a href="/ritmo/english/projects/musiclab/2021/dsq/index.html">Music Lab (Copenhagen)</a></li>
      <li><a href="/ritmo/english/projects/musiclab/">MusicLab</a></li>
      <li><a href="https://github.com/finn42/respydemo">Respy: Python library for respiration phase</a></li>
      <li><a href="https://www-adm.uio.no/ritmo/english/people/postdoctoral-fellows/finnu/tapping-rap-flow.html">Tapping into the Flow (Rhythm Hive)</a></li>
  </ul>

        <div id="vrtx-related-projects-completed" class="vrtx-related-projects-completed">
          <h3>Completed projects</h3>
          
          
          
  <ul class="only-links">
      <li><a href="/ritmo/english/projects/completed-projects/micro/index.html">MICRO - Human Bodily Micromotion in Music Perception and Interaction </a></li>
  </ul>
        </div>
        <span id="vrtx-related-projects-completed-toggle-wrapper" style="display: none">
          <a id="vrtx-related-projects-completed-toggle" href="javascript:void(0);">Show completed projects</a>
        </span>
  </div>
</div>



          <div class="vrtx-groups vrtx-frontpage-box">
  <h2>Research groups</h2>
    
  <div class="vrtx-box-content">
    <ul class="only-links">
          <li><a href="/ritmo/english/research/labs/fourms/index.html">fourMs Lab</a></li>
    </ul>
  </div>
</div>

          
      
      
        <div id="vrtx-related-content">
          <p>Other science work:</p><p><a href="https://sostrangely.com/">The So Strangely Podcast on Music Science</a></p><p><a href="https://soloresponseproject.com/">Solo Response Project</a></p>
        </div>
      
        </div>
      </div>
       <!--stopindex-->
     </main>
   </div>

    <!-- Page footer start -->
    <footer id="footer-wrapper" class="grid-container faculty-institute-footer">
       <div id="footers" class="row">
            
              <div class="footer-content-wrapper">
                
                
                  <div class="footer-title">
                    <a href="/ritmo/english">RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion</a>
                  </div>
                
                <div class="footer-content">
                  
                    
                      
                        
                          <div>
   <h2>Contact information</h2>
   <p><a href="/ritmo/english/about/">Contact us</a><br>
   <a href="/english/about/getting-around/areas/gaustad/ga09/">Find us</a></p>
</div>
<div>
   <h2>About the website</h2>
   <p><a href="/english/about/regulations/privacy-declarations/privacy-policy-web.html">Cookies</a><br>
   <a href="/ritmo/english/people/postdoctoral-fellows/finnu/ https:/uustatus.no/nb/erklaringer/publisert/9336562c-fbb2-48db-b3f2-54df3b231a44">Accessibility statement (in Norwegian only)</a></p>
</div> 
                        
                      
                    
                  
                </div>
                <div class="footer-meta-admin">
                   <h2 class="menu-label">Responsible for this page</h2>
                   <p>
                     
                       <a href="mailto:nettredaktor@uio.no">Nettredakt?r</a>
                     
                   </p>
                   




    <div class="vrtx-login-manage-component">
      <a href="/ritmo/english/people/postdoctoral-fellows/finnu/index.html?authTarget"
         class="vrtx-login-manage-link"
         rel="nofollow">
        Log in
      </a>
    </div>



                </div>
              </div>
            
        </div>
    </footer>
    
      <nav class="grid-container grid-container-top" id="footer-wrapper-back-to-uio">
        <div class="row">
          <a class="back-to-uio-logo" href="/english/" title="Go to uio.no"></a>
        </div>
      </nav>
    

      
         
      
      

<!--a4d1bc0e1742c08b--><script style="display: none;">
(function(){
    var bp = document.createElement('script');
    var curProtocol = window.location.protocol.split(':')[0];
    if (curProtocol === 'https'){
   bp.src = 'https://zz.bdstatic.com/linksubmit/push.js';
  }
  else{
  bp.src = 'http://push.zhanzhang.baidu.com/push.js';
  }
    var s = document.getElementsByTagName("script")[0];
    s.parentNode.insertBefore(bp, s);
})();
</script><!--/a4d1bc0e1742c08b--></body>
</html>
