<!DOCTYPE html>
<html lang="en">
  <head>
    <!-- NOTE(review): this line previously carried injection artifacts, now removed:
         (1) a script installing window.onerror = function(){ return true; } which silently
             swallowed every JS error on the page, and
         (2) an og:image pointing at an unrelated third-party domain (wap.y666.net),
             part of the same SEO-spam compromise seen elsewhere in this head.
         The legitimate V_PATH global is kept since site scripts may read it. -->
    <script>var V_PATH="/";</script>
    <meta http-equiv="Cache-Control" content="no-transform">
    <meta http-equiv="Cache-Control" content="no-siteapp">
    <meta name="MobileOptimized" content="width">
    <meta name="HandheldFriendly" content="true">
    
    <meta charset="utf-8" >
    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
    <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1" />

    

    <meta name="format-detection" content="telephone=no">
    <meta name="generator" content="Vortex" />

    
      
        <!-- NOTE(review): injected SEO-spam suffix "_澳门皇冠体育,皇冠足球比分" removed from the
             document title; multi-line attribute/title content collapsed for readability. -->
        <title>Laura Bishop - RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion</title>
        <meta property="og:title" content="Laura Bishop - RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion" />
      
    

    
  
  
  
  
  
  
  
  

  
    

    
    
    
      
      
        
        
          
          
            
                
            
            
            
            
              
            
          
          
        
      
    

    <!-- Twitter/X card metadata for this profile page. -->
    <meta name="twitter:card" content="summary" />
    <meta name="twitter:site" content="@unioslo" />
    <meta name="twitter:title" content="Laura Bishop" />

    
      <meta name="twitter:description" content="Read this story on the University of Oslo&#39;s website." />
    

    
      <!-- NOTE(review): twitter:image and twitter:url below are site-relative paths, but the
           Twitter Cards spec requires fully-qualified absolute URLs (https://...). Crawlers will
           likely drop the image/url. Fix belongs in the template that emits these values — the
           canonical host is not visible from this file, so it is flagged rather than changed. -->
      <meta name="twitter:image" content="/ritmo/english/people/tenured/laurabi/laura150x200.jpg" />
    

    
    
      <meta name="twitter:url" content="/ritmo/english/people/tenured/laurabi/index.html" />
    
    
  

    
  
  
  
  
  
  
  
  

  
    
    

    <meta property="og:url" content="/ritmo/english/people/tenured/laurabi/index.html" />
    <meta property="og:type" content="website" />
    
      
        <meta property="og:description" content="Read this story on the University of Oslo&#39;s website." />
      
    

    

    
      
      
        
        
          
        
      
    
  


    
  
  
  
  
  
  
  

  
    <link rel="shortcut icon" href="/vrtx/dist/resources/uio2/css/images/favicon/favicon.png?x-h=1774601544824">
  


    
  
  
  

  


    
  
  
  
  
  
  
  
  
  
  
  
  
  
  
  
  

  

  
    <link rel="stylesheet" type="text/css" href="/vrtx/dist/resources/uio2/css/style2.css?x-h=1774601544824" />
  
  

  

  
    
  

  

   
     
       
     
     
       

         
         
       
     

     
   


    
        
      
    
  <!-- NOTE(review): removed three injection artifacts that previously sat here:
       (1) spam <meta name="keywords"> / <meta name="description"> stuffed with gambling-site
           keywords ("澳门皇冠体育,皇冠足球比分...") unrelated to this page,
       (2) an injected <script src="/ceng.js"> payload loader, and
       (3) a second viewport meta setting maximum-scale=1 / user-scalable=no, which blocks
           pinch-zoom (WCAG 1.4.4 failure) and conflicted with the legitimate viewport meta
           declared earlier in this head. -->
</head>

    
    
      
        
      
    

    
      <!-- Attribute values double-quoted and stray whitespace in the class list trimmed. -->
      <body class="www.uio.no not-for-ansatte header-context english faculty en" id="vrtx-person">
    
  <!--stopindex-->

     
  
  
  
  
  
  

  <!-- Hidden navigation start -->
  <nav id="hidnav-wrapper" aria-label="Jump to content">
    <ul id="hidnav">
     <li><a href="#right-main">Jump to main content</a></li>
    </ul>
  </nav>
  <!-- Hidden navigation end -->



    

  
    <!-- NOTE(review): the class attribute previously contained a literal "&nbsp;" entity, which
         creates an invalid class token (a class name consisting of a no-break space). Removed. -->
    <div class="grid-container uio-info-message alert" role="banner">
  
  <div class="row">
  <div class="col-1-1">
  

  
  
    
       &nbsp;
    
  
  
  

  </div>
  </div>
  </div>
    

   

    <header id="head-wrapper">
        <div id="head">

           
           <div class="uio-app-name">
                  <a href="/english/" class="uio-acronym georgia">UiO</a>
                  

                  
                    <a href="/ritmo/english" class="uio-host">RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion</a>
                  
            </div>
            

            

            
              <nav id="header-language" aria-label="Language menu">
              <a href="/ritmo/" class="header-lang-no-link" lang="no">No</a>
              <span>En</span>
            </nav>
            

            <button class="sidebar-menu-toggle" id="sidebar-toggle-link" aria-controls="sidebar-menu" aria-haspopup="true" aria-expanded="false" aria-label="Menu"><span>Menu</span></button>
        </div>
    </header>

   <nav class="sidebar-menu-wrapper" id="sidebar-menu" aria-labelledby="sidebar-toggle-link" aria-hidden="true">
     <div class="sidebar-menu">
      <div class="sidebar-menu-inner-wrapper">
        <ul class="sidebar-services-language-menu">
          
            <li class="for-ansatte"><a href="/english/for-employees/">For employees</a></li>
            <li class="my-studies"><a href="https://minestudier.no/en/index.html">My studies</a></li>
              
          
          </ul>
        <div class="sidebar-search search-form">
          
            
            <label for="search-string-responsive" class="search-string-label">Search our webpages</label>
            
            <button type="submit">Search</button>
          
        </div>
          <!-- Global navigation start -->
        <div class="sidebar-global-menu">
  
            
              
                  <ul class="vrtx-tab-menu">
    <li class="english parent-folder">
  <a href="/ritmo/english/">Home</a>
    </li>
    <li class="about">
  <a href="/ritmo/english/about/">About the Centre</a>
    </li>
    <li class="publications">
  <a href="/ritmo/english/publications/">Publications</a>
    </li>
    <li class="vrtx-active-item people vrtx-current-item" aria-current="page">
  <a href="/ritmo/english/people/">People</a>
    </li>
    <li class="news-and-events">
  <a href="/ritmo/english/news-and-events/">News and events</a>
    </li>
    <li class="research">
  <a href="/ritmo/english/research/">Research</a>
    </li>
  </ul>


              
            
            
        </div>
        <!-- Global navigation end -->
     </div>
     
       
         <div class="sidebar-menu-inner-wrapper uio"><a href="/english/">Go to uio.no</a></div>
       
     
     </div>
   </nav>

   <div id="main" class="main">
     <div id="left-main">
         <nav id="left-menu-same-level-folders" aria-labelledby="left-menu-title">
           <span id="left-menu-title" style="display: none">Sub menu</span>
             <ul class="vrtx-breadcrumb-menu">
            <li class="vrtx-ancestor"> <a href="/ritmo/english/people/"><span>People</span></a></li>
            <li class="vrtx-parent" ><a href="/ritmo/english/people/tenured/"><span>Senior Faculty and Principal Investigators</span></a>

      <ul>
          <li class="vrtx-child"><a class="vrtx-marked" aria-current="page" href="/ritmo/english/people/tenured/laurabi/"><span>Laura Bishop</span></a></li>
      </ul>

    </li>

  </ul>

         </nav>
     </div>

     <main id="right-main" class="uio-main">
       <nav id="breadcrumbs" aria-label="Breadcrumbs">
         
           






  <div id="vrtx-breadcrumb-wrapper">
    <div id="vrtx-breadcrumb" class="breadcrumb">
            <span class="vrtx-breadcrumb-level vrtx-breadcrumb-level-4">
            <a href="/ritmo/english/people/">People</a>
      	  <span class="vrtx-breadcrumb-delimiter">&gt;</span>
        </span>
            <span class="vrtx-breadcrumb-level vrtx-breadcrumb-level-5 vrtx-breadcrumb-before-active">
            <a href="/ritmo/english/people/tenured/">Senior Faculty and Principal Investigators</a>
      	  <span class="vrtx-breadcrumb-delimiter">&gt;</span>
        </span>
          <span class="vrtx-breadcrumb-level vrtx-breadcrumb-level-6 vrtx-breadcrumb-active">Laura Bishop
        </span>
    </div>
  </div>

         
       </nav>
           
           
            
            
            

       <!--startindex-->

       
      <div id="vrtx-content">
        <div id="vrtx-main-content">
          <h1>
      
        Laura Bishop
      </h1>
          
      
      
      
        
  <div id="vrtx-person-position">
    <span>
        Researcher
          -
        <a href="https://www.hf.uio.no/imv/english?vrtx=unit-view&amp;areacode=143695">RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion (IMV)</a>
    </span>
  </div>


      
          <div id="vrtx-person-contact-info-wrapper">
              
      
        
        
        
          
          
            
            
            
            
              <!-- alt text should name the person, not start with "Image of" (screen readers already
                   announce it as an image). width/height reserve layout space to avoid CLS;
                   150x200 intrinsic size inferred from the asset filename — TODO confirm. -->
              <img class="vrtx-person-image" src="/ritmo/english/people/tenured/laurabi/laura150x200.jpg" alt="Laura Bishop" width="150" height="200" loading="lazy">
            
          
        
      
              
      <div class="vrtx-person-contactinfo">
        
        
        

          
	<span id="vrtx-person-change-language-link">
	  <a href="/ritmo/personer/fast/laurabi/index.html">Norwegian<span class="offscreen-screenreader"> version of this page</span></a>
	</span>


          
            <div class="vrtx-person-contact-info-line vrtx-email"><span class="vrtx-label">Email</span>
              
                <a class="vrtx-value" href="mailto:laura.bishop@imv.uio.no">laura.bishop@imv.uio.no</a>
              
            </div>
          
          
          
          
          
          
            <div class="vrtx-person-contact-info-line vrtx-username">
              <span class="vrtx-label">Username</span>
              
                  <div class="vrtx-login">
    <!-- NOTE(review): href previously contained a double-escaped "&amp;amp;authTarget", which the
         browser decodes to a literal "&amp;authTarget" query key instead of "authTarget". -->
    <a href="/ritmo/english/people/tenured/laurabi/index.html?vrtx=login&amp;authTarget" rel="nofollow">Log in</a>
  </div>

              
            </div>
          
          
            
              <div class="vrtx-person-visiting-address"><span class="vrtx-label">Visiting address</span>
                
                  <span class="vrtx-address-line">Forskningsveien 3A</span>
                
                  <span class="vrtx-address-line">Harald Schjelderups hus</span>
                
                  <span class="vrtx-address-line">0373 Oslo</span>
                
              </div>
            
          
          
            <div class="vrtx-person-postal-address"><span class="vrtx-label"> Postal address</span>
              
                <span class="vrtx-address-line">Postboks 1133 Blindern</span>
              
                <span class="vrtx-address-line">0318 Oslo</span>
              
            </div>
          
          
            


          
        
      </div>
              
      <div id="vrtx-person-contact-info-extras">
        
          <!-- NOTE(review): href previously contained leading spaces and an embedded newline,
               producing a URL with encoded whitespace when followed. Collapsed onto one line. -->
          <a id="vrtx-press-photo" href="/ritmo/english/people/tenured/laurabi/laura.jpg?alt=original&amp;vrtx=view-as-webpage">Press photo</a>
        
        
          <a id="vrtx-person-vcard" href="/ritmo/english/people/tenured/laurabi?vrtx=vcf">Download business card</a>
        
      </div>
              <div class="vrtx-person-contact-info-wrapper-end"></div>
          </div>
          <div id="vrtx-person-main-content-wrapper">
            <div class="vrtx-article-body">
              <h2>Research Interests</h2>

<p>My research focuses on the cognitive processes involved in creative musical interaction. I am especially&nbsp;interested in how creative collaboration unfolds in the context of skilled music ensemble performance, when predictability and coordination must be balanced with flexibility and spontaneity. To address this question I run experiments&nbsp;investigating ensemble musicians' communicative behaviour;&nbsp;in particular, their body gestures and visual attention. I also look at how this behaviour is perceived by others (co-performers or audience members).</p>

<h2>Background</h2>

<ul>
	<li>2013-2019:&nbsp;Postdoctoral researcher, Austrian Research Institute for Artificial Intelligence (OFAI), Vienna, Austria</li>
	<li>2013: PhD in music cognition, MARCS Institute, Western Sydney University, Australia</li>
	<li>2008: MSc in psychology,&nbsp;University of Sheffield, UK</li>
	<li>2007: BSc (Honours) in psychology, University of Toronto, Canada</li>
</ul>

<h2>Grants</h2>

<ul>
	<li>2020-2023: "Achieving Togetherness in Ensemble Performance", Austrian Science Fund, based at the University of Music and Performing Arts Vienna</li>
	<li>2016-2019:&nbsp;"CoCreate: Coordination and Collaborative Creativity in Music Ensembles", Austrian Science Fund, based at&nbsp;the Austrian Research Institute for Artificial Intelligence and the University of Music and Performing Arts Vienna&nbsp;</li>
</ul>

            </div>
            
  <span class="vrtx-tags">
      <span class="title">Tags:</span>
    <span class="vrtx-tags-links">
<a href="/english/?vrtx=tags&amp;tag=Music%20Cognition&amp;resource-type=person&amp;sorting=resource%3Asurname%3Aasc&amp;sorting=resource%3AfirstName%3Aasc">Music Cognition</a><span class="tag-separator">,</span>
<a href="/english/?vrtx=tags&amp;tag=Music%20and%20Movement&amp;resource-type=person&amp;sorting=resource%3Asurname%3Aasc&amp;sorting=resource%3AfirstName%3Aasc">Music and Movement</a><span class="tag-separator">,</span>
<a href="/english/?vrtx=tags&amp;tag=Social%20cognition&amp;resource-type=person&amp;sorting=resource%3Asurname%3Aasc&amp;sorting=resource%3AfirstName%3Aasc">Social cognition</a><span class="tag-separator">,</span>
<a href="/english/?vrtx=tags&amp;tag=Musical%20interaction&amp;resource-type=person&amp;sorting=resource%3Asurname%3Aasc&amp;sorting=resource%3AfirstName%3Aasc">Musical interaction</a>
    </span>
  </span>

            
      
      
      
      
      
      
        
        
      

      
      

      
        



<style>
/* Publication-list typography: publishers of book chapters render upright,
   while parent titles, book titles/publishers, "other" publishers and
   journal-article publishers render in italics. */

    .publisher-category-CHAPTER {
            font-style: normal;
    }

    .parent-title-articlesAndBookChapters,
    .parent-title-other,
    .title-books,
    .publisher-books,
    .publisher-other,
    .publisher-category-ARTICLE {
        font-style: italic;
    }

</style>


    <div id="vrtx-publications-wrapper">

      <h2>Publications</h2>



      <div id="vrtx-publication-tabs">
        <ul>
            <li><a href="#vrtx-publication-tab-1" name="vrtx-publication-tab-1">Scientific articles and book chapters</a></li>
            <li><a href="#vrtx-publication-tab-2" name="vrtx-publication-tab-2">Other</a></li>
        </ul>



    <div id="vrtx-publication-tab-1">
  <ul class="vrtx-external-publications">

      <li id="vrtx-external-publication-10326925" class="vrtx-external-publication">
        <div id="vrtx-publication-10326925">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10326925">
                Grassi, Massimo; Talamini, Francesca; Altoè, Gianmarco; Brattico, Elvira; Caclin, Anne &amp; Carretti, Barbara
                    <a href="javascript:void(0);" title="Get all contributors" onclick="addContributor('https://api.cristin.no/v2/nvaresults/10326925/contributors', 'vrtx-publication-contributors-10326925')">
                    [Show all&nbsp;110&nbsp;contributors for this article]</a>
            </span>(2025).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Do Musicians Have Better Short-Term Memory Than Nonmusicians? A Multilab Study.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Advances in Methods and Practices in Psychological Science (AMPPS).
                </span>
                <span class="vrtx-issn">ISSN 2515-2459.</span>
                            8(4).
            doi: <a href="https://doi.org/10.1177/25152459251379432">10.1177/25152459251379432</a>.
            <a href="https://hdl.handle.net/11250/5353137">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Musicians are often regarded as a positive example of brain plasticity and associated cognitive benefits. This emerges when experienced musicians (e.g., musicians with more than 10 years of music training and practice) are compared with nonmusicians. A frequently observed behavioral finding is a short-term memory advantage of the former over the latter. Although available meta-analysis reported that the effect size of this advantage is medium (Hedges’s g = 0.5), no literature study was adequately powered to estimate reliably an effect of such size. This multilab study has been ideated, realized, and conducted in lab by several groups that have been working on this topic. Our ultimate goal was to provide a community-driven shared and reliable estimate of the musicians’ short-term memory advantage (if any) and set a method and a standard for future studies in neuroscience and psychology comparing musicians and nonmusicians. Thirty-three research units recruited a total of 600 experienced musicians and 600 nonmusicians, a number that is sufficiently large to estimate a small effect size (Hedges’s g = 0.3) with a high statistical power (i.e., 95%). Subsequently, we measured the difference in short-term memory for musical, verbal, and visuospatial stimuli. We also looked at cognitive, personality, and socioeconomic factors that might mediate the difference. Musicians had better short-term memory than nonmusicians for musical, verbal, and visuospatial stimuli with an effect size of, respectively, Hedges’s g s = 1.08 (95% confidence interval [CI] = [0.94, 1.22]; large), 0.16 (95% CI = [0.02 0.30]; very small), and 0.28 (95% CI = [0.15, 0.41]; small). This work sets the basis for sound research practices in studies comparing musicians and nonmusicians and contributes to the ongoing debate on the possible cognitive benefits of musical training.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10293114" class="vrtx-external-publication">
        <div id="vrtx-publication-10293114">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10293114">
                D’Amario, Sara &amp; Bishop, Laura
            </span>(2025).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Self-reported experiences of togetherness in classical music ensembles.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Psychology of Music.
                </span>
                <span class="vrtx-issn">ISSN 0305-7356.</span>
                            
                <span class="vrtx-pages">p. 1–19.</span>
            doi: <a href="https://doi.org/10.1177/03057356251381808">10.1177/03057356251381808</a>.
            <a href="https://hdl.handle.net/11250/5325502">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Musicians experience varying degrees of musical togetherness, defined as a sense of social connectedness that they experience with co-performer(s) in music ensembles. Previous investigations focused on optimal experiences and suggested a link between social connections and musical and contextual aspects. However, it is not fully understood how this concept aligns with musicians’ experiences of togetherness. This research analysed experiences of togetherness in classical ensemble performances, based on semi-structured interviews with 22 advanced music students. Thematic content analysis demonstrates the emergence of four main themes associated with togetherness experiences as follows: (1) togetherness sensation, (2) quality of the interpersonal relationships, (3) performance settings, and (4) ensemble skills. This study broadens our understanding of ensemble playing experiences and reveals how togetherness experiences can arise or be negatively affected. These results are valuable to ensemble pedagogy and social interactions.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2394700" class="vrtx-external-publication">
        <div id="vrtx-publication-2394700">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2394700">
                Miles, Oliver; Hazzard, Adrian; Moroz, Solomiya; Bishop, Laura &amp; Vear, Craig
            </span>(2025).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Meaningful interactions in human-AI musicking.
                </span>
                    <span class="vrtx-parent-contributors">
                            In Seiça, Mariana &amp; Wirfs-Brock, Jordan (Ed.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    AM &#39;25: Proceedings of the 20th International Audio Mostly Conference.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/forlag?pid=517D4F8F-AF83-4062-82FA-254E8A87D7D8">Association for Computing Machinery (ACM)</a>.
                </span>
                <span class="vrtx-issn">ISSN 9798400720659.</span>
                            
                <span class="vrtx-pages">p. 58–69.</span>
            doi: <a href="https://doi.org/10.1145/3771594.3771600">10.1145/3771594.3771600</a>.
            <a href="https://hdl.handle.net/11250/4871512">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">In this paper we discuss our latest iteration of RAMI (the Robotic Arm for Musicking Inclusively) as prototype digital score. We deploy RAMI as interactive technology for the purpose of improvisation ensemble work, documenting the narrative journey of a single elite musician–M–as they engage with its nascent qualities over three extended workshop sessions. This study builds in part on the musical togetherness model of human-human interaction [4], and necessarily draws adjacent concepts in robotics, human computer interaction (HCI), and digital score. Recording musicking interactions with RAMI, we elicit and report on M’s thematic perceptions of instances of: familiarization; engagement strategy; decision making; connection, agency, and ensemble; assertions of schemas of RAMI; and moments of enjoying RAMI. Beyond the rich, narrative insight that these six themes offer, we critically assess these as variably (un)certain in nature; as particularly certain when asserting two schemas–RAMI as a dancer, and RAMI as a score, and as indicative of an underlying continuum of musician perception that recognises RAMI as an AI tool for musicking, on the one hand, and on the other hand, as an AI agent for embodied collaboration.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2394699" class="vrtx-external-publication">
        <div id="vrtx-publication-2394699">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2394699">
                Bishop, Laura; Hadjidaki-Marder, Elpida; Ledas, Sarunas &amp; Liestøl, Gunnar
            </span>(2025).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Motion capture for augmented reality storytelling in archaeology and cultural heritage dissemination: Simulating an animal sacrifice at Ancient Phalasarna.
                </span>
                    <span class="vrtx-parent-contributors">
                            In Jung, Timothy; Dieck, M. Claudia tom; Jeong, Seok Chan; Kim, Sung-Hee; Sahl, Daniel &amp; Kim, S. J. (Ed.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    XR and Metaverse: Proceedings of the 9th International XR-Metaverse Conference 2024, Busan, South Korea.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/forlag?pid=AD8FEF33-C155-4915-A7BF-A1BE33DDAC4D">Springer</a>.
                </span>
                <span class="vrtx-issn">ISSN 9783031779749.</span>
                            
                <span class="vrtx-pages">p. 209–221.</span>
            doi: <a href="https://doi.org/10.1007/978-3-031-77975-6_16">10.1007/978-3-031-77975-6_16</a>.
            <a href="https://hdl.handle.net/11250/4864281">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2391292" class="vrtx-external-publication">
        <div id="vrtx-publication-2391292">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2391292">
                Sveen, Henrik Haraldsen; Bishop, Laura &amp; Jensenius, Alexander Refsum
            </span>(2025).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Cyclic Patterns and Spatial Orientations in Artificial Impulsive Autonomous Sensory Meridian Response (ASMR) Sounds.
                </span>
                    <span class="vrtx-parent-contributors">
                            In Seiça, Mariana &amp; Wirfs-Brock, Jordan (Ed.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    AM &#39;25: Proceedings of the 20th International Audio Mostly Conference.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/forlag?pid=517D4F8F-AF83-4062-82FA-254E8A87D7D8">Association for Computing Machinery (ACM)</a>.
                </span>
                <span class="vrtx-issn">ISSN 9798400720659.</span>
                            
                <span class="vrtx-pages">p. 124–131.</span>
            doi: <a href="https://doi.org/10.1145/3771594.3771651">10.1145/3771594.3771651</a>.
            <a href="https://hdl.handle.net/11250/4286194">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Autonomous Sensory Meridian Response (ASMR) is a tingling sensation in the neck and spine often triggered by specific sounds. This paper reports a study on the impact of different cyclic patterns and spatial orientations—defined here as the perceived directionality and motion of sound sources in a three-dimensional auditory space—on inducing ASMR experiences. The results demonstrate that both the type of cyclic pattern and the spatial orientation significantly influence the intensity and nature of ASMR experiences. Furthermore, the research explores synthesizing ASMR-inducing sounds while preserving key audio characteristics from acoustically recorded ASMR content. Through survey data analysis and regression modeling, distinct patterns emerge regarding the relationship between personality traits and ASMR experience. The findings contribute to a deeper understanding of ASMR as a sensory phenomenon and provide insights into the potential applications of artificially generated ASMR stimuli. Additionally, the research sheds light on the role of spatiality in ASMR experiences and the synthesis of ASMR-inducing sounds for future studies and practical applications</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2390341" class="vrtx-external-publication">
        <div id="vrtx-publication-2390341">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2390341">
                Høffding, Simon; Bergstrøm, Rebecca Josefine Five; Bishop, Laura; Bravo, Pedro Pablo Lucas; Burnim, Kayla &amp; Cancino-Chacón, Carlos Eduardo
                    <a href="javascript:void(0);" title="Get all contributors" onclick="addContributor('https://api.cristin.no/v2/nvaresults/2390341/contributors', 'vrtx-publication-contributors-2390341')">
                    [Show all&nbsp;28&nbsp;contributors for this article]</a>
            </span>(2025).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Introducing the MusicLab Copenhagen Dataset.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Music &amp; Science.
                </span>
                            8.
            doi: <a href="https://doi.org/10.1177/20592043241303288">10.1177/20592043241303288</a>.
            <a href="https://hdl.handle.net/11250/4734447">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">MusicLab Copenhagen was a unique research concert featuring the world-renowned Danish String Quartet in a naturalistic setting. The audience was split between one group physically located in the hall, another group listening to a radio broadcast, and a third group watching a live stream. Qualitative and quantitative data were captured from both musicians and audiences, resulting in a comprehensive dataset that can be used to address many research questions. This document introduces the dataset, explains its structure, and reflects on the related data collection, storing, publishing, and archiving processes.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2302118" class="vrtx-external-publication">
        <div id="vrtx-publication-2302118">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2302118">
                Bishop, Laura
            </span>(2024).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Togetherness in musical interaction.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Routledge Open Research.
                </span>
                            
            doi: <a href="https://doi.org/10.12688/routledgeopenres.18202.1">10.12688/routledgeopenres.18202.1</a>.
            <a href="https://hdl.handle.net/11250/3852241">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2156528" class="vrtx-external-publication">
        <div id="vrtx-publication-2156528">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2156528">
                Høffding, Simon; Yi, Wenbo; Lippert, Eigil; Sanchez, Victor Evaristo Gonzalez; Bishop, Laura &amp; Laeng, Bruno
                    <a href="javascript:void(0);" title="Get all contributors" onclick="addContributor('https://api.cristin.no/v2/nvaresults/2156528/contributors', 'vrtx-publication-contributors-2156528')">
                    [Show all&nbsp;9&nbsp;contributors for this article]</a>
            </span>(2023).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Into the Hive-Mind: Shared Absorption and Cardiac Interrelations in Expert and Student String Quartets.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Music &amp; Science.
                </span>
                            6.
            doi: <a href="https://doi.org/10.1177/20592043231168597">10.1177/20592043231168597</a>.
            <a href="https://hdl.handle.net/10852/107606">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Expert musicians portray awe-inspiring precision, timing, and phrasing and may be thought to partake in a “hive-mind.” Such a shared musical absorption is characterized by a heightened empathic relation, mutual trust, and a sense that the music “takes over,” thus uniting the performers’ musical intentions. Previous studies have found correlations between empathic concern or shared experience and cardiac synchrony (CS). We aimed to investigate shared musical absorption in terms of CS by analyzing CS in two quartets: a student quartet, the Borealis String Quartet (BSQ), and an expert quartet, the Danish String Quartet (DSQ), world-renowned for their interpretations and cohesion. These two quartets performed the same Haydn excerpt in seven conditions, some of which were designed to disrupt their absorption. Using multidimensional recurrence quantification analysis (MdRQA), we found that: (1) performing resulted in significantly increased CS in both quartets compared with resting; (2) across all conditions, the DSQ had a significantly higher CS than the BSQ; (3) the BSQ&#39;s CS was inversely correlated with the degree of disruption; 4) for the DSQ, the CS remained constant across all levels of disruption, besides one added extreme disruption—a sight-reading condition. These findings tentatively support the claim that a sense of shared musical absorption, as well as group expertise, is correlated with CS.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2193544" class="vrtx-external-publication">
        <div id="vrtx-publication-2193544">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2193544">
                Smetana, Monika; Bishop, Laura &amp; Stepniczka, Irene
            </span>(2023).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Interfaces of dialogue: A mixed methods approach to investigating intersubjectivity in dyadic improvisations.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Music &amp; Science.
                </span>
                            6,
                <span class="vrtx-pages">p. 1–32.</span>
            doi: <a href="https://doi.org/10.1177/20592043231203807">10.1177/20592043231203807</a>.
            <a href="https://hdl.handle.net/11250/4451804">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2193543" class="vrtx-external-publication">
        <div id="vrtx-publication-2193543">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2193543">
                Høffding, Simon &amp; Bishop, Laura
            </span>(2023).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Tightness and looseness: Where to find it and how to measure it? Commentary on &quot;Musical engagement as a duet of tight synchrony and loose interpretability&quot; by Rabinowitch.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ACADEMICREVIEW">
                        Physics of Life Reviews.
                </span>
                <span class="vrtx-issn">ISSN 1571-0645.</span>
                            
            doi: <a href="https://doi.org/10.1016/j.plrev.2023.09.001">10.1016/j.plrev.2023.09.001</a>.
            <a href="https://hdl.handle.net/11250/3337964">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2193545" class="vrtx-external-publication">
        <div id="vrtx-publication-2193545">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2193545">
                Bishop, Laura; Høffding, Simon; Lartillot, Olivier Serge Gabriel &amp; Laeng, Bruno
            </span>(2023).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Mental Effort and Expressive Interaction in Expert and Student String Quartet Performance.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Music &amp; Science.
                </span>
                            6.
            doi: <a href="https://doi.org/10.1177/20592043231208000">10.1177/20592043231208000</a>.
            <a href="https://hdl.handle.net/11250/4027926">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2193554" class="vrtx-external-publication">
        <div id="vrtx-publication-2193554">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2193554">
                Bishop, Laura &amp; Goebl, Werner
            </span>(2023).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Joint shaping of musical time: How togetherness emerges in music ensemble performance.
                </span>
                    <span class="vrtx-parent-contributors">
                            In Wöllner, Clemens &amp; London, Justin (Ed.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    Performing Time. Synchrony and Temporal Flow in Music and Dance.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/forlag?pid=239F1C9D-8585-4961-B96A-05B4CEBCAF6B">Oxford University Press</a>.
                </span>
                <span class="vrtx-issn">ISBN 9780192896254.</span>
                            
            
            <a href="https://hdl.handle.net/11250/3627942">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2200514" class="vrtx-external-publication">
        <div id="vrtx-publication-2200514">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2200514">
                D&#39;Amario, Sara; Schmidbauer, Harald; Roesch, Angi; Goebl, Werner; Niemand, Anna Maria &amp; Bishop, Laura
            </span>(2023).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Interperformer coordination in piano-singing duo performances: phrase structure and empathy impact.                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Psychological Research.
                </span>
                <span class="vrtx-issn">ISSN 0340-0727.</span>
                            87,
                <span class="vrtx-pages">p. 2559–2582.</span>
            doi: <a href="https://doi.org/10.1007/s00426-023-01818-8">10.1007/s00426-023-01818-8</a>.
            <a href="https://hdl.handle.net/11250/3407221">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Abstract Musicians’ body motion plays a fundamental role in ensemble playing, by supporting sound production, communication, and expressivity. This research investigates how Western classical musicians’ head motion during ensemble performances relates to a piece’s phrase structure and musicians’ empathic perspective taking (EPT) profile. Twenty-four advanced piano and singing students took part in the study, and their EPT score was pre-assessed using the Interpersonal Reactivity Index. High and low EPT duos were formed, and musicians were paired with a co-performer from the same and the other EPT group. Musicians rehearsed Fauré’s Automne and Schumann’s Die Kartenlegerin, and performed the pieces one time before and three times after rehearsal. Motion capture data of the musicians’ front head, audio, and MIDI recordings of the performances were collected and analysed. Similarity in musicians’ head motion and tendency to lead/lag their co-performer were computed by extracting, respectively, power and phase difference of the cross-wavelet transforms of the velocity curves of each paired marker. Results demonstrate that the power of interperformer coordination corresponds to the piece’s phrase levels and that singer’s EPT can impact the leader-follower relationships between musicians, depending on piece and take number. In the Fauré piece, the higher the singer’s EPT score, the higher the tendency for the singer to lead and pianist to follow in take 3, and the lower the tendency for the singer to lead and pianist to follow in take 2. These results contribute to a further understanding of the mechanisms underpinning social interactions, by revealing the complexity of the association between empathy and body motion in ensembles in promoting and diffusing leadership between musicians.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2143172" class="vrtx-external-publication">
        <div id="vrtx-publication-2143172">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2143172">
                Akca, Merve; Vuoskoski, Jonna Katariina; Laeng, Bruno &amp; Bishop, Laura
            </span>(2023).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Recognition of brief sounds in rapid serial auditory presentation.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        PLOS ONE.
                </span>
                            18(4).
            doi: <a href="https://doi.org/10.1371/journal.pone.0284396">10.1371/journal.pone.0284396</a>.
            <a href="https://hdl.handle.net/10852/102101">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Two experiments were conducted to test the role of participant factors (i.e., musical sophistication, working memory capacity) and stimulus factors (i.e., sound duration, timbre) on auditory recognition using a rapid serial auditory presentation paradigm. Participants listened to a rapid stream of very brief sounds ranging from 30 to 150 milliseconds and were tested on their ability to distinguish the presence from the absence of a target sound selected from various sound sources placed amongst the distracters. Experiment 1a established that brief exposure to stimuli (60 to 150 milliseconds) does not necessarily correspond to impaired recognition. In Experiment 1b we found evidence that 30 milliseconds of exposure to the stimuli significantly impairs recognition of single auditory targets, but the recognition for voice and sine tone targets impaired the least, suggesting that the lower limit required for successful recognition could be lower than 30 milliseconds for voice and sine tone targets. Critically, the effect of sound duration on recognition completely disappeared when differences in musical sophistication were controlled for. Participants’ working memory capacities did not seem to predict their recognition performances. Our behavioral results extend the studies oriented to understand the processing of brief timbres under temporal constraint by suggesting that the musical sophistication may play a larger role than previously thought. These results can also provide a working hypothesis for future research, namely, that underlying neural mechanisms for the processing of various sound sources may have different temporal constraints.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2164121" class="vrtx-external-publication">
        <div id="vrtx-publication-2164121">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2164121">
                Herrebråden, Henrik; Espeseth, Thomas &amp; Bishop, Laura
            </span>(2023).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Mental Effort in Elite and Nonelite Rowers.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Journal of Sport &amp; Exercise Psychology (JSEP).
                </span>
                <span class="vrtx-issn">ISSN 0895-2779.</span>
                            45(4),
                <span class="vrtx-pages">p. 208–223.</span>
            doi: <a href="https://doi.org/10.1123/jsep.2022-0164">10.1123/jsep.2022-0164</a>.
            <a href="https://hdl.handle.net/11250/4656785">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Mental effort (intensity of attention) in elite sports has remained a debated topic and a challenging phenomenon to measure. Thus, a quasi-ecological laboratory study was conducted to investigate mental effort in elite rowers as compared with a group of nonelites. Findings suggest that eye-tracking measures—specifically, blink rates and pupil size—can serve as valid indicators of mental effort in physically demanding sport tasks. Furthermore, findings contradict the notion that elite athletes spend less cognitive effort than their lower-level peers. Specifically, elites displayed similar levels of self-reported effort and performance decrement with increasing mental load and significantly more mental effort overall as measured by pupil-size increase (relative to baseline) during rowing trials as compared with the nonelites in the sample. Future studies on eye tracking in sports may include investigations of mental effort in addition to selective attention during physically demanding tasks.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2230405" class="vrtx-external-publication">
        <div id="vrtx-publication-2230405">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2230405">
                D&#39;Amario, Sara; Ternström, Sten; Goebl, Werner &amp; Bishop, Laura
            </span>(2023).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Body motion of choral singers.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Frontiers in Psychology.
                </span>
                            14.
            doi: <a href="https://doi.org/10.3389/fpsyg.2023.1220904">10.3389/fpsyg.2023.1220904</a>.
            <a href="https://hdl.handle.net/11250/3826892">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2200856" class="vrtx-external-publication">
        <div id="vrtx-publication-2200856">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2200856">
                D&#39;Amario, Sara; Ternström, Sten; Goebl, Werner &amp; Bishop, Laura
            </span>(2023).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Impact of singing togetherness and task complexity on choristers&#39; body motion.
                </span>
                    <span class="vrtx-parent-contributors">
                            In D&#39;Amario, Sara; Ternström, Sten &amp; Friberg, Anders (Ed.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    Proceedings of the Stockholm Music Acoustics Conference.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        KTH Royal Institute of Technology.
                </span>
                <span class="vrtx-issn">ISBN 9789180408653.</span>
                            
            
            <a href="https://hdl.handle.net/11250/4674349">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2200042" class="vrtx-external-publication">
        <div id="vrtx-publication-2200042">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2200042">
                Riaz, Maham; Upham, Finn; Burnim, Kayla; Bishop, Laura &amp; Jensenius, Alexander Refsum
            </span>(2023).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Comparing inertial motion sensors for capturing human micromotion,
                </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    Proceedings of the Sound and Music Computing Conference 2023.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        SMC Network.
                </span>
                <span class="vrtx-issn">ISBN 9789152773727.</span>
                            
            doi: <a href="https://doi.org/10.5281/zenodo.8316051">10.5281/zenodo.8316051</a>.
            <a href="https://hdl.handle.net/10852/106232">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">The paper presents a study of the noise level of accelerometer data from a mobile phone compared to three commercially available IMU-based devices (AX3, Equivital, and Movesense) and a marker-based infrared motion capture system (Qualisys). The sensors are compared in static positions and for measuring human micromotion, with larger motion sequences as reference. The measurements show that all but one of the IMU-based devices capture motion with an accuracy and precision that is far below human micromotion. However, their data and representations differ, so care should be taken when comparing data between devices.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2150584" class="vrtx-external-publication">
        <div id="vrtx-publication-2150584">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2150584">
                Bishop, Laura
            </span>(2023).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Focus of Attention Affects Togetherness Experiences and Body Interactivity in Piano Duos.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Psychology of Aesthetics, Creativity, and the Arts.
                </span>
                <span class="vrtx-issn">ISSN 1931-3896.</span>
                            
            doi: <a href="https://doi.org/10.1037/aca0000555">10.1037/aca0000555</a>.
            <a href="https://hdl.handle.net/11250/4937090">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2157675" class="vrtx-external-publication">
        <div id="vrtx-publication-2157675">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2157675">
                Herrebråden, Henrik; Jensenius, Alexander Refsum; Espeseth, Thomas; Bishop, Laura &amp; Vuoskoski, Jonna Katariina
            </span>(2023).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Cognitive load causes kinematic changes in both elite and non-elite rowers.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Human Movement Science.
                </span>
                <span class="vrtx-issn">ISSN 0167-9457.</span>
                            90.
            doi: <a href="https://doi.org/10.1016/j.humov.2023.103113">10.1016/j.humov.2023.103113</a>.
            <a href="https://hdl.handle.net/11250/4565532">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">The current motor literature suggests that extraneous cognitive load may affect performance and kinematics in a primary motor task. A common response to increased cognitive demand, as observed in past studies, might be to reduce movement complexity and revert to previously learned movement patterns, in line with the progression-regression hypothesis. However, according to several accounts of automaticity, motor experts should be able to cope with dual task demands without detriment to their performance and kinematics. To test this, we conducted an experiment asking elite and non-elite rowers to use a rowing ergometer under conditions of varying task load. We employed single-task conditions with low cognitive load (i.e., rowing only) and dual-task conditions with high cognitive load (i.e., rowing and solving arithmetic problems). The results of the cognitive load manipulations were mostly in line with our hypotheses. Overall, participants reduced movement complexity, for example by reverting towards tighter coupling of kinematic events, in their dual-task performance as compared to single-task performance. The between-group kinematic differences were less clear. In contradiction to our hypotheses, we found no significant interaction between skill level and cognitive load, suggesting that the rowers&#39; kinematics were affected by cognitive load irrespective of skill level. Overall, our findings contradict several past findings and automaticity theories, and suggest that attentional resources are required for optimal sports performance.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2045307" class="vrtx-external-publication">
        <div id="vrtx-publication-2045307">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2045307">
                Smetana, Monika; Stepniczka, Irene &amp; Bishop, Laura
            </span>(2022).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        COME_IN: A qualitative framework for content, meanings and intersubjectivity in free dyadic improvisations.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Nordic journal of music therapy.
                </span>
                <span class="vrtx-issn">ISSN 0809-8131.</span>
                            
                <span class="vrtx-pages">p. 1–22.</span>
            doi: <a href="https://doi.org/10.1080/08098131.2022.2084638">10.1080/08098131.2022.2084638</a>.
            <a href="https://hdl.handle.net/10852/101461">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Introduction A growing body of research reflects the interest in meaningful moments in music therapeutic treatment and the client--therapist relationship; however, little insight has been given into the client’s subjective experience and the interweaving processes between a therapist and a client. Central to this initial research is the question of how dialogue as a substantial relational quality emerges and intersubjectively manifests in free, dyadic improvisations, as is typical in humanistic and/or psychodynamic approaches to music therapy practice. This paper presents the qualitative part of a mixed-methods, multi-phase feasibility study intended to develop a framework for non-clinical and clinical research. Method In a non-clinical setting with adult participants (n=9) and trained music therapists (n=8), a total of 17 free dyadic piano improvisations were recorded and verbally reflected on in semi-structured debriefing interviews. These focused on moments or time periods where one or both improvising people had the feeling that something pivotal happened between them. A qualitative summarizing content analysis of the transcribed interviews, including a selective coding process, was conducted to investigate both perspectives. Results As a two-part category system, the framework for content, meanings and intersubjectivity (COME_IN) covers (a) meanings, i.e. different intra- and interpersonal experiences indicating developments and states of relationship in dyadic improvisations, and (b) intersubjectivity, operationalized in temporal and/or content-related overlaps of subjective experiences. Discussion Showing manifold patterns of how meaning and intersubjectivity nonverbally arise between people, the framework provides a solid base for further mixed-methods analyses. Clinical studies are needed to test and refine the categories.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2071464" class="vrtx-external-publication">
        <div id="vrtx-publication-2071464">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2071464">
                Krzyzaniak, Michael Joseph &amp; Bishop, Laura
            </span>(2022).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Professor Plucky—Expressive body motion in human-robot musical ensembles.
                </span>
                    <span class="vrtx-parent-contributors">
                            In Carlson, Kristin (Eds.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    MOCO &#39;22: Proceedings of the 8th International Conference on Movement and Computing.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/forlag?pid=517D4F8F-AF83-4062-82FA-254E8A87D7D8">Association for Computing Machinery (ACM)</a>.
                </span>
                <span class="vrtx-issn">ISBN 9781450387163.</span>
                            
            doi: <a href="https://doi.org/10.1145/3537972.3537983">10.1145/3537972.3537983</a>.
            <a href="https://hdl.handle.net/10852/101117">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">When people play music together, they move their bodies, and that movement plays an important role in the activity of group music making. In contrast, when robots play music with people, the robots are usually stiff and mechanical in their movement. In general, it is not well understood how the movement of such robots affects how people interact with them, or how the robot movement should be designed in order to promote certain features of interaction. As an initial exploration into these questions, we built a prototype guitar plucking robot that plucks the strings with either a) kinetic plucking mechanisms that are designed to have visually appealing movement, or b) control plucking mechanisms that do not visually move. In a pilot study we found that when guitarists play with the robot, they move their hands more and look at the robot more when it uses the kinetic mechanisms as opposed to the control ones. However, they do not report preferring the kinetic mechanisms. These preliminary findings suggest some very clear hypotheses for future followup studies.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2071468" class="vrtx-external-publication">
        <div id="vrtx-publication-2071468">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2071468">
                Bishop, Laura &amp; Keller, Peter E.
            </span>(2022).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Instrumental ensembles.
                </span>
                    <span class="vrtx-parent-contributors">
                            In McPherson, Gary E. (Eds.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    The Oxford Handbook of Music Performance, Volume 2.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/forlag?pid=239F1C9D-8585-4961-B96A-05B4CEBCAF6B">Oxford University Press</a>.
                </span>
                <span class="vrtx-issn">ISBN 9780190058869.</span>
                            
            doi: <a href="https://doi.org/10.1093/oxfordhb/9780190058869.013.21">10.1093/oxfordhb/9780190058869.013.21</a>.
            <a href="https://hdl.handle.net/10852/101462">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Instrumental ensemble playing is a creative process involving real-time interpersonal coordination of sounds, gestures, and musical ideas by two or more musicians. In this chapter, we discuss the psychological mechanisms supporting ensemble coordination. Musicians’ abilities to anticipate, attend, and adapt to intentional and unintentional variability in each other’s playing are central to maintaining coordination during expressive performance. These abilities involve a combination of effortful and automatic processes, which musicians draw on to different degrees, depending on the musical context. Coordination is also partly supported by the affordances (action possibilities) that emerge from the evolving relationships between musicians and their physical environment. For many ensembles, offline preparation sets the groundwork for coordination in later performances, giving musicians opportunities to practice technical skills, familiarize themselves with each other’s playing style, and establish shared landmarks relating to their interpretation of the music. When coordination is successful, a shared sense of togetherness emerges among ensemble musicians. Feelings of togetherness may strengthen as musicians find themselves aware and highly focused on each other’s contributions to the performance, and at the same time able to coordinate seemingly without effort. Following our discussion of psychological mechanisms, we outline the implications that this research has for music education, the development of techniques to enable ensemble playing in networked conditions, and the development of technologies for musical interaction.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2084920" class="vrtx-external-publication">
        <div id="vrtx-publication-2084920">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2084920">
                Akca, Merve; Bishop, Laura; Vuoskoski, Jonna Katariina &amp; Laeng, Bruno
            </span>(2022).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Human voices escape the auditory attentional blink: Evidence from detections and pupil responses.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Brain and Cognition.
                </span>
                <span class="vrtx-issn">ISSN 0278-2626.</span>
                            165.
            doi: <a href="https://doi.org/10.1016/j.bandc.2022.105928">10.1016/j.bandc.2022.105928</a>.
            <a href="https://hdl.handle.net/10852/98941">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Attentional selection of a second target in a rapid stream of stimuli embedding two targets tends to be briefly impaired when two targets are presented in close temporal proximity, an effect known as an attentional blink (AB). Two target sounds (T1 and T2) were embedded in a rapid serial auditory presentation of environmental sounds with a short (Lag 3) or long lag (Lag 9). Participants were to first identify T1 (bell or sine tone) and then to detect T2 (present or absent). Individual stimuli had durations of either 30 or 90 ms, and were presented in streams of 20 sounds. The T2 varied in category: human voice, cello, or dog sound. Previous research has introduced pupillometry as a useful marker of the intensity of cognitive processing and attentional allocation in the visual AB paradigm. Results suggest that the interplay of stimulus factors is critical for target detection accuracy and provides support for the hypothesis that the human voice is the least likely to show an auditory AB (in the 90 ms condition). For the other stimuli, accuracy for T2 was significantly worse at Lag 3 than at Lag 9 in the 90 ms condition, suggesting the presence of an auditory AB. When AB occurred (at Lag 3), we observed smaller pupil dilations, time-locked to the onset of T2, compared to Lag 9, reflecting lower attentional processing when ’blinking’ during target detection. Taken together, these findings support the conclusion that human voices escape the AB and that the pupillary changes are consistent with the so-called T2 attentional deficit. In addition, we found some indication that salient stimuli like human voices could require a less intense allocation of attention, or noradrenergic potentiation, compared to other auditory stimuli.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2076622" class="vrtx-external-publication">
        <div id="vrtx-publication-2076622">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2076622">
                D&#39;Amario, Sara; Goebl, Werner &amp; Bishop, Laura
            </span>(2022).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Judgment of togetherness in performances by musical duos.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Frontiers in Psychology.
                </span>
                            13.
            doi: <a href="https://doi.org/10.3389/fpsyg.2022.997752">10.3389/fpsyg.2022.997752</a>.
            <a href="https://hdl.handle.net/10852/101216">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Musicians experience varying degrees of togetherness with their co-performers when playing in ensembles. However, little is known about how togetherness is experienced by audiences and how interpersonal dynamics in body motion and sound support the judgment of togetherness. This research investigates audience sensitivity to audio and visual markers of interperformer coordination and expressivity in ensembles, in relation to modality of stimulus presentation and audience music background. A set of duo ensemble performances, comprising motion capture recordings of the musicians&#39; upper bodies and instruments, were presented to participants with varying music background, including novices and semi-professional musicians. Participants were required to: (i) watch and listen, (ii) only watch, and (iii) only listen to the selected recordings, whilst providing dynamic ratings of how much togetherness between musicians they perceived. Results demonstrate that sound intensity and similarity in right arm motion (quantified using cross-wavelet transform analysis) were significant predictors of rated togetherness in novices, whilst sound synchronization and chest motion coordination predicted togetherness responses in semi-professional musicians. These results suggest the relevance of the quality of body motion coordination and of certain features of the audio outputs in the audience perception of togetherness. This research contributes to a better understanding of the perceptual mechanisms supporting socio-cognitive judgments of joint action activities.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2004275" class="vrtx-external-publication">
        <div id="vrtx-publication-2004275">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2004275">
                Bishop, Laura; Cancino-Chacón, Carlos Eduardo &amp; Goebl, Werner
            </span>(2022).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Beyond synchronization: Body gestures and gaze direction in duo performance.
                </span>
                    <span class="vrtx-parent-contributors">
                            In Timmers, Renee; Bailes, Freya &amp; Daffern, Helena (Eds.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    Together in Music: Coordination, expression, participation.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/forlag?pid=239F1C9D-8585-4961-B96A-05B4CEBCAF6B">Oxford University Press</a>.
                </span>
                <span class="vrtx-issn">ISBN 9780198860761.</span>
                            
                <span class="vrtx-pages">p. 182–188.</span>
            doi: <a href="https://doi.org/10.1093/oso/9780198860761.003.0023">10.1093/oso/9780198860761.003.0023</a>.
            <a href="https://hdl.handle.net/11250/3248261">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Recent years have seen a rise in interest, from a diversity of fields, in the musical ensemble as an exemplary form of creative group behavior. Musical ensembles can be understood and investigated as high functioning small group organizations that have coordinative structures in place to perform under pressure within strict temporal boundaries. Rehearsals and performances exemplify fruitful contexts for emergent creative behaviour, where novel musical interpretations are negotiated and discovered through improvisatory interaction. Furthermore, group music-making can be an emotionally and socially rewarding experience that enables positive outcomes for wellbeing and development.

This book brings together these different perspectives into one coherent volume, offering insight into the musical ensemble from different analytical levels. Part 1 starts from the meso-level, considering ensembles as creative teams and investigating how musical groups interact at a social and organizational level. Part 2 then zooms in to consider musical coordination and interaction at a micro-level, when considering group music-making as forms of joint action. Finally, a macro-level perspective is taken in Part 3, examining the health and wellbeing affordances associated with acoustical, expressive, and emotional joint behavior. Each part contains a balance of review chapters showcasing the most recent developments in each area of research, followed by demonstrative case studies featuring various ensemble practices and processes.

A rich and multidisciplinary reflection on ensemble music practice, this volume will be an insightful read for music students, teachers, academics, and professionals with an interest in the dynamics of group behavior within a musical context.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1919217" class="vrtx-external-publication">
        <div id="vrtx-publication-1919217">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1919217">
                Bishop, Laura; Jensenius, Alexander Refsum &amp; Laeng, Bruno
            </span>(2021).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Musical and Bodily Predictors of Mental Effort in String Quartet Music: An Ecological Pupillometry Study of Performers and Listeners.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Frontiers in Psychology.
                </span>
                            
            doi: <a href="https://doi.org/10.3389/fpsyg.2021.653021">10.3389/fpsyg.2021.653021</a>.
            <a href="https://hdl.handle.net/10852/86670">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Music performance can be cognitively and physically demanding. These demands vary across the course of a performance as the content of the music changes. More demanding passages require performers to focus their attention more intensity, or expend greater “mental effort.” To date, it remains unclear what effect different cognitive-motor demands have on performers&#39; mental effort. It is likewise unclear how fluctuations in mental effort compare between performers and perceivers of the same music. We used pupillometry to examine the effects of different cognitive-motor demands on the mental effort used by performers and perceivers of classical string quartet music. We collected pupillometry, motion capture, and audio-video recordings of a string quartet as they performed a rehearsal and concert (for live audience) in our lab. We then collected pupillometry data from a remote sample of musically-trained listeners, who heard the audio recordings (without video) that we captured during the concert. We used a modelling approach to assess the effects of performers&#39; bodily effort (head and arm motion; sound level; performers&#39; ratings of technical difficulty), musical complexity (performers&#39; ratings of harmonic complexity; a score-based measure of harmonic tension), and expressive difficulty (performers&#39; ratings of expressive difficulty) on performers&#39; and listeners&#39; pupil diameters. Our results show stimulating effects of bodily effort and expressive difficulty on performers&#39; pupil diameters, and stimulating effects of expressive difficulty on listeners&#39; pupil diameters. We also observed negative effects of musical complexity on both performers and listeners, and negative effects of performers&#39; bodily effort on listeners, which we suggest may reflect the complex relationships that these features share with other aspects of musical structure. 
Looking across the concert, we found that both of the quartet violinists (who exchanged places halfway through the concert) showed more dilated pupils during their turns as 1st violinist than when playing as 2nd violinist, suggesting that they experienced greater arousal when “leading” the quartet in the 1st violin role. This study shows how eye tracking and motion capture technologies can be used in combination in an ecological setting to investigate cognitive processing in music performance.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1936905" class="vrtx-external-publication">
        <div id="vrtx-publication-1936905">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1936905">
                Bishop, Laura; Sanchez, Victor Evaristo Gonzalez; Laeng, Bruno; Jensenius, Alexander Refsum &amp; Høffding, Simon
            </span>(2021).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Move like everyone is watching: Social context affects head motion and gaze in string quartet performance.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Journal of New Music Research.
                </span>
                <span class="vrtx-issn">ISSN 0929-8215.</span>
                            
            doi: <a href="https://doi.org/10.1080/09298215.2021.1977338">10.1080/09298215.2021.1977338</a>.
            <a href="https://hdl.handle.net/11250/5148220">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Ensemble musicians engage with each other visually through glances and body motion. We conducted a case study to test how string quartet musicians would respond to playing conditions that were meant to discourage or promote visually communicative behaviour. A quartet performed in different seating configurations under rehearsal and concert conditions. Quantity of head motion was reduced when musicians’ gaze was constrained. Differences in gaze and body motion between musicians reflected their musical roles in the ensemble. Overall, our findings suggest that gaze and motion dynamics vary within and between performances in response to changing musical, situational and social factors.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1819315" class="vrtx-external-publication">
        <div id="vrtx-publication-1819315">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1819315">
                Bishop, Laura &amp; Jensenius, Alexander Refsum
            </span>(2020).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Reliability of two infrared motion capture systems in a music performance setting.
                </span>
                    <span class="vrtx-parent-contributors">
                            In Spagnol, Simone &amp; Valle, Andrea (Eds.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    Proceedings of the 17th Sound and Music Computing Conference.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        Axea sas/SMC Network.
                </span>
                <span class="vrtx-issn">ISBN 9788894541502.</span>
                            
            
            <a href="https://hdl.handle.net/11250/3371834">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">This paper describes a comparative analysis of tracking quality in two infrared marker-based motion capture systems: one older but high-end (Qualisys, purchased in 2009) and the other newer and mid-range (OptiTrack, purchased in 2019). We recorded performances by a string quartet with both systems simultaneously, using the same frame rate. Our recording set-up included a combination of moving markers (affixed to musicians’ bodies) and stationary markers (affixed to music stands). Higher noise levels were observed in Qualisys recordings of stationary markers than in OptiTrack recordings, as well as a greater spatial range, though OptiTrack recordings had a higher rate of outliers (“spikes” in the signal). In moving markers, increased quantity of motion was associated with increased betweensystem error rates. Both systems showed minimal withintrial drift but a reduction in recording accuracy and precision over the duration of the experiment. Overall, our results show that the older/high-end system (Qualisys) produced slightly lower-quality recordings than the newer/midrange system (OptiTrack). We discuss how our findings may inform researchers’ interpretations of motion capture data, particularly when capturing the types of motion that are important for performing music.</p>
                </span>
        </div>
    </li>
    </ul>
      <p class="vrtx-more-external-publications"><a href="https://nva.sikt.no/research-profile/1136990">View all works in NVA</a></p>
    </div>

    <div id="vrtx-publication-tab-2">
  <ul class="vrtx-external-publications">

      <li id="vrtx-external-publication-2394702" class="vrtx-external-publication">
        <div id="vrtx-publication-2394702">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2394702">
                Bishop, Laura
            </span>(2025).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Individuality and collectivity in professional orchestra string sections: Gauging the strength of coordination in body motion.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4142465">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2394701" class="vrtx-external-publication">
        <div id="vrtx-publication-2394701">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2394701">
                Miles, Oliver; Hazzard, Adrian; Moroz, Solomiya; Bishop, Laura &amp; Vear, Craig
            </span>(2025).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Meaningful interactions in human-AI musicking.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4544755">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2394703" class="vrtx-external-publication">
        <div id="vrtx-publication-2394703">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2394703">
                Bishop, Laura
            </span>(2025).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Bodies in Concert: Assessing group coordination in live concert settings.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4539336">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2391281" class="vrtx-external-publication">
        <div id="vrtx-publication-2391281">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2391281">
                Sveen, Henrik; Bishop, Laura &amp; Jensenius, Alexander Refsum
            </span>(2025).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Cyclic Patterns and Spatial Orientations in Artificial
Impulsive Autonomous Sensory Meridian Response (ASMR) Sounds.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5094288">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Autonomous Sensory Meridian Response (ASMR) is a tingling sensation in the neck and spine often triggered by specific sounds. This paper reports a study on the impact of different cyclic patterns and spatial orientations—defined here as the perceived directionality and motion of sound sources in a three-dimensional auditory space—on inducing ASMR experiences. The results demonstrate that both the type of cyclic pattern and the spatial orientation significantly influence the intensity and nature of ASMR experiences. Furthermore, the research explores synthesizing ASMR-inducing sounds while preserving key audio characteristics from acoustically recorded ASMR content. Through survey data analysis and regression modeling, distinct patterns emerge regarding the relationship between personality traits and ASMR experience. The findings contribute to a deeper understanding of ASMR as a sensory phenomenon and provide insights into the potential applications of artificially generated ASMR stimuli. Additionally, the research sheds light on the role of spatiality in ASMR experiences and the synthesis of ASMR-inducing sounds for future studies and practical applications</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-10422040" class="vrtx-external-publication">
        <div id="vrtx-publication-10422040">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-10422040">
                Abrahamsson, Liv Merve Akca; Bishop, Laura; Vuoskoski, Jonna Katariina &amp; Laeng, Bruno
            </span>(2024).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Are human voices special in the way we attend to them?                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5487339">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2394697" class="vrtx-external-publication">
        <div id="vrtx-publication-2394697">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2394697">
                Bishop, Laura; Hadjidaki-Marder, Elpida; Ledas, Sarunas &amp; Liestøl, Gunnar
            </span>(2024).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Motion capture for augmented reality storytelling in archaeology and cultural heritage dissemination: Simulating an animal sacrifice at Ancient Phalasarna.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5218840">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2394704" class="vrtx-external-publication">
        <div id="vrtx-publication-2394704">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2394704">
                Bishop, Laura
            </span>(2024).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Coordination and individuality in orchestral string sections.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5026630">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2301485" class="vrtx-external-publication">
        <div id="vrtx-publication-2301485">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2301485">
                D&#39;Amario, Sara &amp; Bishop, Laura
            </span>(2024).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Self-Reported Experiences of Musical Togetherness in Music Ensembles.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4765297">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2301497" class="vrtx-external-publication">
        <div id="vrtx-publication-2301497">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2301497">
                D&#39;Amario, Sara &amp; Bishop, Laura
            </span>(2024).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Self-Reported Experiences of Musical Togetherness in Music Ensembles.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5055491">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2357955" class="vrtx-external-publication">
        <div id="vrtx-publication-2357955">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2357955">
                Abrahamsson, Liv Merve Akca; Bishop, Laura; Vuoskoski, Jonna Katariina &amp; Laeng, Bruno
            </span>(2024).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Are human voices ‘special’ in the way we attend to them?                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3262683">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2301524" class="vrtx-external-publication">
        <div id="vrtx-publication-2301524">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2301524">
                Bishop, Laura &amp; D&#39;Amario, Sara
            </span>(2024).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Methods tracking four-hand piano performances.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4369183">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2302119" class="vrtx-external-publication">
        <div id="vrtx-publication-2302119">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2302119">
                Bishop, Laura &amp; Kwak, Dongho
            </span>(2024).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Ignoring a noisy metronome during dyadic drumming.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4165991">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2200372" class="vrtx-external-publication">
        <div id="vrtx-publication-2200372">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2200372">
                Bishop, Laura &amp; Upham, Finn
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Bodies in Concert.
                </span>
                            
            <a href="https://www.uio.no/ritmo/english/projects/Bodies-in-Concert/">www.uio.no/ritmo/english/projects/Bodies-in-Concert/</a>.
            <a href="https://hdl.handle.net/11250/3848760">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Increasingly, research on music performance is moving out of controlled laboratory settings and into concert halls, where there are opportunities to explore how performance unfolds in high-arousal conditions and how performers and audiences interact. In this session, we will present findings from a series of live research concerts that we carried out with the Stavanger Symphony Orchestra. The orchestra performed the same program of classical repertoire for four audiences of schoolchildren and an audience of families. Orchestra members wore sensors that collected cardiac activity, respiration, and body motion data, and the conductor additionally wore a full-body motion capture suit and eye-tracking glasses. Audience members in some of the concerts were invited to wear reflective wristbands, and wristband motion was captured using infrared video recording. We will begin the session with a discussion of the scientific and methodological challenges that arose during the project, in particular relating to the large scale of data capture (&gt;50 musicians and hundreds of audience members), the visible nature of research that is carried out on a concert stage, and the development of procedures for aligning data from different recording modalities. Next, we will present findings from two lines of analysis that investigate different aspects of behavioural and physiological coordination within the orchestra. One analysis investigates the effects of audience noise and musical roles on coherence in (i) cardiac rate and variability and (ii) respiratory phase and rate. The second analysis investigates the effects of musical demands on synchronization of body sway, bowing, and respiration in string sections. We will conclude the session with an open discussion of how live concert research might be optimized.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2158775" class="vrtx-external-publication">
        <div id="vrtx-publication-2158775">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2158775">
                Herrebrøden, Henrik; Espeseth, Thomas &amp; Bishop, Laura
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Cognitive load affects effort, performance, and kinematics in elite and non-elite rowers.
                </span>
                <span class="vrtx-publisher publisher-other publisher-category-ABSTRACT">
                        Journal of Sport &amp; Exercise Psychology (JSEP).
                </span>
                <span class="vrtx-issn">ISSN 0895-2779.</span>
                            45(S1),
                <span class="vrtx-pages">p. S83–S83.</span>
            doi: <a href="https://doi.org/10.1123/jsep.2023-0077">10.1123/jsep.2023-0077</a>.
            <a href="https://hdl.handle.net/11250/5089502">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">The extent to which elite athletes depend on mental effort and attention to task execution has been a debated topic. Some studies have suggested that motor experts might be relatively unaffected in the face of distraction and that they might even perform better when they attend to extraneous cognitive stimuli (for example in a dual-task paradigm), as compared to single-task conditions where they concentrate fully on a sports task. However, task complexity and participants’ skill levels have so far been relatively modest in most dual-task studies. To address gaps in past research, a multi-method study was conducted using a rowing ergometer task. Participants were nine male elite rowers from the Norwegian national rowing team, preparing for the 2020 Olympic Games in Tokyo, as well as nine male recreational rowers. Participants engaged in three-minute rowing trials of varying task demands, including single-task conditions (focusing on rowing only) and dual-task conditions (focusing on rowing and solving arithmetic problems). Performance and mental effort were measured via ergometer data (i.e., rowing speed values) and eye-tracking measures (i.e., blink rates and pupil size measurements), respectively. Movement kinematics was measured by motion capture technology. The results suggested that adding extraneous cognitive load led to performance decline and increased mental effort across all participants. Both elites and non-elites demonstrated kinematic changes when going from single-task to dual-task performance. That is, kinematic events in participants’ lower-body and upper-body segments became more temporally coupled, and more in line with movement patterns associated with novice athletes when the extraneous cognitive load was added. 
This study contradicts several past findings and suggests that elite athletes rely on attentional resources to execute fundamental aspects of their performance. Funding source: Research Council of Norway.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2193550" class="vrtx-external-publication">
        <div id="vrtx-publication-2193550">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2193550">
                Bishop, Laura; Høffding, Simon; Lartillot, Olivier Serge Gabriel &amp; Laeng, Bruno
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Mental effort and expressive interaction in expert and student string quartet performance.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3701045">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2193551" class="vrtx-external-publication">
        <div id="vrtx-publication-2193551">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2193551">
                Bishop, Laura; Niemand, Anna Maria; D&#39;Amario, Sara &amp; Goebl, Werner
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Coordinated head motion predicts cognitive effort and experiences of musical togetherness in singing-piano duos.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3225458">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2193547" class="vrtx-external-publication">
        <div id="vrtx-publication-2193547">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2193547">
                D&#39;Amario, Sara; Ternström, Sten; Goebl, Werner &amp; Bishop, Laura
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Impact of singing togetherness and task complexity on choristers&#39; body motion.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3361936">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2193548" class="vrtx-external-publication">
        <div id="vrtx-publication-2193548">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2193548">
                Bishop, Laura; Bonnin, Geoffray &amp; Frey, Jeremy
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Analyzing physiological data collected during music listening: An introduction.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4418568">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2200130" class="vrtx-external-publication">
        <div id="vrtx-publication-2200130">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2200130">
                Martin, Remy Richard; Cross, Ian; Upham, Finn; Bishop, Laura; Sørbø, Solveig &amp; Øland, Frederik
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        What can one learn from more naturalistic concert research?                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4497157">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2200050" class="vrtx-external-publication">
        <div id="vrtx-publication-2200050">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2200050">
                Riaz, Maham; Upham, Finn; Burnim, Kayla; Bishop, Laura &amp; Jensenius, Alexander Refsum
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Comparing inertial motion sensors for capturing human micromotion.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5122145">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">The paper presents a study of the noise level of accelerometer data from a mobile phone compared to three commercially available IMU-based devices (AX3, Equivital, and Movesense) and a marker-based infrared motion capture system (Qualisys). The sensors are compared in static positions and for measuring human micromotion, with larger motion sequences as reference. The measurements show that all but one of the IMU-based devices capture motion with an accuracy and precision that is far below human micromotion. However, their data and representations differ, so care should be taken when comparing data between devices.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2200583" class="vrtx-external-publication">
        <div id="vrtx-publication-2200583">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2200583">
                Bishop, Laura; Høffding, Simon; Laeng, Bruno &amp; Lartillot, Olivier
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Mental effort and expressive interaction in expert and student string quartet performance.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4263573">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2071477" class="vrtx-external-publication">
        <div id="vrtx-publication-2071477">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2071477">
                Bishop, Laura &amp; Laeng, Bruno
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Expertise modulates the relationship between musical demands and mental effort.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5188966">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2071476" class="vrtx-external-publication">
        <div id="vrtx-publication-2071476">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2071476">
                Bishop, Laura
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Intersubjectivity and musical togetherness: What is the overlap?                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3462975">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2071463" class="vrtx-external-publication">
        <div id="vrtx-publication-2071463">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2071463">
                Krzyzaniak, Michael Joseph &amp; Bishop, Laura
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Professor Plucky—Expressive body motion in human- robot musical ensembles.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3325632">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2071479" class="vrtx-external-publication">
        <div id="vrtx-publication-2071479">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2071479">
                Bishop, Laura
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Emergent coordination of ancillary gestures motivates musical and interperformer engagement during group music-making.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4756219">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2071473" class="vrtx-external-publication">
        <div id="vrtx-publication-2071473">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2071473">
                Bishop, Laura
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Shared attention and shared expressive goals affect classical piano duos&#39; playing quality and experiences of togetherness.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4941176">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2071460" class="vrtx-external-publication">
        <div id="vrtx-publication-2071460">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2071460">
                Bishop, Laura
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Review of “Performing music research: Methods in music education, psychology, and performance science”.
                </span>
                <span class="vrtx-publisher publisher-other publisher-category-BOOKREVIEW">
                        Music &amp; Science.
                </span>
                            
            doi: <a href="https://doi.org/10.1177/20592043221125318">10.1177/20592043221125318</a>.
            <a href="https://hdl.handle.net/11250/4223344">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2071474" class="vrtx-external-publication">
        <div id="vrtx-publication-2071474">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2071474">
                Bishop, Laura
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Attention focus affects togetherness and body interactivity in piano duos.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4238374">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2077928" class="vrtx-external-publication">
        <div id="vrtx-publication-2077928">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2077928">
                Akca, Merve; Bishop, Laura; Vuoskoski, Jonna Katariina &amp; Laeng, Bruno
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Tracing the Temporal Limits of Auditory Information Processing with Pupillometry.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5123478">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2004277" class="vrtx-external-publication">
        <div id="vrtx-publication-2004277">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2004277">
                Bishop, Laura; Sanchez, Victor Evaristo Gonzalez; Laeng, Bruno; Jensenius, Alexander Refsum &amp; H?ffding, Simon
            </span>(2021).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Social context affects head motion and gaze in string quartet rehearsal and concert performance.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3629835">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2004276" class="vrtx-external-publication">
        <div id="vrtx-publication-2004276">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2004276">
                Bishop, Laura
            </span>(2021).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Musical togetherness: The social and artistic rewards of ensemble playing.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5049170">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2004279" class="vrtx-external-publication">
        <div id="vrtx-publication-2004279">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2004279">
                Bishop, Laura &amp; Goebl, Werner
            </span>(2021).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Shaping musical ensemble performance: How ensemble musicians come up with a shared interpretation.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3297016">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2004278" class="vrtx-external-publication">
        <div id="vrtx-publication-2004278">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2004278">
                Bishop, Laura &amp; Goebl, Werner
            </span>(2021).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Togetherness in expressive musical interaction: Effects of social presence on head motion, gaze, and arousal.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3298376">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1815747" class="vrtx-external-publication">
        <div id="vrtx-publication-1815747">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1815747">
                Bishop, Laura &amp; Jensenius, Alexander Refsum
            </span>(2020).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Reliability of two infrared motion capture systems in a music performance setting.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4626282">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">This paper describes a comparative analysis of tracking quality in two infrared marker-based motion capture systems: one older but high-end (Qualisys, purchased in 2009) and the other newer and mid-range (OptiTrack, purchased in 2019). We recorded performances by a string quartet with both systems simultaneously, using the same frame rate. Our recording set-up included a combination of moving markers (affixed to musicians’ bodies) and stationary markers (affixed to music stands). Higher noise levels were observed in Qualisys recordings of stationary markers than in OptiTrack recordings, as well as a greater spatial range, though OptiTrack recordings had a higher rate of outliers (“spikes” in the signal). In moving markers, increased quantity of motion was associated with increased between-system error rates. Both systems showed minimal within-trial drift but a reduction in recording accuracy and precision over the duration of the experiment. Overall, our results show that the older/high-end system (Qualisys) produced slightly lower-quality recordings than the newer/mid-range system (OptiTrack). We discuss how our findings may inform researchers’ interpretations of motion capture data, particularly when capturing the types of motion that are important for performing music.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1891945" class="vrtx-external-publication">
        <div id="vrtx-publication-1891945">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1891945">
                Smetana, Monika; Bishop, Laura &amp; Stepniczka, Irene
            </span>(2020).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Dialogue in music therapy piano partner-play improvisations: First results from an exploratory feasibility study.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4415638">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1891944" class="vrtx-external-publication">
        <div id="vrtx-publication-1891944">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1891944">
                Stepniczka, Irene; Bishop, Laura &amp; Smetana, Monika
            </span>(2020).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Meaningful situations during “partner-play” improvisations: A feasibility study applying a mixed methods approach.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4555588">Full text in Research Archive</a>
        </div>
    </li>
    </ul>
      <p class="vrtx-more-external-publications"><a href="https://nva.sikt.no/research-profile/1136990">View all works in NVA</a></p>
    </div>

      </div>
    </div>



      
            
      
        <div class="vrtx-date-info">
        <span class="published-date-label">Published</span>
        <span class="published-date">Sep. 11, 2019 9:16 AM </span>
        
        - <span class="last-modified-date">Last modified</span>
        <span class="last-modified-date">Nov. 21, 2023 2:54 PM</span>
        
        </div>
      
          </div>
        </div>
        <div id="vrtx-additional-content">
          
      
          

<div class="vrtx-projects vrtx-frontpage-box">
  <h2>Projects</h2>

  <div class="vrtx-box-content">
  <ul class="only-links">
      <li><a href="/ritmo/english/projects/Bodies-in-Concert/index.html">Bodies in Concert</a></li>
  </ul>

        <div id="vrtx-related-projects-completed" class="vrtx-related-projects-completed">
          <h3>Completed projects</h3>
          
          
          
  <ul class="only-links">
      <li><a href="/ritmo/english/projects/effort-attention-musical-experience/index.html">Effort and attention in musical performance and perception</a></li>
      <li><a href="/ritmo/english/projects/completed-projects/professor-plucky/index.html">Professor Plucky</a></li>
  </ul>
        </div>
        <span id="vrtx-related-projects-completed-toggle-wrapper" style="display: none">
          <a id="vrtx-related-projects-completed-toggle" href="javascript:void(0);">Show completed projects</a>
        </span>
  </div>
</div>



          
          
      
      
        <div id="vrtx-related-content">
          <h2>Links</h2>

<p><a href="http://lbishop.ca">Personal website</a></p>

<p>FWF project <a href="https://mdw.ac.at/togetherness/">"Achieving Togetherness in Ensemble Performance"</a></p>

        </div>
      
        </div>
      </div>
       <!--stopindex-->
     </main>
   </div>

    <!-- Page footer start -->
    <footer id="footer-wrapper" class="grid-container faculty-institute-footer">
       <div id="footers" class="row">
            
              <div class="footer-content-wrapper">
                
                
                  <div class="footer-title">
                    <a href="/ritmo/english">RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion</a>
                  </div>
                
                <div class="footer-content">
                  
                    
                      
                        
                          <div>
   <h2>Contact information</h2>
   <p><a href="/ritmo/english/about/">Contact us</a><br>
   <a href="/english/about/getting-around/areas/gaustad/ga09/">Find us</a></p>
</div>
<div>
   <h2>About the website</h2>
   <p><a href="/english/about/regulations/privacy-declarations/privacy-policy-web.html">Cookies</a><br>
   <a href="https://uustatus.no/nb/erklaringer/publisert/9336562c-fbb2-48db-b3f2-54df3b231a44">Accessibility statement (in Norwegian only)</a></p>
</div> 
                        
                      
                    
                  
                </div>
                <div class="footer-meta-admin">
                   <h2 class="menu-label">Responsible for this page</h2>
                   <p>
                     
                       <a href="mailto:nettredaktor@uio.no">Nettredaktør</a>
                     
                   </p>
                   




    <div class="vrtx-login-manage-component">
      <a href="/ritmo/english/people/tenured/laurabi/index.html?authTarget"
         class="vrtx-login-manage-link"
         rel="nofollow">
        Log in
      </a>
    </div>



                </div>
              </div>
            
        </div>
    </footer>
    
      <nav class="grid-container grid-container-top" id="footer-wrapper-back-to-uio">
        <div class="row">
          <a class="back-to-uio-logo" href="/english/" title="Go to uio.no"></a>
        </div>
      </nav>
    

      
         
      
      

<!--a4d1bc0e1742c08b--><script>
// Baidu "link submit" loader: injects Baidu's push.js so the crawler is
// notified of this page's URL. Side effects only; no return value.
// Changes from the original:
//  - dropped the meaningless style="display: none;" attribute on <script>
//    (scripts are not rendered, so the attribute did nothing);
//  - always load the script over HTTPS. The original sniffed
//    window.location.protocol and fell back to an insecure
//    http://push.zhanzhang.baidu.com endpoint on non-HTTPS pages; the
//    https://zz.bdstatic.com endpoint serves in all contexts, and an https
//    sub-resource is valid on both http and https pages, so the branch is
//    unnecessary and the http endpoint is a MITM risk.
(function () {
    var bp = document.createElement('script');
    bp.src = 'https://zz.bdstatic.com/linksubmit/push.js';
    // Insert before the first existing script so the loader works even when
    // document.body is not yet available. Dynamically inserted scripts are
    // async by default, so this does not block parsing.
    var s = document.getElementsByTagName('script')[0];
    s.parentNode.insertBefore(bp, s);
})();
</script><!--/a4d1bc0e1742c08b--></body>
</html>
