<!DOCTYPE html>
<html lang="no">
  <head>
    
    <meta charset="utf-8" >
    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
    <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1" />

    

    <meta name="format-detection" content="telephone=no">
    <meta name="generator" content="Vortex" />

    
      
        <title>RITPART (avsluttet) - RITMO Senter for tverrfaglig forskning på rytme, tid og bevegelse</title>
        <meta property="og:title" content="RITPART (avsluttet) - RITMO Senter for tverrfaglig forskning på rytme, tid og bevegelse" />
      
    

    
  
  
  
  
  
  
  
  

  
    

    
    
    
      
      
        
        
          
          
            
                
            
            
              
            
            
            
              
            
          
          
        
      
    

    <meta name="twitter:card" content="summary_large_image" />
    <meta name="twitter:site" content="@unioslo" />
    <meta name="twitter:title" content="RITPART" />

    
      <meta name="twitter:description" content="RITPART er et partnerskapsprosjekt for å knytte RITMO tettere til verdensledende forskningsmiljøer i USA, Canada og Japan." />
    

    
      <meta name="twitter:image" content="https://www.uio.no/ritmo/english/projects/ritpart/ritpart-index.jpg" />
    

    
    
      <meta name="twitter:url" content="https://www.uio.no/ritmo/prosjekter/ritpart/index.html" />
    
  

    
  
  
  
  
  
  
  
  

  
    
    

    <meta property="og:url" content="https://www.uio.no/ritmo/prosjekter/ritpart/index.html" />
    <meta property="og:type" content="website" />
    
      <meta property="og:description" content="RITPART er et partnerskapsprosjekt for å knytte RITMO tettere til verdensledende forskningsmiljøer i USA, Canada og Japan." />
    

    

    
      
      
        
        
          
            
            
              
              <meta property="og:image" content="https://www.uio.no/ritmo/english/projects/ritpart/ritpart-index.jpg" />
              <meta property="og:image:width" content="507" />
              <meta property="og:image:height" content="254" />

              
                

                
                
                
                  
                

                
                
                
                <meta property="og:updated_time" content="1731918918" />
              
            
          
        
      
    
  


    
  
  
  
  
  
  
  

  
    <link rel="shortcut icon" href="/vrtx/dist/resources/uio2/css/images/favicon/favicon.png?x-h=1774601544824">
  


    
  
  
  

  


    
  
  
  
  
  
  
  
  
  
  
  
  
  
  
  
  

  

  
    <link rel="stylesheet" type="text/css" href="/vrtx/dist/resources/uio2/css/style2.css?x-h=1774601544824" />
  
  

  

  
    
  

  

   
     
       
     
     
       

         
         
       
     

     
   


    
        
      
    
  <meta name="description" content="RITPART er et partnerskapsprosjekt for å knytte RITMO tettere til verdensledende forskningsmiljøer i USA, Canada og Japan." />
</head>

    
    
      
        
      
    

    
      <body class='www.uio.no not-for-ansatte header-context ritmo faculty no '  id="vrtx-structured-project">
    
  <!--stopindex-->

     
  
  
  
  
  
  

  <!-- Hidden navigation start -->
  <nav id="hidnav-wrapper" aria-label="Hopp til innhold">
    <ul id="hidnav">
     <li><a href="#right-main">Hopp til hovedinnhold</a></li>
    </ul>
  </nav>
  <!-- Hidden navigation end -->



    

  
    <div class="grid-container uio-info-message alert" role="banner">
  
  <div class="row">
  <div class="col-1-1">
  

  
  
    
       &nbsp;
    
  
  
  

  </div>
  </div>
  </div>
    

   

    <header id="head-wrapper">
        <div id="head">

           
           <div class="uio-app-name">
                  <a href="/" class="uio-acronym georgia">UiO</a>
                  

                  
                    <a href="/ritmo" class="uio-host">RITMO Senter for tverrfaglig forskning på rytme, tid og bevegelse</a>
                  
            </div>
            

            

            
              <nav id="header-language" aria-label="Språkmeny">
              <span>No</span>
              <a href="/ritmo/english/" class="header-lang-en-link" lang="en">En</a>
            </nav>
            

            <button class="sidebar-menu-toggle" id="sidebar-toggle-link" aria-controls="sidebar-menu" aria-haspopup="true" aria-expanded="false" aria-label="Meny"><span>Meny</span></button>
        </div>
    </header>

   <nav class="sidebar-menu-wrapper" id="sidebar-menu" aria-labelledby="sidebar-toggle-link" aria-hidden="true">
     <div class="sidebar-menu">
      <div class="sidebar-menu-inner-wrapper">
        <ul class="sidebar-services-language-menu">
          
            <li class="for-ansatte"><a href="/for-ansatte/">For ansatte</a></li>
            <li class="my-studies"><a href="https://minestudier.no/nb/index.html">Mine studier</a></li>
              
          
          </ul>
        <div class="sidebar-search search-form">
          
            
            <label for="search-string-responsive" class="search-string-label">Søk i nettsidene til UiO</label>
            
            <button type="submit">Søk</button>
          
        </div>
          <!-- Global navigation start -->
        <div class="sidebar-global-menu">
  
            
              
                  <ul class="vrtx-tab-menu">
    <li class="vrtx-active-item ritmo parent-folder vrtx-current-item" aria-current="page">
  <a href="/ritmo/">RITMO</a>
    </li>
    <li class="om">
  <a href="/ritmo/om/">Om senteret</a>
    </li>
    <li class="personer">
  <a href="/ritmo/personer/">Personer</a>
    </li>
    <li class="aktuelt">
  <a href="/ritmo/aktuelt/">Aktuelt</a>
    </li>
    <li class="forskning">
  <a href="/ritmo/forskning/">Forskning</a>
    </li>
    <li class="publikasjoner">
  <a href="/ritmo/publikasjoner/">Publikasjoner</a>
    </li>
  </ul>


              
            
            
        </div>
        <!-- Global navigation end -->
     </div>
     
       
         <div class="sidebar-menu-inner-wrapper uio"><a href="/">Gå til uio.no</a></div>
       
     
     </div>
   </nav>

   <div id="main" class="main">
     <div id="left-main">
         <nav id="left-menu-same-level-folders" aria-labelledby="left-menu-title">
           <span id="left-menu-title" style="display: none">Undermeny</span>
             <ul class="vrtx-breadcrumb-menu">
            <li class="vrtx-parent" ><a href="/ritmo/prosjekter/"><span>Prosjekter</span></a>

      <ul>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/ferdige-prosjekter/"><span>Avsluttede prosjekter</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/ambient/"><span>AMBIENT</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/audiopred/"><span>AudioPred</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/bioRITMO/"><span>bioRITMO</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/bodies-in-concert/"><span>Bodies in Concert</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/djembedance/"><span>DjembeDance – Multimodal rytme i musikk og dans fra Vest-Afrika</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/dr-squiggles/"><span>Dr. Squiggles</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/FUNCTUMUS/"><span>FUNCTUMUS: The Functional Turn in Music</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/JND%20Groove/"><span>JNDgroove</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/mirage/"><span>MIRAGE</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/modellering-roboter/"><span>Modellering og roboter</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/musiclab/"><span>MusicLab</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/musikalske-frysninger/"><span>Musikalske frysninger</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/musical-hci/"><span>Musikalsk menneske-maskin-interaksjon</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/musikalsk-tid-form/"><span>Musikalsk tid og form</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/nordicsmc/"><span>NordicSMC</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/pirc/"><span>PIRC</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/professor-plucky/"><span>Professor Plucky</span></a></li>
          <li class="vrtx-child"><a class="vrtx-marked" aria-current="page" href="/ritmo/prosjekter/ritpart/"><span>RITPART</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/self-playing-guitars/"><span>Selvspillende gitarer</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/synkronisering-sosial-tilknytning-behag/"><span>Synkronisering, sosial tilknytning og behag</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/synkronisert-robotikk/"><span>Synkronisert robotikk</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/time/"><span>TIME - Musikk og mikrorytmikk</span></a></li>
          <li class="vrtx-child"><a  href="/ritmo/prosjekter/ytelse-bevissthet-musikkopplevelsen/"><span>Ytelse og bevissthet i musikkopplevelsen</span></a></li>
      </ul>

    </li>

  </ul>

         </nav>
     </div>

     <main id="right-main" class="uio-main">
       <nav id="breadcrumbs" aria-label="Brødsmulesti">
         
           






  <div id="vrtx-breadcrumb-wrapper">
    <div id="vrtx-breadcrumb" class="breadcrumb">
            <span class="vrtx-breadcrumb-level vrtx-breadcrumb-level-3 vrtx-breadcrumb-before-active">
            <a href="/ritmo/prosjekter/">Prosjekter</a>
      	  <span class="vrtx-breadcrumb-delimiter">&gt;</span>
        </span>
          <span class="vrtx-breadcrumb-level vrtx-breadcrumb-level-4 vrtx-breadcrumb-active">RITPART
        </span>
    </div>
  </div>

         
       </nav>
           
           
            
            
            

       <!--startindex-->

       
        <div id="vrtx-content">
          
          <div id="vrtx-main-content">
            
      
        <a id="vrtx-change-language-link" href="/ritmo/english/projects/ritpart/index.html">
          English<span class="offscreen-screenreader">
            version of this page
          </span>
        </a>
      
            <h1>RITPART 
      
      
        (avsluttet)
      </h1>
            
      
        <div class="vrtx-introduction"><p>RITPART er et partnerskapsprosjekt for å knytte RITMO tettere til verdensledende forskningsmiljøer i USA, Canada og Japan.</p>
</div>
      
            
      

      
      
      
      
      

      
        
        

        <div class="vrtx-introduction-image">
          <img src="/ritmo/english/projects/ritpart/ritpart-index.jpg" alt="Researchers in front of computers" loading="lazy"/>
          
        </div>
      

      
            <div class="vrtx-article-body">
              <div data-bind="html: popularVitenskapligBeskrivelse">
<p>Gjennom samarbeid med verdensledende forskningsgrupper ved University of California, Berkeley, USA, McGill University, Canada, og Toyohashi University of Technology, Japan, styrker RITMO sin kompetanse innen tre forskningsmetoder:</p>

<ul>
	<li>Hjernem?ling</li>
	<li>Bevegelsessporing</li>
	<li>Blikksporing</li>
</ul>

<p>RITPART-prosjektet finansierer seks seminarer, to på hver av metodene. Disse avholdes vekselvis på RITMO og hos partnerne. I tillegg er det midler for kortere forskningsopphold i begge retninger.</p>

<ul>
	<li>Les mer på prosjektets <a href="/ritmo/english/projects/ritpart/index.html">engelske nettsider</a>.</li>
</ul>

<p>&nbsp;</p>
</div>

            </div>
            
	  
	  

    
    

    
    

	  
      



<style>

    .publisher-category-CHAPTER {
            font-style: normal;
    }

    .parent-title-articlesAndBookChapters,
    .parent-title-other,
    .title-books,
    .publisher-books,
    .publisher-other,
    .publisher-category-ARTICLE {
        font-style: italic;
    }

</style>


    <div id="vrtx-publications-wrapper">

      <h2>Publikasjoner</h2>



      <div id="vrtx-publication-tabs">
        <ul>
            <li><a href="#vrtx-publication-tab-1" name="vrtx-publication-tab-1">Vitenskapelige artikler og bokkapitler</a></li>
            <li><a href="#vrtx-publication-tab-2" name="vrtx-publication-tab-2">Bøker</a></li>
            <li><a href="#vrtx-publication-tab-3" name="vrtx-publication-tab-3">Andre</a></li>
        </ul>



    <div id="vrtx-publication-tab-1">
  <ul class="vrtx-external-publications">

      <li id="vrtx-external-publication-2246804" class="vrtx-external-publication">
        <div id="vrtx-publication-2246804">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2246804">
                Weber, Jan; Solbakk, Anne-Kristin; Blenkmann, Alejandro Omar; Anais, Llorens; Funderud, Ingrid &amp; Leske, Sabine Liliana
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/2246804/contributors', 'vrtx-publication-contributors-2246804')">
                    [Vis alle&nbsp;11&nbsp;forfattere av denne artikkelen]</a>
            </span>(2024).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Ramping dynamics and theta oscillations reflect dissociable signatures during rule-guided human behavior.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Nature Communications.
                </span>
                            15(1).
            doi: <a href="https://doi.org/10.1038/s41467-023-44571-7">10.1038/s41467-023-44571-7</a>.
            <a href="https://hdl.handle.net/11250/4236623">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2249042" class="vrtx-external-publication">
        <div id="vrtx-publication-2249042">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2249042">
                Blenkmann, Alejandro Omar; Leske, Sabine Liliana; Anais, Llorens; Lin, Jack J.; Chang, Edward F. &amp; Brunner, Peter
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/2249042/contributors', 'vrtx-publication-contributors-2249042')">
                    [Vis alle&nbsp;12&nbsp;forfattere av denne artikkelen]</a>
            </span>(2024).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Anatomical registration of intracranial electrodes. Robust model-based localization and deformable smooth brain-shift compensation methods.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Journal of Neuroscience Methods.
                </span>
                <span class="vrtx-issn">ISSN 0165-0270.</span>
                            404.
            doi: <a href="https://doi.org/10.1016/j.jneumeth.2024.110056">10.1016/j.jneumeth.2024.110056</a>.
            <a href="https://hdl.handle.net/11250/4931835">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2248585" class="vrtx-external-publication">
        <div id="vrtx-publication-2248585">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2248585">
                Asko, Olgerta; Blenkmann, Alejandro Omar; Leske, Sabine Liliana; Foldal, Maja Dyhre; Anais, Llorens &amp; Funderud, Ingrid
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/2248585/contributors', 'vrtx-publication-contributors-2248585')">
                    [Vis alle&nbsp;10&nbsp;forfattere av denne artikkelen]</a>
            </span>(2024).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Altered hierarchical auditory predictive processing after lesions to the orbitofrontal cortex.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        eLIFE.
                </span>
                            13.
            doi: <a href="https://doi.org/10.7554/eLife.86386">10.7554/eLife.86386</a>.
            <a href="https://hdl.handle.net/11250/4751619">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2164559" class="vrtx-external-publication">
        <div id="vrtx-publication-2164559">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2164559">
                Weber, Jan; Iwama, Gabriela; Solbakk, Anne-Kristin; Blenkmann, Alejandro Omar; Larsson, Pål Gunnar &amp; Ivanovic, Jugoslav
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/2164559/contributors', 'vrtx-publication-contributors-2164559')">
                    [Vis alle&nbsp;9&nbsp;forfattere av denne artikkelen]</a>
            </span>(2023).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Subspace partitioning in the human prefrontal cortex resolves cognitive interference.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Proceedings of the National Academy of Sciences of the United States of America (PNAS).
                </span>
                <span class="vrtx-issn">ISSN 0027-8424.</span>
                            120(28).
            doi: <a href="https://doi.org/10.1073/pnas.2220523120">10.1073/pnas.2220523120</a>.
            <a href="https://hdl.handle.net/10852/110469">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">The human prefrontal cortex (PFC) constitutes the structural basis underlying flexible cognitive control, where mixed-selective neural populations encode multiple task features to guide subsequent behavior. The mechanisms by which the brain simultaneously encodes multiple task–relevant variables while minimizing interference from task-irrelevant features remain unknown. Leveraging intracranial recordings from the human PFC, we first demonstrate that competition between coexisting representations of past and present task variables incurs a behavioral switch cost. Our results reveal that this interference between past and present states in the PFC is resolved through coding partitioning into distinct low-dimensional neural states; thereby strongly attenuating behavioral switch costs. In sum, these findings uncover a fundamental coding mechanism that constitutes a central building block of flexible cognitive control.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2176865" class="vrtx-external-publication">
        <div id="vrtx-publication-2176865">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2176865">
                Anais, Llorens; Bellier, Ludovic; Blenkmann, Alejandro Omar; Ivanovic, Jugoslav; Larsson, Pål Gunnar &amp; Lin, Jack J.
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/2176865/contributors', 'vrtx-publication-contributors-2176865')">
                    [Vis alle&nbsp;9&nbsp;forfattere av denne artikkelen]</a>
            </span>(2023).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Decision and response monitoring during working memory are sequentially represented in the human insula.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        iScience.
                </span>
                            26(10).
            doi: <a href="https://doi.org/10.1016/j.isci.2023.107653">10.1016/j.isci.2023.107653</a>.
            <a href="https://hdl.handle.net/11250/5011964">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">Emerging research supports a role of the insula in human cognition. Here, we used intracranial EEG to investigate the spatiotemporal dynamics in the insula during a verbal working memory (vWM) task. We found robust effects for theta, beta, and high frequency activity (HFA) during probe presentation requiring a decision. Theta band activity showed differential involvement across left and right insulae while sequential HFA modulations were observed along the anteroposterior axis. HFA in anterior insula tracked decision making and subsequent HFA was observed in posterior insula after the behavioral response. Our results provide electrophysiological evidence of engagement of different insula subregions in both decision-making and response monitoring during vWM and expand our knowledge of the role of the insula in complex human behavior.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2046740" class="vrtx-external-publication">
        <div id="vrtx-publication-2046740">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2046740">
                Johnson, Elizabeth L.; Chang, William K.; Dewar, Callum D.; Sorensen, Donna; Lin, Jack J. &amp; Solbakk, Anne-Kristin
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/2046740/contributors', 'vrtx-publication-contributors-2046740')">
                    [Vis alle&nbsp;12&nbsp;forfattere av denne artikkelen]</a>
            </span>(2022).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Orbitofrontal cortex governs working memory for temporal order.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Current Biology.
                </span>
                <span class="vrtx-issn">ISSN 0960-9822.</span>
                            32(9),
                <span class="vrtx-pages">s. R410–R411.</span>
            doi: <a href="https://doi.org/10.1016/j.cub.2022.03.074">10.1016/j.cub.2022.03.074</a>.
            <a href="https://hdl.handle.net/11250/3918864">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2061616" class="vrtx-external-publication">
        <div id="vrtx-publication-2061616">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2061616">
                Lesteberg, Mari &amp; Jensenius, Alexander Refsum
            </span>(2022).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        MICRO and MACRO - Developing New Accessible Musicking Technologies.
                </span>
                    <span class="vrtx-parent-contributors">
                            I Iber, Michael &amp; Enge, Kajetan (Red.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    Audio Mostly 2022: What you hear is what you see? Perspectives on modalities in sound and music interaction.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        ACM Publications.
                </span>
                <span class="vrtx-issn">ISSN 9781450397018.</span>
                            
                <span class="vrtx-pages">s. 147–150.</span>
            doi: <a href="https://doi.org/10.1145/3561212.3561231">10.1145/3561212.3561231</a>.
            <a href="https://hdl.handle.net/11250/3486287">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">This paper describes the development of two musical instrument prototypes developed to explore how non-haptic music technologies can be accessed from a web browser and how they can offer accessibility for people with low fine motor skills. Two approaches to browser-based motion capture were developed and tested during an iterative design process. This was followed by observational studies of two user groups: one with low fine motor skills and one with normal motor skills. Contrary to our expectations, we found that avoiding the use of buttons and mice did not make the apps more accessible for the participants with low fine motor skills. Furthermore, motion speed was considered more important for people with low motor skills than the size of the control action. The most important finding is that browser-based musical instruments using sensor-based and video-based motion tracking are not only feasible but allow for reaching much larger groups of people than previously possible. This may ultimately lead to both more personalized and accessible musical experiences.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2067086" class="vrtx-external-publication">
        <div id="vrtx-publication-2067086">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2067086">
                Blenkmann, Alejandro Omar; Solbakk, Anne-Kristin; Ivanovic, Jugoslav; Larsson, Pål Gunnar; Knight, Robert Thomas &amp; Endestad, Tor
            </span>(2022).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Modeling intracranial electrodes. A simulation platform for the evaluation of localization algorithms.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Frontiers in Neuroinformatics.
                </span>
                            16,
                <span class="vrtx-pages">s. 1–20.</span>
            doi: <a href="https://doi.org/10.3389/fninf.2022.788685">10.3389/fninf.2022.788685</a>.
            <a href="https://hdl.handle.net/10852/98740">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">Introduction Intracranial electrodes are implanted in patients with drug-resistant epilepsy as part of their pre-surgical evaluation. This allows the investigation of normal and pathological brain functions with excellent spatial and temporal resolution. The spatial resolution relies on methods that precisely localize the implanted electrodes in the cerebral cortex, which is critical for drawing valid inferences about the anatomical localization of brain function. Multiple methods have been developed to localize the electrodes, mainly relying on pre-implantation MRI and post-implantation computer tomography (CT) images. However, they are hard to validate because there is no ground truth data to test them and there is no standard approach to systematically quantify their performance. In other words, their validation lacks standardization. Our work aimed to model intracranial electrode arrays and simulate realistic implantation scenarios, thereby providing localization algorithms with new ways to evaluate and optimize their performance. Results We implemented novel methods to model the coordinates of implanted grids, strips, and depth electrodes, as well as the CT artifacts produced by these. We successfully modeled realistic implantation scenarios, including different sizes, inter-electrode distances, and brain areas. In total, ～3,300 grids and strips were fitted over the brain surface, and ～850 depth electrode arrays penetrating the cortical tissue were modeled. Realistic CT artifacts were simulated at the electrode locations under 12 different noise levels. Altogether, ～50,000 thresholded CT artifact arrays were simulated in these scenarios, and validated with real data from 17 patients regarding the coordinates’ spatial deformation, and the CT artifacts’ shape, intensity distribution, and noise level. 
Finally, we provide an example of how the simulation platform is used to characterize the performance of two cluster-based localization methods. Conclusion We successfully developed the first platform to model implanted intracranial grids, strips, and depth electrodes and realistically simulate thresholded CT artifacts and their noise. These methods provide a basis for developing more complex models, while simulations allow systematic evaluation of the performance of electrode localization techniques. The methods described in this article, and the results obtained from the simulations, are freely available via open repositories. A graphical user interface implementation is also accessible via the open-source iElectrodes toolbox.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1945470" class="vrtx-external-publication">
        <div id="vrtx-publication-1945470">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1945470">
                Jensenius, Alexander Refsum &amp; Erdem, Cagri
            </span>(2022).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Gestures in ensemble performance.
                </span>
                    <span class="vrtx-parent-contributors">
                            I Timmers, Renee; Bailes, Freya &amp; Daffern, Helena (Red.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    Together in Music: Coordination, expression, participation.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/forlag?pid=239F1C9D-8585-4961-B96A-05B4CEBCAF6B">Oxford University Press</a>.
                </span>
                <span class="vrtx-issn">ISBN 9780198860761.</span>
                            
            doi: <a href="https://doi.org/10.1093/oso/9780198860761.003.0014">10.1093/oso/9780198860761.003.0014</a>.
            <a href="https://hdl.handle.net/11250/4344879">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">The topic of gesture has received growing attention among music researchers over recent decades. Some of this research has been summarized in anthologies on &quot;musical gestures&quot;, such as those by Gritten and King (2006), Godøy and Leman (2010), and Gritten and King (2011). There have also been a couple of articles reviewing how the term gesture has been used in various music-related disciplines (and beyond), including those by Cadoz and Wanderley (2000) and Jensenius et al. (2010). Much empirical work has been performed since these reviews were written, aided by better motion capture technologies, new machine learning techniques, and a heightened awareness of the topic. Still there are a number of open questions as to the role of gestures in music performance in general, and in ensemble performance in particular. This chapter aims to clarify some of the basic terminology of music-related body motion, and draw up some perspectives of how one can think about gestures in ensemble performance. This is, obviously, only one way of looking at the very multifaceted concept of gesture, but it may lead to further interest in this exciting and complex research domain.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1893325" class="vrtx-external-publication">
        <div id="vrtx-publication-1893325">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1893325">
                Côté-Allard, Ulysse; Gagnon-Turcotte, Gabriel; Phinyomark, Angkoon; Glette, Kyrre Harald; Scheme, Erik &amp; Laviolette, François
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/1893325/contributors', 'vrtx-publication-contributors-1893325')">
                    [Vis alle&nbsp;7&nbsp;forfattere av denne artikkelen]</a>
            </span>(2021).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        A Transferable Adaptive Domain Adversarial Neural Network for Virtual Reality Augmented EMG-Based Gesture Recognition.                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        IEEE transactions on neural systems and rehabilitation engineering.
                </span>
                <span class="vrtx-issn">ISSN 1534-4320.</span>
                            
            doi: <a href="https://doi.org/10.1109/TNSRE.2021.3059741">10.1109/TNSRE.2021.3059741</a>.
            <a href="https://hdl.handle.net/11250/3589517">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">Within the field of electromyography-based (EMG) gesture recognition, disparities exist between the offline accuracy reported in the literature and the real-time usability of a classifier. This gap mainly stems from two factors: 1) The absence of a controller, making the data collected dissimilar to actual control. 2) The difficulty of including the four main dynamic factors (gesture intensity, limb position, electrode shift, and transient changes in the signal), as including their permutations drastically increases the amount of data to be recorded. Contrarily, online datasets are limited to the exact EMG-based controller used to record them, necessitating the recording of a new dataset for each control method or variant to be tested. Consequently, this paper proposes a new type of dataset to serve as an intermediate between offline and online datasets, by recording the data using a real-time experimental protocol. The protocol, performed in virtual reality, includes the four main dynamic factors and uses an EMG-independent controller to guide movements. This EMG-independent feedback ensures that the user is in-the-loop during recording, while enabling the resulting dynamic dataset to be used as an EMG-based benchmark. The dataset is comprised of 20 able-bodied participants completing three to four sessions over a period of 14 to 21 days. The ability of the dynamic dataset to serve as a benchmark is leveraged to evaluate the impact of different recalibration techniques for long-term (across-day) gesture recognition, including a novel algorithm, named TADANN. TADANN consistently and significantly ( p&lt;0.05 ) outperforms using fine-tuning as the recalibration technique.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1914431" class="vrtx-external-publication">
        <div id="vrtx-publication-1914431">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1914431">
                Masu, Raul; Melbye, Adam Pultz; Sullivan, John &amp; Jensenius, Alexander Refsum
            </span>(2021).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        NIME and the Environment: Toward a More Sustainable NIME Practice.
                </span>
                    <span class="vrtx-parent-contributors">
                            I Dannenberg, Roger &amp; Xiao, Xiao (Red.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    Proceedings of the International Conference on New Interfaces for Musical Expression.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/forlag?pid=031C5553-12A0-453E-B4FA-DC2B19B95BD2">The International Conference on New Interfaces for Musical Expression</a>.
                </span>
                            
            
            <a href="https://hdl.handle.net/10852/86529">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">This paper addresses environmental issues around NIME research and practice. We discuss the formulation of an environmental statement for the conference as well as the initiation of a NIME Eco Wiki containing information on environmental concerns related to the creation of new musical instruments. We outline a number of these concerns and, by systematically reviewing the proceedings of all previous NIME conferences, identify a general lack of reflection on the environmental impact of the research undertaken. Finally, we propose a framework for addressing the making, testing, using, and disposal of NIMEs in the hope that sustainability may become a central concern to researchers. </p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1967503" class="vrtx-external-publication">
        <div id="vrtx-publication-1967503">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1967503">
                Jensenius, Alexander Refsum
            </span>(2021).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Best versus Good Enough Practices for Open Music Research.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Empirical Musicology Review.
                </span>
                            16(1).
            doi: <a href="https://doi.org/10.18061/emr.v16i1.7646">10.18061/emr.v16i1.7646</a>.
            <a href="https://hdl.handle.net/11250/4488029">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">Music researchers work with increasingly large and complex data sets. There are few established data handling practices in the field and several conceptual, technological, and practical challenges. Furthermore, many music researchers are not equipped for (or interested in) the craft of data storage, curation, and archiving. This paper discusses some of the particular challenges that empirical music researchers face when working towards Open Research practices: handling (1) (multi)media files, (2) privacy, and (3) copyright issues. These are exemplified through MusicLab, an event series focused on fostering openness in music research. It is argued that the &quot;best practice&quot; suggested by the FAIR principles is too demanding in many cases, but &quot;good enough practice&quot; may be within reach for many. A four-layer data handling &quot;recipe&quot; is suggested as concrete advice for achieving &quot;good enough practice&quot; in empirical music research.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1929045" class="vrtx-external-publication">
        <div id="vrtx-publication-1929045">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1929045">
                Slama, Katarina; Jimenez, Richard; Saha, Sujayam; King-Stephens, David; Laxer, Kenneth D. &amp; Weber, Peter B.
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/1929045/contributors', 'vrtx-publication-contributors-1929045')">
                    [Vis alle&nbsp;12&nbsp;forfattere av denne artikkelen]</a>
            </span>(2021).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Intracranial recordings demonstrate both cortical and medial temporal lobe engagement in visual search in humans.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Journal of Cognitive Neuroscience.
                </span>
                <span class="vrtx-issn">ISSN 0898-929X.</span>
                            33(9),
                <span class="vrtx-pages">s. 1833–1861.</span>
            doi: <a href="https://doi.org/10.1162/jocn_a_01739">10.1162/jocn_a_01739</a>.
            <a href="https://hdl.handle.net/11250/3833840">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1929024" class="vrtx-external-publication">
        <div id="vrtx-publication-1929024">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1929024">
                Solbakk, Anne-Kristin; Lubell, James; Leske, Sabine; Funderud, Ingrid; Anais, Llorens &amp; Blenkmann, Alejandro Omar
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/1929024/contributors', 'vrtx-publication-contributors-1929024')">
                    [Vis alle&nbsp;10&nbsp;forfattere av denne artikkelen]</a>
            </span>(2021).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Monitoring of self-paced action timing and sensory outcomes after lesions to the orbitofrontal cortex.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Journal of Cognitive Neuroscience.
                </span>
                <span class="vrtx-issn">ISSN 0898-929X.</span>
                            33(9),
                <span class="vrtx-pages">s. 1956–1975.</span>
            doi: <a href="https://doi.org/10.1162/jocn_a_01733">10.1162/jocn_a_01733</a>.
            <a href="https://hdl.handle.net/11250/4167481">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1942348" class="vrtx-external-publication">
        <div id="vrtx-publication-1942348">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1942348">
                Llorens, Anaïs; Tzovara, Athina; Bellier, Ludovic; Bhaya-Grossman, Ilina; Bidet-Caulet, Aurelie &amp; Chang, William K
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/1942348/contributors', 'vrtx-publication-contributors-1942348')">
                    [Vis alle&nbsp;45&nbsp;forfattere av denne artikkelen]</a>
            </span>(2021).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Gender bias in academia: A lifetime problem that needs solutions.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Neuron.
                </span>
                <span class="vrtx-issn">ISSN 0896-6273.</span>
                            109(13),
                <span class="vrtx-pages">s. 2047–2074.</span>
            doi: <a href="https://doi.org/10.1016/j.neuron.2021.06.002">10.1016/j.neuron.2021.06.002</a>.
            <a href="https://hdl.handle.net/11250/3616993">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1954360" class="vrtx-external-publication">
        <div id="vrtx-publication-1954360">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1954360">
                Laczko, Balint &amp; Jensenius, Alexander Refsum
            </span>(2021).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Reflections on the Development of the Musical Gestures Toolbox for Python.
                </span>
                    <span class="vrtx-parent-contributors">
                            I Kantan, Prithvi Ravi; Paisa, Razvan &amp; Willemsen, Silvin (Red.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    Proceedings of the Nordic Sound and Music Computing Conference.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/forlag?pid=2B38F065-B3E6-4061-9F0C-0BA1287EEAFF">Aalborg Universitetsforlag</a>.
                </span>
                            
            
            <a href="https://hdl.handle.net/10852/89331">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">The paper presents the Musical Gestures Toolbox (MGT) for Python, a collection of modules targeted at researchers working with video recordings. The toolbox includes video visualization techniques such as creating motion videos, motion history images, and motiongrams. These visualizations allow for studying video recordings from different temporal and spatial perspectives. The toolbox also includes basic computer vision methods, and it is designed to integrate well with audio analysis toolboxes. The MGT was initially developed to analyze music-related body motion (of musicians, dancers, and perceivers) but is equally helpful for other disciplines working with video recordings of humans, such as linguistics, pedagogy, psychology, and medicine.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1908518" class="vrtx-external-publication">
        <div id="vrtx-publication-1908518">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1908518">
                Liebrand, Matthias; Solbakk, Anne-Kristin; Funderud, Ingrid; Buades-Rotger, Macià; Knight, Robert Thomas &amp; Krämer, Ulrike M
            </span>(2021).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Intact Proactive Motor Inhibition after Unilateral Prefrontal Cortex or Basal Ganglia Lesions.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Journal of Cognitive Neuroscience.
                </span>
                <span class="vrtx-issn">ISSN 0898-929X.</span>
                            33(9),
                <span class="vrtx-pages">s. 1862–1879.</span>
            doi: <a href="https://doi.org/10.1162/jocn_a_01691">10.1162/jocn_a_01691</a>.
            <a href="https://hdl.handle.net/11250/4801757">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1814802" class="vrtx-external-publication">
        <div id="vrtx-publication-1814802">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1814802">
                Foldal, Maja Dyhre; Blenkmann, Alejandro Omar; Llorens, Anaïs; Knight, Robert T.; Solbakk, Anne-Kristin &amp; Endestad, Tor
            </span>(2020).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        The brain tracks auditory rhythm predictability independent of selective attention.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Scientific Reports.
                </span>
                            10,
                <span class="vrtx-pages">s. 1–13.</span>
            doi: <a href="https://doi.org/10.1038/s41598-020-64758-y">10.1038/s41598-020-64758-y</a>.
            <a href="https://hdl.handle.net/11250/4386260">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">The brain responds to violations of expected rhythms, due to extraction- and prediction of the temporal structure in auditory input. Yet, it is unknown how probability of rhythm violations affects the overall rhythm predictability. Another unresolved question is whether predictive processes are independent of attention processes. In this study, EEG was recorded while subjects listened to rhythmic sequences. Predictability was manipulated by changing the stimulus-onset-asynchrony (SOA deviants) for given tones in the rhythm. When SOA deviants were inserted rarely, predictability remained high, whereas predictability was lower with more frequent SOA deviants. Dichotic tone-presentation allowed for independent manipulation of attention, as specific tones of the rhythm were presented to separate ears. Attention was manipulated by instructing subjects to attend to tones in one ear only, while keeping the rhythmic structure of tones constant. The analyses of event-related potentials revealed an attenuated N1 for tones when rhythm predictability was high, while the N1 was enhanced by attention to tones. Bayesian statistics revealed no interaction between predictability and attention. A right-lateralization of attention effects, but not predictability effects, suggested potentially different cortical processes. This is the first study to show that probability of rhythm violation influences rhythm predictability, independent of attention.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1774481" class="vrtx-external-publication">
        <div id="vrtx-publication-1774481">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1774481">
                Llorens, Anaïs; Funderud, Ingrid; Blenkmann, Alejandro Omar; Lubell, James; Foldal, Maja Dyhre &amp; Leske, Sabine Liliana
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/1774481/contributors', 'vrtx-publication-contributors-1774481')">
                    [Vis alle&nbsp;11&nbsp;forfattere av denne artikkelen]</a>
            </span>(2020).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Preservation of Interference Effects in Working Memory After Orbitofrontal Damage.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Frontiers in Human Neuroscience.
                </span>
                            13,
                <span class="vrtx-pages">s. 1–15.</span>
            doi: <a href="https://doi.org/10.3389/fnhum.2019.00445">10.3389/fnhum.2019.00445</a>.
            <a href="https://hdl.handle.net/10852/75550">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">Orbitofrontal cortex (OFC) is implicated in multiple cognitive processes, including inhibitory control, context memory, recency judgment, and choice behavior. Despite an emerging understanding of the role of OFC in memory and executive control, its necessity for core working memory (WM) operations remains undefined. Here, we assessed the impact of OFC damage on interference effects in WM using a Recent Probes task based on the Sternberg item-recognition task (1966). Subjects were asked to memorize a set of letters and then indicate whether a probe letter was presented in a particular set. Four conditions were created according to the forthcoming response (&quot;yes&quot;/&quot;no&quot;) and the recency of the probe (presented in the previous trial set or not). We compared behavioral and electroencephalography (EEG) responses between healthy subjects (n = 14) and patients with bilateral OFC damage (n = 14). Both groups had the same recency pattern of slower reaction time (RT) when the probe was presented in the previous trial but not in the current one, reflecting the proactive interference (PI). The within-group electrophysiological results showed no condition difference during letter encoding and maintenance. In contrast, event-related potentials (ERPs) to probes showed distinct within-group condition effects, and condition by group effects. The response and recency effects for controls occurred within the same time window (300-500 ms after probe onset) and were observed in two distinct spatial groups including right centro-posterior and left frontal electrodes. Both clusters showed ERP differences elicited by the response effect, and one cluster was also sensitive to the recency manipulation. 
Condition differences for the OFC group involved two different clusters, encompassing only left hemisphere electrodes and occurring during two consecutive time windows (345-463 ms and 565-710 ms). Both clusters were sensitive to the response effect, but no recency effect was found despite the behavioral recency effect. Although the groups had different electrophysiological responses, the maintenance of letters in WM, the evaluation of the context of the probe, and the decision to accept or reject a probed letter were preserved in OFC patients. The results suggest that neural reorganization may contribute to intact recency judgment and response after OFC damage.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1884870" class="vrtx-external-publication">
        <div id="vrtx-publication-1884870">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1884870">
                Kam, Julia W.Y.; Helfrich, Randolph H.; Solbakk, Anne-Kristin; Endestad, Tor; Larsson, Pål Gunnar &amp; Lin, Jack J.
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/1884870/contributors', 'vrtx-publication-contributors-1884870')">
                    [Vis alle&nbsp;7&nbsp;forfattere av denne artikkelen]</a>
            </span>(2020).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Top-Down Attentional Modulation in Human Frontal Cortex: Differential Engagement during External and Internal Attention.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Cerebral Cortex.
                </span>
                <span class="vrtx-issn">ISSN 1047-3211.</span>
                            31(2),
                <span class="vrtx-pages">s. 873–883.</span>
            doi: <a href="https://doi.org/10.1093/cercor/bhaa262">10.1093/cercor/bhaa262</a>.
            <a href="https://hdl.handle.net/11250/4884166">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">Decades of electrophysiological research on top-down control converge on the role of the lateral frontal cortex in facilitating attention to behaviorally relevant external inputs. However, the involvement of frontal cortex in the top-down control of attention directed to the external versus internal environment remains poorly understood. To address this, we recorded intracranial electrocorticography while subjects directed their attention externally to tones and responded to infrequent target tones, or internally to their own thoughts while ignoring the tones. Our analyses focused on frontal and temporal cortices. We first computed the target effect, as indexed by the difference in high frequency activity (70-150 Hz) between target and standard tones. Importantly, we then compared the target effect between external and internal attention, reflecting a top-down attentional effect elicited by task demands, in each region of interest. Both frontal and temporal cortices showed target effects during external and internal attention, suggesting this effect is present irrespective of attention states. However, only the frontal cortex showed an enhanced target effect during external relative to internal attention. These findings provide electrophysiological evidence for top-down attentional modulation in the lateral frontal cortex, revealing preferential engagement with external attention.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1838307" class="vrtx-external-publication">
        <div id="vrtx-publication-1838307">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1838307">
                Côté-Allard, Ulysse; Gagnon-Turcotte, Gabriel; Phinyomark, Angkoon; Glette, Kyrre; Scheme, Erik &amp; Laviolette, Francois
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/1838307/contributors', 'vrtx-publication-contributors-1838307')">
                    [Vis alle&nbsp;7&nbsp;forfattere av denne artikkelen]</a>
            </span>(2020).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Unsupervised Domain Adversarial Self-Calibration for Electromyography-Based Gesture Recognition.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        IEEE Access.
                </span>
                            8,
                <span class="vrtx-pages">s. 177941–177955.</span>
            doi: <a href="https://doi.org/10.1109/ACCESS.2020.3027497">10.1109/ACCESS.2020.3027497</a>.
            <a href="https://hdl.handle.net/10852/80584">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">Surface electromyography (sEMG) provides an intuitive and non-invasive interface from which to control machines. However, preserving the myoelectric control system’s performance over multiple days is challenging, due to the transient nature of the signals obtained with this recording technique. In practice, if the system is to remain usable, a time-consuming and periodic recalibration is necessary. In the case where the sEMG interface is employed every few days, the user might need to do this recalibration before every use. Thus, severely limiting the practicality of such a control method. Consequently, this paper proposes tackling the especially challenging task of unsupervised adaptation of sEMG signals, when multiple days have elapsed between each recording, by introducing Self-Calibrating Asynchronous Domain Adversarial Neural Network (SCADANN). SCADANN is compared with two state-of-the-art self-calibrating algorithms developed specifically for deep learning within the context of EMG-based gesture recognition and three state-of-the-art domain adversarial algorithms. The comparison is made both on an offline and a dynamic dataset (20 participants per dataset), using two different deep network architectures with two different input modalities (temporal-spatial descriptors and spectrograms). Overall, SCADANN is shown to substantially and systematically improves classification performances over no recalibration and obtains the highest average accuracy for all tested cases across all methods.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1818277" class="vrtx-external-publication">
        <div id="vrtx-publication-1818277">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1818277">
                Sato, Fumiaki; Laeng, Bruno; Nakauchi, Shigeki &amp; Minami, Tetsuto
            </span>(2020).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Cueing the Necker cube: Pupil dilation reflects the viewing-from-above constraint in bistable perception.                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Journal of Vision.
                </span>
                            20(4).
            doi: <a href="https://doi.org/10.1167/jov.20.4.7">10.1167/jov.20.4.7</a>.
            <a href="https://hdl.handle.net/11250/4173460">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">We hypothesized that a perceptually ambiguous or bistable object (Necker cube) can be more effectively biased to assume a point of view-from-above (VFA) than from below the object by cueing attention. Participants viewed a Necker cube in which one surface was temporarily shaded so as to prime a specific perspective on the cube. Subsequently, the standard (wireframe) Necker cube was viewed for 3 seconds, and participants reported what perspective they had seen initially and whether their perception shifted to the alternative perspective during the brief viewing. Concomitantly, pupil size was monitored with an eye-tracker to obtain an index of cognitive effort. There were two conditions: passive viewing and forced attention to sustain the initially primed perspective. We confirmed the presence of a VFA bias with forced attention, which was accompanied by reduced attentional effort, as indexed by a reduced pupil diameter, compared with the view-from-below. Participants showed no bias during passive viewing. We suggest that the level of intensive attention, when retrieving and maintaining a specific view from memory, is mirrored in the size of the eye pupils and may reflect ecological constraints on visual perception.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1712751" class="vrtx-external-publication">
        <div id="vrtx-publication-1712751">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1712751">
                Suzuki, Yuta; Minami, Tetsuto; Laeng, Bruno &amp; Nakauchi, Shigeki
            </span>(2019).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Colorful glares: Effects of colors on brightness illusions measured with pupillometry.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Acta Psychologica.
                </span>
                <span class="vrtx-issn">ISSN 0001-6918.</span>
                            198,
                <span class="vrtx-pages">s. 1–10.</span>
            doi: <a href="https://doi.org/10.1016/j.actpsy.2019.102882">10.1016/j.actpsy.2019.102882</a>.
            <a href="https://hdl.handle.net/11250/4358728">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">We hypothesized that pupil constrictions to the glare illusion, where converging luminance gradients subjectively enhance the perception of brightness, would be stronger for ‘blue’ than for other colors. Such an expectation was based on reflections about the ecology of vision, where the experience of dazzling light is common when one happens to look directly at sunlight through some occluders. Thus, we hypothesized that pupil constrictions to ‘blue’ reflect an ecologically-based expectation of the visual system from the experience of sky&#39;s light and color, which also leads to interpret the blue gradients of illusory glare to act as effective cues to impending probable intense light. We therefore manipulated the gradients color of glare illusions and measured changes in subjective brightness of identical shape stimuli. We confirmed that the blue resulted in what was subjectively evaluated as the brightest condition, despite all colored stimuli were equiluminant. This enhanced brightness effect was observed both in a psychophysical adjustment task and in changes in pupil size, where the maximum pupil constriction peak was observed with the ‘blue’ converging gradients over and above to the pupil response to blue in other conditions (i.e., diverging gradients and homogeneous patches). Moreover, glare-related pupil constrictions for each participant were correlated to each individual&#39;s subjective brightness adjustments. Homogenous blue hues also constricted the pupil more than other hues, which represents a pupillometric analog of the Helmholtz-Kohlrausch effect on brightness perception. Together, these findings show that pupillometry constitutes an easy tool to assess individual differences in color brightness perception.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1761259" class="vrtx-external-publication">
        <div id="vrtx-publication-1761259">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1761259">
                Blenkmann, Alejandro Omar; Collavini, Santiago; Lubell, James; Anais, Llorens; Funderud, Ingrid &amp; Ivanovic, Jugoslav
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/1761259/contributors', 'vrtx-publication-contributors-1761259')">
                    [Vis alle&nbsp;13&nbsp;forfattere av denne artikkelen]</a>
            </span>(2019).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Auditory deviance detection in the human insula: An intracranial EEG study.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Cortex.
                </span>
                <span class="vrtx-issn">ISSN 0010-9452.</span>
                            121,
                <span class="vrtx-pages">s. 189–200.</span>
            doi: <a href="https://doi.org/10.1016/j.cortex.2019.09.002">10.1016/j.cortex.2019.09.002</a>.
            <a href="https://hdl.handle.net/10852/75077">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">The human insula is known to be involved in auditory processing, but knowledge about its precise functional role and the underlying electrophysiology is limited. To assess its role in automatic auditory deviance detection we analyzed the EEG high frequency activity (HFA; 75–145 Hz) and ERPs from 90 intracranial insular channels across 16 patients undergoing pre-surgical intracranial monitoring for epilepsy treatment. Subjects passively listened to a stream of standard and deviant tones differing in four physical dimensions: intensity, frequency, location or time. HFA responses to auditory stimuli were found in the short and long gyri, and the anterior, superior, and inferior segments of the circular sulcus of the insular cortex. Only a subset of channels in the inferior segment of the circular sulcus of the insula showed HFA deviance detection responses, i.e., a greater and longer latency response to specific deviants relative to standards. Auditory deviancy processing was also later in the insula when compared with the superior temporal cortex. ERP results were more widespread and supported the HFA insular findings. These results provide evidence that the human insula is engaged during auditory deviance detection.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1677710" class="vrtx-external-publication">
        <div id="vrtx-publication-1677710">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1677710">
                Sadaghiani, Sepideh; Dombert, Pascasie L.; Løvstad, Marianne; Funderud, Ingrid; Meling, Torstein Ragnar &amp; Endestad, Tor
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/1677710/contributors', 'vrtx-publication-contributors-1677710')">
                    [Vis alle&nbsp;9&nbsp;forfattere av denne artikkelen]</a>
            </span>(2019).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Lesions to the fronto-parietal network impact alpha-band phase synchrony and cognitive control.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Cerebral Cortex.
                </span>
                <span class="vrtx-issn">ISSN 1047-3211.</span>
                            29(10),
                <span class="vrtx-pages">s. 4143–4153.</span>
            doi: <a href="https://doi.org/10.1093/cercor/bhy296">10.1093/cercor/bhy296</a>.
            <a href="https://hdl.handle.net/11250/3294309">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1592498" class="vrtx-external-publication">
        <div id="vrtx-publication-1592498">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1592498">
                Johnson, Elizabeth L.; Adams, Jenna N.; Solbakk, Anne-Kristin; Endestad, Tor; Larsson, Pål Gunnar &amp; Ivanovic, Jugoslav
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/1592498/contributors', 'vrtx-publication-contributors-1592498')">
                    [Vis alle&nbsp;9&nbsp;forfattere av denne artikkelen]</a>
            </span>(2018).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Dynamic frontotemporal systems process space and time in working memory.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        PLoS Biology.
                </span>
                <span class="vrtx-issn">ISSN 1544-9173.</span>
                            16(3).
            doi: <a href="https://doi.org/10.1371/journal.pbio.2004274">10.1371/journal.pbio.2004274</a>.
            <a href="https://hdl.handle.net/10852/71158">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">How do we rapidly process incoming streams of information in working memory, a cognitive mechanism central to human behavior? Dominant views of working memory focus on the prefrontal cortex (PFC), but human hippocampal recordings provide a neurophysiological signature distinct from the PFC. Are these regions independent, or do they interact in the service of working memory? We addressed this core issue in behavior by recording directly from frontotemporal sites in humans performing a visuospatial working memory task that operationalizes the types of identity and spatiotemporal information we encounter every day. Theta band oscillations drove bidirectional interactions between the PFC and medial temporal lobe (MTL; including the hippocampus). MTL theta oscillations directed the PFC preferentially during the processing of spatiotemporal information, while PFC theta oscillations directed the MTL for all types of information being processed in working memory. These findings reveal an MTL theta mechanism for processing space and time and a domain-general PFC theta mechanism, providing evidence that rapid, dynamic MTL–PFC interactions underlie working memory for everyday experiences.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1595584" class="vrtx-external-publication">
        <div id="vrtx-publication-1595584">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1595584">
                Kam, Julia W.Y.; Solbakk, Anne-Kristin; Endestad, Tor; Meling, Torstein Ragnar &amp; Knight, Robert Thomas
            </span>(2018).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Lateral prefrontal cortex lesion impairs regulation of internally and externally directed attention.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        NeuroImage.
                </span>
                <span class="vrtx-issn">ISSN 1053-8119.</span>
                            175(15),
                <span class="vrtx-pages">s. 91–99.</span>
            doi: <a href="https://doi.org/10.1016/j.neuroimage.2018.03.063">10.1016/j.neuroimage.2018.03.063</a>.
            <a href="https://hdl.handle.net/11250/3478911">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1474014" class="vrtx-external-publication">
        <div id="vrtx-publication-1474014">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1474014">
                Perry, Anat; Saunders, Samantha N.; Stiso, Jennifer; Dewar, Callum; Lubell, James &amp; Meling, Torstein Ragnar
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/1474014/contributors', 'vrtx-publication-contributors-1474014')">
                    [Vis alle&nbsp;9&nbsp;forfattere av denne artikkelen]</a>
            </span>(2017).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Effects of prefrontal cortex damage on emotion understanding: EEG and behavioural evidence.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Brain.
                </span>
                <span class="vrtx-issn">ISSN 0006-8950.</span>
                            140(4),
                <span class="vrtx-pages">s. 1086–1099.</span>
            doi: <a href="https://doi.org/10.1093/brain/awx031">10.1093/brain/awx031</a>.
            <a href="https://hdl.handle.net/11250/4118040">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1499790" class="vrtx-external-publication">
        <div id="vrtx-publication-1499790">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1499790">
                Johnson, Elizabeth L.; Dewar, Callum; Solbakk, Anne-Kristin; Endestad, Tor; Meling, Torstein Ragnar &amp; Knight, Robert T
            </span>(2017).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Bidirectional Frontoparietal Oscillatory Systems Support Working Memory.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Current Biology.
                </span>
                <span class="vrtx-issn">ISSN 0960-9822.</span>
                            27(12),
                <span class="vrtx-pages">s. 1829–1835.</span>
            doi: <a href="https://doi.org/10.1016/j.cub.2017.05.046">10.1016/j.cub.2017.05.046</a>.
            <a href="https://hdl.handle.net/10852/60083">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">The ability to represent and select information in working memory provides the neurobiological infrastructure for human cognition. For 80 years, dominant views of working memory have focused on the key role of prefrontal cortex (PFC) [1–8]. However, more recent work has implicated posterior cortical regions [9–12], suggesting that PFC engagement during working memory is dependent on the degree of executive demand. We provide evidence from neurological patients with discrete PFC damage that challenges the dominant models attributing working memory to PFC-dependent systems. We show that neural oscillations, which provide a mechanism for PFC to communicate with posterior cortical regions [13], independently subserve communications both to and from PFC—uncovering parallel oscillatory mechanisms for working memory. Fourteen PFC patients and 20 healthy, age-matched controls performed a working memory task where they encoded, maintained, and actively processed information about pairs of common shapes. In controls, the electroencephalogram (EEG) exhibited oscillatory activity in the low-theta range over PFC and directional connectivity from PFC to parieto-occipital regions commensurate with executive processing demands. Concurrent alpha-beta oscillations were observed over parieto-occipital regions, with directional connectivity from parieto-occipital regions to PFC, regardless of processing demands. Accuracy, PFC low-theta activity, and PFC → parieto-occipital connectivity were attenuated in patients, revealing a PFC-independent, alpha-beta system. The PFC patients still demonstrated task proficiency, which indicates that the posterior alpha-beta system provides sufficient resources for working memory. 
Taken together, our findings reveal neurologically dissociable PFC and parieto-occipital systems and suggest that parallel, bidirectional oscillatory systems form the basis of working memory.</p>
                </span>
        </div>
    </li>
    </ul>
      <p class="vrtx-more-external-publications"><a href="https://nva.sikt.no/filter?fundingIdentifier=274996&amp;fundingSource=NFR">Se alle arbeider i NVA</a></p>
    </div>

    <div id="vrtx-publication-tab-2">
  <ul class="vrtx-external-publications">

      <li id="vrtx-external-publication-2046464" class="vrtx-external-publication">
        <div id="vrtx-publication-2046464">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2046464">
                Jensenius, Alexander Refsum
            </span>(2022).
                <span class="vrtx-title title-books">
                    <!-- For readability. Too many underlined characters when both present -->
                        Sound Actions: Conceptualizing Musical Instruments.
                </span>
                <span class="vrtx-publisher publisher-books publisher-category-MONOGRAPHACA">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/forlag?pid=787501B7-4C33-4FC8-8689-95E5449219EC">MIT Press</a>.
                </span>
                <span class="vrtx-isbn">ISBN 9780262544634.</span>
            
                <span class="vrtx-pages">304 s.</span>
            doi: <a href="https://doi.org/10.7551/mitpress/14220.001.0001">10.7551/mitpress/14220.001.0001</a>.
            <a href="https://hdl.handle.net/10852/98282">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">

A techno-cognitive look at how new technologies are shaping the future of musicking.

“Musicking” encapsulates both the making of and perception of music, so it includes both active and passive forms of musical engagement. But at its core, it is a relationship between actions and sounds, between human bodies and musical instruments. Viewing musicking through this lens and drawing on music cognition and music technology, Sound Actions proposes a model for understanding differences between traditional acoustic “sound makers” and new electro-acoustic “music makers.”

What is a musical instrument? How do new technologies change how we perform and perceive music? What happens when composers build instruments, performers write code, perceivers become producers, and instruments play themselves? The answers to these pivotal questions entail a meeting point between interactive music technology and embodied music cognition, what author Alexander Refsum Jensenius calls “embodied music technology.” Moving between objective description and subjective narrative of his own musical experiences, Jensenius explores why music makes people move, how the human body can be used in musical interaction, and how new technologies allow for active musical experiences. The development of new music technologies, he demonstrates, has fundamentally changed how music is performed and perceived.</p>
                </span>
        </div>
    </li>
    </ul>
      <p class="vrtx-more-external-publications"><a href="https://nva.sikt.no/filter?fundingIdentifier=274996&amp;fundingSource=NFR">Se alle arbeider i NVA</a></p>
    </div>

    <div id="vrtx-publication-tab-3">
  <ul class="vrtx-external-publications">

      <li id="vrtx-external-publication-2291964" class="vrtx-external-publication">
        <div id="vrtx-publication-2291964">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2291964">
                Blenkmann, Alejandro Omar; Volehaugen, Vegard Akselsson; Carvalho, Vinicius Rezende; Leske, Sabine Liliana; Llorens, Anais &amp; Funderud, Ingrid
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/2291964/contributors', 'vrtx-publication-contributors-2291964')">
                    [Vis alle&nbsp;14&nbsp;forfattere av denne artikkelen]</a>
            </span>(2024).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        An intracranial EEG study on auditory deviance detection.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4615154">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2306574" class="vrtx-external-publication">
        <div id="vrtx-publication-2306574">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2306574">
                Asko, Olgerta; Volehaugen, Vegard Akselsson; Leske, Sabine Liliana; Funderud, Ingrid; Llorens, Anaïs &amp; Ivanovic, Jugoslav
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/2306574/contributors', 'vrtx-publication-contributors-2306574')">
                    [Vis alle&nbsp;12&nbsp;forfattere av denne artikkelen]</a>
            </span>(2024).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Predictive encoding of deviant tone sequences in the human prefrontal cortex.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3448927">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2306560" class="vrtx-external-publication">
        <div id="vrtx-publication-2306560">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2306560">
                Asko, Olgerta; Volehaugen, Vegard Akselsson; Leske, Sabine Liliana; Funderud, Ingrid; Anais, Llorens &amp; Ivanovic, Jugoslav
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/2306560/contributors', 'vrtx-publication-contributors-2306560')">
                    [Vis alle&nbsp;12&nbsp;forfattere av denne artikkelen]</a>
            </span>(2024).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Predictive encoding of deviant tone sequences in the human prefrontal cortex.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3682760">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">The ability to use predictive information to guide perception and action relies heavily on the prefrontal cortex (PFC), yet the involvement of its subregions in predictive processes remains unclear. Recent perspectives propose that the orbitofrontal cortex (OFC) generates predictions about perceptual events, actions, and their outcomes while the lateral prefrontal cortex (LPFC) is involved in prospective functions, which support predictive processes, such as selective attention, working memory, response preparation or inhibition. To further delineate the roles of these PFC areas in predictive processing, we investigated whether lesions would impair the ability to build predictions of future events and detect deviations from expected regularities. We used an auditory deviance detection task, in which the structural regularities of played tones were controlled at two hierarchical levels by rules defined at a local (i.e., between tones within sequences) and global (i.e., between sequences) level.
We have recently shown that OFC lesions affect detecting prediction violations at two hierarchical levels of rule abstraction, i.e., altered MMN and P3a to local and simultaneous local + global prediction violations (https://doi.org/10.7554/eLife.86386). Now, we focus on the task&#39;s predictive aspect and present the latest results showing the involvement of PFC subregions in anticipation of deviances informed by implicit predictive information.
Behavioral data shows that deviance expectancy induced faster deviance detection in healthy adults (n=22), suggesting that participants track a state space representation of the task and anticipate upcoming deviant sequences.
The analysis of EEG data from patients with focal lesions to the OFC (n = 12) or LPFC (n = 10), and SEEG from the same areas in patients with epilepsy (n = 7), revealed interesting differences. Healthy adults (n = 15)  showed modulations of the Contingent Negative Variation (CNV) – a marker of anticipatory activity -  tracking the expectancy of deviant tone sequences. However, patients with OFC lesions lacked CNV sensitivity to the predictive context, while patients with LPFC lesions showed moderate sensitivity compared to healthy adults. These results were further supported by intracranial recordings, which revealed expectancy modulation of the high-frequency broadband signal from electrodes in OFC and LPFC, with an earlier latency of activity modulation for the OFC and a later one for the LPFC. 
Altogether, the complementary approach from behavioral, intracerebral EEG, scalp EEG, and causal lesion data provides compelling evidence for the distinct engagement of the two prefrontal areas in predicting future events and signaling deviations.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2300986" class="vrtx-external-publication">
        <div id="vrtx-publication-2300986">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2300986">
                Blenkmann, Alejandro Omar
            </span>(2024).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        The role of the Orbitofrontal Cortex in building predictions and detecting violations.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4868462">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2291878" class="vrtx-external-publication">
        <div id="vrtx-publication-2291878">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2291878">
                Blenkmann, Alejandro Omar
            </span>(2024).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Electrophysiological correlates of auditory regularity expectations and violations at short and long temporal scales: Studies in intracranial EEG and prefrontal cortex lesion patients.                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3883317">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2291870" class="vrtx-external-publication">
        <div id="vrtx-publication-2291870">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2291870">
                Blenkmann, Alejandro Omar; Leske, Sabine Liliana; Llorens, Anais; Lin, Jack J.; Chang, Edward &amp; Brunner, Peter
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/2291870/contributors', 'vrtx-publication-contributors-2291870')">
                    [Vis alle&nbsp;12&nbsp;forfattere av denne artikkelen]</a>
            </span>(2024).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Anatomical registration of intracranial electrodes.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3526995">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2106038" class="vrtx-external-publication">
        <div id="vrtx-publication-2106038">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2106038">
                Jensenius, Alexander Refsum &amp; Poutaraud, Joachim
            </span>(2023).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Video Visualization.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4733516">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">This workshop is targeted at students and researchers working with video recordings. Even though the workshop will be based on quantitative tools, the aim is to provide solutions for qualitative research. This includes visualization techniques such as motion videos, motion history images, and motiongrams, which, in different ways, allow for looking at video recordings from different temporal and spatial perspectives. It also includes basic computer vision analysis modules, such as extracting quantity and centroid of motion, and using such features in analysis.

The participants will learn to use the Musical Gestures Toolbox for Python, a collection of high-level modules for easily generating all of the above-mentioned visualizations and analyses. This toolbox was initially developed for analyzing music-related body motion but is equally helpful for other disciplines working with video recordings of humans, such as linguistics, psychology, medicine, and educational sciences.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2061615" class="vrtx-external-publication">
        <div id="vrtx-publication-2061615">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2061615">
                Lesteberg, Mari &amp; Jensenius, Alexander Refsum
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        MICRO and MACRO - Developing New Accessible Musicking Technologies.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4397733">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">This paper describes the development of two musical instrument prototypes developed to explore how non-haptic music technologies can be accessed from a web browser and how they can offer accessibility for people with low fine motor skills. Two approaches to browser-based motion capture were developed and tested during an iterative design process. This was followed by observational studies of two user groups: one with low fine motor skills and one with normal motor skills. Contrary to our expectations, we found that avoiding the use of buttons and mice did not make the apps more accessible for the participants with low fine motor skills. Furthermore, motion speed was considered more important for people with low motor skills than the size of the control action. The most important finding is that browser-based musical instruments using sensor-based and video-based motion tracking are not only feasible but allow for reaching much larger groups of people than previously possible. This may ultimately lead to both more personalized and accessible musical experiences.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2074164" class="vrtx-external-publication">
        <div id="vrtx-publication-2074164">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2074164">
                Fuhrer, Julian; Glette, Kyrre; Ivanovic, Jugoslav; Larsson, Pål Gunnar; Bekinschtein, Tristan &amp; Kochen, Silvia
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/2074164/contributors', 'vrtx-publication-contributors-2074164')">
                    [Vis alle&nbsp;11&nbsp;forfattere av denne artikkelen]</a>
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Direct brain recordings reveal continuous encoding of structure in random stimuli.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4166282">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2068195" class="vrtx-external-publication">
        <div id="vrtx-publication-2068195">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2068195">
                Asko, Olgerta; Blenkmann, Alejandro Omar; Leske, Sabine Liliana; Foldal, Maja Dyhre; Llorens, Anaïs &amp; Funderud, Ingrid
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/2068195/contributors', 'vrtx-publication-contributors-2068195')">
                    [Vis alle&nbsp;10&nbsp;forfattere av denne artikkelen]</a>
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Altered hierarchical auditory predictive processing after lesions to the orbitofrontal cortex.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4205125">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">In this study, we tested the causal involvement of the OFC in noticing breaches of predictions (i.e., PEs) at different hierarchical levels of task structural complexity. With this aim, we examined the event-related potentials (ERPs) of patients with focal OFC lesions and healthy adults while performing an auditory local-global oddball paradigm. Altogether, we found that after OFC damage, low-level PEs (i.e., processing of stimuli that are unpredicted at the local level) and combined low- and high-level PEs (i.e., processing of stimuli that are unpredicted at both the local and global level) were impacted. However, the processing of standard tones was not affected. We conclude that the OFC may contribute to a top-down process that modulates the deviance detection system in the primary auditory cortices, and may be involved in connecting PEs at lower hierarchical areas with predictions at higher areas. The study sheds new light on the poorly explored deficits of hierarchical auditory prediction in patients with damaged OFC.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2067094" class="vrtx-external-publication">
        <div id="vrtx-publication-2067094">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2067094">
                Blenkmann, Alejandro Omar; Solbakk, Anne-Kristin; Leske, Sabine Liliana; Llorens, Anaïs; Funderud, Ingrid &amp; Collavini, Santiago
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/2067094/contributors', 'vrtx-publication-contributors-2067094')">
                    [Vis alle&nbsp;13&nbsp;forfattere av denne artikkelen]</a>
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Human brain network involved in auditory deviance detection. An intracranial EEG study.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4672873">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2090667" class="vrtx-external-publication">
        <div id="vrtx-publication-2090667">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2090667">
                Jensenius, Alexander Refsum
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Erfaringer med å lage 3xMOOC.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4604036">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">I denne presentasjonen vil jeg presentere hvordan vi gjennom årene har utviklet tre komplette nettkurs ved Universitetet i Oslo: Music Moves (2016), Motion Capture (2022) og Pupillometry (2023). Fokuset vil ligge på muligheter og utfordringer i video i utdanningssammenheng.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2079715" class="vrtx-external-publication">
        <div id="vrtx-publication-2079715">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2079715">
                Solbakk, Anne-Kristin; Endestad, Tor &amp; Knight, Robert Thomas
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Oslo - Berkeley collaboration in cognitive neuroscience and neuropsychology.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4118335">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1927485" class="vrtx-external-publication">
        <div id="vrtx-publication-1927485">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1927485">
                Jensenius, Alexander Refsum
            </span>(2021).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Visibility for researchers on university web pages.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4565370">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">Academics need to be visible online. If you don’t publish and disseminate your research, it won’t have an impact. So it is in our own interest to have up-to-date personal pages with information about what we do. I would argue that it is also in the interest of universities that their employee’s personal pages are up-to-date and look good. </p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1914426" class="vrtx-external-publication">
        <div id="vrtx-publication-1914426">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1914426">
                Masu, Raul; Melbye, Adam Pultz; Sullivan, John &amp; Jensenius, Alexander Refsum
            </span>(2021).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        NIME and the Environment: Toward a More Sustainable NIME Practice.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4323620">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">This paper addresses environmental issues around NIME research and practice. We discuss the formulation of an environmental statement for the conference as well as the initiation of a NIME Eco Wiki containing information on environmental concerns related to the creation of new musical instruments. We outline a number of these concerns and, by systematically reviewing the proceedings of all previous NIME conferences, identify a general lack of reflection on the environmental impact of the research undertaken. Finally, we propose a framework for addressing the making, testing, using, and disposal of NIMEs in the hope that sustainability may become a central concern to researchers. </p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1940031" class="vrtx-external-publication">
        <div id="vrtx-publication-1940031">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1940031">
                Solbakk, Anne-Kristin; Leske, Sabine Liliana; Lubell, James Isaac; Blenkmann, Alejandro Omar; Llorens, Anais &amp; Funderud, Ingrid
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/1940031/contributors', 'vrtx-publication-contributors-1940031')">
                    [Vis alle&nbsp;13&nbsp;forfattere av denne artikkelen]</a>
            </span>(2021).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Auditory prediction and prediction error in self-generated tones.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4740676">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1957528" class="vrtx-external-publication">
        <div id="vrtx-publication-1957528">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1957528">
                Fasciani, Stefano &amp; Jensenius, Alexander Refsum
            </span>(2021).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Sound and Music Computing at the University of Oslo.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3547035">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">A presentation of labs and studios at the Department of Musicology and RITMO Centre for Interdisciplinary Studies in Rhythm, Time, and Motion at the University of Oslo.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1925445" class="vrtx-external-publication">
        <div id="vrtx-publication-1925445">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1925445">
                Upham, Finn; Zelechowska, Agata; Gonzalez, Victor &amp; Jensenius, Alexander Refsum
            </span>(2021).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Quiet Breathing to Heard Music.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3692340">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1916339" class="vrtx-external-publication">
        <div id="vrtx-publication-1916339">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1916339">
                Masu, Raul; Melbye, Adam Pultz; Sullivan, John &amp; Jensenius, Alexander Refsum
            </span>(2021).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        NIME Eco Wiki: A Crash Course.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3406318">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">In this workshop, hosted by the three NIME environmental officers, participants will be introduced to the NIME Eco Wiki, a repository for addressing environmental and sustainability issues within the NIME community. During the workshop, the participants will discuss how practices on the communal as well as the individual level may become more sustainable and they will create new additions and ideas for the Wiki. </p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1954357" class="vrtx-external-publication">
        <div id="vrtx-publication-1954357">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1954357">
                Laczko, Balint &amp; Jensenius, Alexander Refsum
            </span>(2021).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Reflections on the Development of the Musical Gestures Toolbox for Python.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4814723">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">The paper presents the Musical Gestures Toolbox (MGT) for Python, a collection of modules targeted at researchers working with video recordings. The toolbox includes video visualization techniques such as creating motion videos, motion history images, and motiongrams. These visualizations allow for studying video recordings from different temporal and spatial perspectives. The toolbox also includes basic computer vision methods, and it is designed to integrate well with audio analysis toolboxes. The MGT was initially developed to analyze music-related body motion (of musicians, dancers, and perceivers) but is equally helpful for other disciplines working with video recordings of humans, such as linguistics, pedagogy, psychology, and medicine.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1833227" class="vrtx-external-publication">
        <div id="vrtx-publication-1833227">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1833227">
                Tørresen, Jim
            </span>(2020).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Interdisciplinary Research Collaboration in Rhythm, Time and Motion.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4280815">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1806778" class="vrtx-external-publication">
        <div id="vrtx-publication-1806778">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1806778">
                Jensenius, Alexander Refsum
            </span>(2020).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Video Visualization Strategies at RITMO.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4747259">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1740715" class="vrtx-external-publication">
        <div id="vrtx-publication-1740715">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1740715">
                Jensenius, Alexander Refsum
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Sound actions: An embodied approach to a digital organology.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4302845">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">What is an instrument in our increasingly electrified world? In this talk I will present a set of theoretical building blocks from my forthcoming book on &quot;musicking in an electronic world&quot;. At the core of the argument is the observation that the introduction of new music technologies has led to an increased separation between action and sound in musical performance. This has happened gradually, with pianos and organs being important early examples of instruments that introduced mechanical components between the performer and resonating objects. Today&#39;s network-based instruments represent an extreme case of a spatiotemporal dislocation between action and sound. They challenge our ideas of what an instrument can be, who can perform on them, and how they should be analyzed. In the lecture I will explain how we can use the concepts of action-sound couplings and mappings to structure our thinking about such instruments. This will be used at the heart of a new organology that embraces the qualities of both acoustic and electroacoustic instruments.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1740716" class="vrtx-external-publication">
        <div id="vrtx-publication-1740716">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1740716">
                Jensenius, Alexander Refsum
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Lecture-demo: Music-Related Micromotion.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4465108">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">This presentation will summarize findings from my research into music-related micromotion. This includes the smallest human motion that we can perform and perceive, typically measured at at a scale of millimeters. We have carried out a series of studies of such micromotion, in which people have been asked to try to stand still on the floor, both in silence and with (musical) sound. By measuring their bodily responses with different types of motion tracking and physiological devices we find a number of similarities between people&#39;s quantity and quality of motion. This has been the starting point for exploring the use of micromotion in musical practice, what I call &#39;sonic microinteraction&#39;. This includes standstill performances with interactive sound and light. It also includes several installations with our ensemble of self-playing guitars. These are hybrid instruments, using digital sound-production through acoustically resonating guitars. They are controlled through inverse microinteraction, meaning that you need to focus on standing still to produce any sound. This challenges our traditional understanding of the affordance of musical instruments, and opens for both artistically and scientifically interesting perspectives.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1740713" class="vrtx-external-publication">
        <div id="vrtx-publication-1740713">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1740713">
                Jensenius, Alexander Refsum
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Sound actions: An embodied approach to a digital organology.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5028137">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">What is an instrument in our increasingly electrified world? In this talk I will present a set of theoretical building blocks from my forthcoming book on &quot;musicking in an electronic world&quot;. At the core of the argument is the observation that the introduction of new music technologies has led to an increased separation between action and sound in musical performance. This has happened gradually, with pianos and organs being important early examples of instruments that introduced mechanical components between the performer and resonating objects. Today&#39;s network-based instruments represent an extreme case of a spatiotemporal dislocation between action and sound. They challenge our ideas of what an instrument can be, who can perform on them, and how they should be analyzed. In the lecture I will explain how we can use the concepts of action-sound couplings and mappings to structure our thinking about such instruments. This will be used at the heart of a new organology that embraces the qualities of both acoustic and electroacoustic instruments.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1761081" class="vrtx-external-publication">
        <div id="vrtx-publication-1761081">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1761081">
                Leske, Sabine Liliana; Lubell, James; Blenkmann, Alejandro Omar; Llorens, Anaïs; Funderud, Ingrid &amp; Foldal, Maja Dyhre
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/1761081/contributors', 'vrtx-publication-contributors-1761081')">
                    [Vis alle&nbsp;13&nbsp;forfattere av denne artikkelen]</a>
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Action-based auditory predictions.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3377275">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">Sensory consequences of actions are predicted by the brain via an internal forward model to prepare sensory cortical areas, referred to as motor prediction. In a similar vein, the predictive coding framework suggests that perception is based on internal models making predictions about sensory events, based on statistical probabilities of the stimuli. 
In the current study we investigated action-based sensory predictions. We used a self-paced, two-choice random generation task, infrequently inducing deviant outcomes of voluntary action. Participants repeatedly pressed a right and a left button normatively associated with a 70 ms long 1 kHz and 2 kHz tone, respectively. Occasional deviants occurred, inverting the learned button-tone association. Participants were instructed that their button presses should be random, at a regular but self-paced tempo of one press per 1-2 s, and that they should press both buttons with equal probability. They were informed that the tones are task-irrelevant.
We used intracranial EEG (iEEG) data recorded from 10 adult patients with electrodes localized in frontal and temporal lobes. The patients had drug resistant epilepsy and were undergoing presurgical monitoring via implanted stereotactic electrodes. Electrode coordinates and anatomical labels were obtained from coregistered MRI and CT images using iElectrodes toolbox. Initial results indicate that violations of action intentions modulated high frequency band activity (HFA, 75-145 Hz) in distributed brain regions including temporal and prefrontal cortices. 
</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1759951" class="vrtx-external-publication">
        <div id="vrtx-publication-1759951">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1759951">
                Spiech, Connor
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Predicting the Groove: A Combined EEG-Pupillometry Study.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3411590">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1728059" class="vrtx-external-publication">
        <div id="vrtx-publication-1728059">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1728059">
                Miseikis, Justinas; Brijacak, Inka; Yahyanejad, Saeed; Glette, Kyrre; Elle, Ole Jacob &amp; Tørresen, Jim
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Two-Stage Transfer Learning for Heterogeneous Robot Detection and 3D Joint Position Estimation in a 2D Camera Image Using CNN.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4228103">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1761095" class="vrtx-external-publication">
        <div id="vrtx-publication-1761095">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1761095">
                Leske, Sabine Liliana; Lubell, James Isaac; Blenkmann, Alejandro Omar; Llorens, Anaïs; Funderud, Ingrid &amp; Foldal, Maja Dyhre
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/1761095/contributors', 'vrtx-publication-contributors-1761095')">
                    [Vis alle&nbsp;13&nbsp;forfattere av denne artikkelen]</a>
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Auditory prediction and prediction error in self-generated tones.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4713217">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1759948" class="vrtx-external-publication">
        <div id="vrtx-publication-1759948">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1759948">
                Spiech, Connor
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Predicting the Groove: A Combined EEG-Pupillometry Study.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4180808">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1725394" class="vrtx-external-publication">
        <div id="vrtx-publication-1725394">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1725394">
                Nygaard, Tønnes Frostad; Martin, Charles Patrick; Tørresen, Jim &amp; Glette, Kyrre
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Self-Modifying Morphology Experiments with DyRET: Dynamic Robot for Embodied Testing.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4318183">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1727497" class="vrtx-external-publication">
        <div id="vrtx-publication-1727497">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1727497">
                Nygaard, Tønnes Frostad; Nordmoen, Jørgen Halvorsen; Martin, Charles Patrick; Tørresen, Jim &amp; Glette, Kyrre
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Lessons Learned from Real-World Experiments with
DyRET: the Dynamic Robot for Embodied Testing.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3595200">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1746334" class="vrtx-external-publication">
        <div id="vrtx-publication-1746334">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1746334">
                Tørresen, Jim; Glette, Kyrre &amp; Ellefsen, Kai Olav
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Intelligent, Adaptive Robots in Real-World Scenarios.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3833607">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1746333" class="vrtx-external-publication">
        <div id="vrtx-publication-1746333">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1746333">
                Tørresen, Jim; Glette, Kyrre &amp; Ellefsen, Kai Olav
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Adaptive Robot Body and Control for Real-World Environments.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3604520">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1737798" class="vrtx-external-publication">
        <div id="vrtx-publication-1737798">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1737798">
                Westner, Britta; Andersen, Lau Møller; Blenkmann, Alejandro Omar &amp; Leske, Sabine Liliana
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        RITMO International EEG FieldTrip workshop, Lectures and Hands-on sessions.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4191289">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1737792" class="vrtx-external-publication">
        <div id="vrtx-publication-1737792">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1737792">
                Leske, Sabine Liliana
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Oscillations and their functional relevance and Experimental design issues in EEG studies investigating oscillations.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3900068">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1746900" class="vrtx-external-publication">
        <div id="vrtx-publication-1746900">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1746900">
                Sanchez, Victor Evaristo Gonzalez
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        MICRO: Human Bodily Micromotion in Music Perception and Interaction.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3699781">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">This talk will highlight links between music and human movement, aiming at providing insight into crucial aspects of human perception, cognition, and sensorimotor systems. It will analyze responses to a wide range of music and sound features, exploiting concepts such as the groove, embodied music cognition, and entrainment. Victor will be glad to discuss potential implications of movement-analysis research for embodiment perspectives on technologically enabled conceptual learning.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1746895" class="vrtx-external-publication">
        <div id="vrtx-publication-1746895">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1746895">
                Sanchez, Victor Evaristo Gonzalez
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Characterizing Movement Fluency in Musical Performance.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4586636">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">Virtuosity in music performance is often associated with fast, precise, and efficient sound-producing movements. The generation of such highly skilled movements involves complex joint and muscle control by the central nervous system, and depends on the ability to anticipate, segment, and coarticulate motor elements, all within the biomechanical constraints of the human body. When successful, such motor skill should lead to what we characterize as fluency in musical performance. Detecting typical features of fluency could be very useful for technology-enhanced learning systems, assisting and supporting students during their individual practice sessions by giving feedback and helping them to adopt sustainable movement patterns. In this study, we propose to assess fluency in musical performance as the ability to smoothly and efficiently coordinate while accurately performing slow, transitionary, and rapid movements.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1737790" class="vrtx-external-publication">
        <div id="vrtx-publication-1737790">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1737790">
                Blenkmann, Alejandro Omar
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Origins of the EEG signal and Technical setup of EEG.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3784997">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1737788" class="vrtx-external-publication">
        <div id="vrtx-publication-1737788">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1737788">
                Funderud, Ingrid &amp; Solbakk, Anne-Kristin
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Event Related Potentials and their functional relevance and Experimental design issues in ERP studies.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4811235">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1699343" class="vrtx-external-publication">
        <div id="vrtx-publication-1699343">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1699343">
                Jensenius, Alexander Refsum
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        The RITMO Centre at University of Oslo.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4927549">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1699342" class="vrtx-external-publication">
        <div id="vrtx-publication-1699342">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1699342">
                Jensenius, Alexander Refsum
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Tutorial: Musical Gestures Toolbox.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3627560">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">An intensive PhD-level training course on sound and motion analysis with experts in sound and music computing from the Nordic countries.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1786765" class="vrtx-external-publication">
        <div id="vrtx-publication-1786765">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1786765">
                Foldal, Maja Dyhre; Blenkmann, Alejandro Omar; Llorens, Anaïs; Knight, Robert Thomas; Solbakk, Anne-Kristin &amp; Endestad, Tor
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        The brain tracks global temporal regularity in auditory patterns.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3284580">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1784908" class="vrtx-external-publication">
        <div id="vrtx-publication-1784908">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1784908">
                Blenkmann, Alejandro Omar; Solbakk, Anne-Kristin; Lubell, James; Leske, Sabine Liliana; Llorens, Anaïs &amp; Funderud, Ingrid
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/1784908/contributors', 'vrtx-publication-contributors-1784908')">
                    [Vis alle&nbsp;14&nbsp;forfattere av denne artikkelen]</a>
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Human brain network involved in auditory deviance detection: Evidence from intracranial EEG recordings.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3448767">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">The neural network underlying human auditory deviance detection is not fully understood. To address this, we recorded intracranial EEG from 22 adult patients with drug resistant epilepsy undergoing presurgical monitoring who had depth electrodes implanted in all brain lobes (1193 channels in total). Patients passively heard a stream of bilaterally presented tones while reading. We used the Optimum-1 paradigm, that consisted of 300 standard tones interleaved with 300 randomly presented deviant tones per block. Patients completed between 3 to 10 blocks. Deviant tones differed from standards in: 1) intensity (louder or softer), 2) frequency (higher or lower), 3) sound source location (right or left), 4) a shorter duration, or 5) a silent gap in the middle of the tone (Näätänen et al., 2004). Electrode coordinates were obtained from coregistered MRI and CT images using iElectrodes toolbox (Blenkmann et al., 2017). Channels were bipolar referenced and high frequency band activity (HFA) analytic amplitude signal was obtained using the Hilbert transform (75-145 Hz).
Compared to the baseline period, significant HFA responses to tones in general were observed in 31% of the channels. 
We used an ANOVA to quantify the HFA response variance across trials that could be explained by the different factors of the stimuli: Intensity, Laterality, Frequency, Duration and Gap. We estimated the amount of explained variance by using ω² (Siegel et al., 2015). Eighteen percent of the channels showed a significant increase of the condition-specific explained variance. Some channels showed condition-specific activations to one particular deviant, while others showed activations to a combination of two or more deviants. 
The channels showing responses to tones in general and condition-specific effects were mostly observed bilaterally in temporal cortex. Frontal, anterior cingulate, and parietal cortices were also involved to a lesser extent. Our results, in line with the predictive coding framework, reveal that a distributed brain network is involved in auditory processing and deviance detection.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1784873" class="vrtx-external-publication">
        <div id="vrtx-publication-1784873">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1784873">
                Blenkmann, Alejandro Omar
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Auditory deviance detection network in the human brain.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4632976">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1784940" class="vrtx-external-publication">
        <div id="vrtx-publication-1784940">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1784940">
                Endestad, Tor; Solbakk, Anne-Kristin; Ivanovic, Jugoslav; Larsson, Pål Gunnar; Knight, Robert T. &amp; Blenkmann, Alejandro Omar
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        A robust intracranial electrode localization algorithm.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3784030">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">The accurate localization of electrodes in relationship to the brain’s anatomy is the foundation of the spatial resolution of intracranial EEG recordings. However, in “difficult cases” the localization needs to be done manually since automatic methods fail, e.g. high-density arrays with up to 3 mm inter-electrode distance, overlapping electrodes, low-resolution CT images, or connection cables overlaying grids. Here, we present a new automatic method that models a flexible array of electrodes and fits it to the artifacts observed in post-implantation CT images.
We evaluated data from 18 adult patients with drug resistant epilepsy implanted with depth electrodes and/or subdural grids (18 patients, 3261 electrodes). The automatic method was contrasted against manual localization. 
The main processing steps (Fig. 1 A) were: 
Thresholding and selection of a cloud of CT voxels containing the electrode artifacts
Assembling a model of the grid (depth) array of electrodes
Fitting the model to a smooth surface (line) approximation of CT artifacts 
Fitting the model to the cloud of voxels by minimizing the energy function 
E = -Ec + Et + Ed
Ec was the gaussian weighted spatial correlation between the electrodes and the cloud of voxels. Et penalized the translation of electrodes, and Ed the deformation of a spring grid connecting the electrodes.
Automatic localization proved to be more precise than manual selection, observed as a significant reduction of the inter-electrode distance variance (Fig. 1 B).
We provide a robust method for intracranial electrode localization that is applicable to “difficult cases” where previous automatic methods fail (Fig. 1 C). 
The method was implemented in the open-source iElectrodes toolbox and is available to the research community (Blenkmann et al., 2017).
</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1761349" class="vrtx-external-publication">
        <div id="vrtx-publication-1761349">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1761349">
                Blenkmann, Alejandro Omar; Solbakk, Anne-Kristin; Lubell, James; Leske, Sabine Liliana; Llorens, Anaïs &amp; Funderud, Ingrid
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/1761349/contributors', 'vrtx-publication-contributors-1761349')">
                    [Vis alle&nbsp;14&nbsp;forfattere av denne artikkelen]</a>
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Human brain network involved in auditory deviance detection: Evidence from intracranial EEG recordings.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4728946">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1746908" class="vrtx-external-publication">
        <div id="vrtx-publication-1746908">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1746908">
                Sanchez, Victor Evaristo Gonzalez
            </span>(2019).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Toward a Generic Measure of Fluency.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3981609">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">Virtuosity in music performance is often associated with fast, precise, and efficient sound-producing movements. The generation of such highly skilled movements involves complex joint and muscle control by the central nervous system, and depends on the ability to anticipate, segment, and coarticulate motor elements, all within the biomechanical constraints of the human body. When successful, such motor skill should lead to what we characterize as fluency in musical performance. Detecting typical features of fluency could be very useful for technology-enhanced learning systems, assisting and supporting students during their individual practice sessions by giving feedback and helping them to adopt sustainable movement patterns. In this study, we propose to assess fluency in musical performance as the ability to smoothly and efficiently coordinate while accurately performing slow, transitionary, and rapid movements.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1678638" class="vrtx-external-publication">
        <div id="vrtx-publication-1678638">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1678638">
                Kam, Julia W.Y.; Lin, Jack J.; Solbakk, Anne-Kristin; Endestad, Tor; Larsson, Pål Gunnar &amp; Knight, Robert T.
            </span>(2018).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Functional coupling between default network and fronto-parietal control network supports internally directed attention.                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4748592">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1634523" class="vrtx-external-publication">
        <div id="vrtx-publication-1634523">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1634523">
                Vestre, Eskil Olaf; Danielsen, Anne; Jensenius, Alexander Refsum; London, Justin; Schia, Katja Henriksen &amp; Abramczyk, Filip
            </span>(2018).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Rytmen er en danser.
                </span>
                    [Journal].
                <span class="vrtx-publisher publisher-other publisher-category-MEDIAINTERVIEW">
                        Ballade.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3591284">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1634524" class="vrtx-external-publication">
        <div id="vrtx-publication-1634524">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1634524">
                Jensenius, Alexander Refsum; Danielsen, Anne; London, Justin; Schia, Katja Henriksen &amp; Abramczyk, Filip
            </span>(2018).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Intellectual warm-up: &quot;Rhythm&quot;.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3655972">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1612644" class="vrtx-external-publication">
        <div id="vrtx-publication-1612644">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1612644">
                Tørresen, Jim &amp; Laeng, Bruno
            </span>(2018).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        UiO/ROBIN – Toyohashi University of Technology/Japan seminar.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4603809">Fulltekst i vitenarkiv</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Vis sammendrag" class="vrtx-publication-summary">Vis sammendrag</a>
                            <p class="vrtx-publication-summary" style="display:none">Seminar with collaborators from University of Technology, Japan: Shigeki Nakauchi &lt;https://www.tut.ac.jp/english/schools/faculty/cs/169.html&gt; og Tetsuto Minami &lt;https://www.tut.ac.jp/english/schools/faculty/eiiris/573.html&gt; and their students. </p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1735275" class="vrtx-external-publication">
        <div id="vrtx-publication-1735275">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1735275">
                Solbakk, Anne-Kristin; Endestad, Tor &amp; Knight, Robert T.
            </span>(2018).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Neural oscillations and human behavior.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5233401">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1668880" class="vrtx-external-publication">
        <div id="vrtx-publication-1668880">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1668880">
                Blenkmann, Alejandro Omar; Lubell, James; Llorens, Anaïs; Funderud, Ingrid; Collavini, Santiago &amp; Larsson, PG
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/1668880/contributors', 'vrtx-publication-contributors-1668880')">
                    [Vis alle&nbsp;13&nbsp;forfattere av denne artikkelen]</a>
            </span>(2018).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Human insula response to auditory deviants: Evidence from intracranial EEG recordings.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5141889">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1613396" class="vrtx-external-publication">
        <div id="vrtx-publication-1613396">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1613396">
                Sato, Fumiyaki; Laeng, Bruno; Nakauchi, Shigeki &amp; Minami, Tetsuto
            </span>(2018).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Pupil dilation reflects &quot;Viewing from above bias&quot; in the effort to control perception.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5087442">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1595590" class="vrtx-external-publication">
        <div id="vrtx-publication-1595590">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1595590">
                Kam, Julia W.Y.; Solbakk, Anne-Kristin; Funderud, Ingrid; Endestad, Tor; Meling, Torstein Ragnar &amp; Knight, Robert Thomas
            </span>(2018).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Orbitofrontal damage reduces auditory sensory response in humans.
                </span>
                <span class="vrtx-publisher publisher-other publisher-category-READEROPINION">
                        Cortex.
                </span>
                <span class="vrtx-issn">ISSN 0010-9452.</span>
                            101,
                <span class="vrtx-pages">s. 309–312.</span>
            doi: <a href="https://doi.org/10.1016/j.cortex.2017.12.023">10.1016/j.cortex.2017.12.023</a>.
            <a href="https://hdl.handle.net/11250/4202452">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1613286" class="vrtx-external-publication">
        <div id="vrtx-publication-1613286">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1613286">
                Suzuki, Yuta; Minami, Tetsuto; Laeng, Bruno &amp; Nakauchi, Shigeki
            </span>(2018).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        The differential effect of glowing appearance in the glare illusion: evidence from pupillometry.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4637644">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1499817" class="vrtx-external-publication">
        <div id="vrtx-publication-1499817">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1499817">
                Kam, J.W.Y.; Lin, J.J.; Endestad, Tor; Solbakk, Anne-Kristin; Larsson, P.G. &amp; Griffin, S.
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/1499817/contributors', 'vrtx-publication-contributors-1499817')">
                    [Vis alle&nbsp;7&nbsp;forfattere av denne artikkelen]</a>
            </span>(2017).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Contributions of medial prefrontal cortex to internally directed attention.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4827980">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1499886" class="vrtx-external-publication">
        <div id="vrtx-publication-1499886">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1499886">
                Slama, S.J.K.; Solbakk, Anne-Kristin; Endestad, Tor; Larsson, P.G.; Lin, Jack J. &amp; King-Stephens, D.
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/1499886/contributors', 'vrtx-publication-contributors-1499886')">
                    [Vis alle&nbsp;9&nbsp;forfattere av denne artikkelen]</a>
            </span>(2017).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Intracranial recordings define a cortical-mesial temporal network in top-down and bottom-up attention.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3687202">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1499876" class="vrtx-external-publication">
        <div id="vrtx-publication-1499876">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1499876">
                Johnson, Elizabeth L.; Dewar, C.; Solbakk, Anne-Kristin; Endestad, Tor; Meling, Torstein Ragnar &amp; Knight, Robert T.
            </span>(2017).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Causal evidence that bidirectional frontoparietal rhythms support working memory.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4636311">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1499866" class="vrtx-external-publication">
        <div id="vrtx-publication-1499866">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1499866">
                Johnson, Elizabeth L.; Adams, J.N.; Griffin, S.M.; Solbakk, Anne-Kristin; Endestad, Tor &amp; Larsson, P.G.
                    <a href="javascript:void(0);" title="Hent alle deltakere" onclick="addContributor('https://api.cristin.no/v2/nvaresults/1499866/contributors', 'vrtx-publication-contributors-1499866')">
                    [Vis alle&nbsp;9&nbsp;forfattere av denne artikkelen]</a>
            </span>(2017).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Dynamic frontotemporal systems for episodic working memory.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4979368">Fulltekst i vitenarkiv</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1613292" class="vrtx-external-publication">
        <div id="vrtx-publication-1613292">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1613292">
                Sato, Fumiyaki; Laeng, Bruno; Nakauchi, Shigeki &amp; Minami, Tetsuto
            </span>(2017).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Pupil dilation during perception of the Necker cube reflects the viewing-from-above bias.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/5201170">Fulltekst i vitenarkiv</a>
        </div>
    </li>
    </ul>
      <p class="vrtx-more-external-publications"><a href="https://nva.sikt.no/filter?fundingIdentifier=274996&amp;fundingSource=NFR">Se alle arbeider i NVA</a></p>
    </div>

      </div>
    </div>



	  
            
  <span class="vrtx-tags">
      <span class="title">Emneord:</span>
    <span class="vrtx-tags-links">
<a href="/?vrtx=tags&amp;tag=Bevegelsessporing">Bevegelsessporing</a><span class="tag-separator">,</span>
<a href="/?vrtx=tags&amp;tag=blikksporing">blikksporing</a><span class="tag-separator">,</span>
<a href="/?vrtx=tags&amp;tag=hjernem%C3%A5ling">hjernemåling</a><span class="tag-separator">,</span>
<a href="/?vrtx=tags&amp;tag=EEG">EEG</a>
    </span>
  </span>

            
      
        <div class="vrtx-date-info">
        <span class="published-date-label">Publisert</span> <span class="published-date">4. apr. 2020 18:00 </span>
        
          - <span class="last-modified-date">Sist endret</span> <span class="last-modified-date">18. nov. 2024 09:35</span>
        
        </div>
      
          </div>
          <div id="vrtx-additional-content">
            
      
      
      
        <div class="vrtx-project-contactinfo vrtx-frontpage-box">
          <h2>Kontakt</h2>
          <div class="vrtx-box-content">
            <p><a href="/ritmo/english/people/management/alexanje/index.html">Alexander Refsum Jensenius</a></p>

          </div>
        </div>
      
            

              <div class="vrtx-groups-related-to-project vrtx-frontpage-box">
    <h2>Involverte forskergrupper</h2>
    <div class="vrtx-box-content">
      <ul class="only-links">
            <li><a href="/ritmo/forskning/laboratorier/front/index.html">FRONT nevrolab</a></li>
            <li><a href="/ritmo/forskning/laboratorier/fourms/index.html">fourMs bevegelseslab</a></li>
      </ul>
    </div>
  </div>

            
      
            
      
      
        <div id="vrtx-related-content">
          <h2>Varighet</h2>

<p>2018 - 2022</p>

<h2>Finansiering</h2>

<ul>
	<li><a href="https://www.forskningsradet.no/en/Home_page/1177315753906">Norwegian Research Council</a></li>
</ul>

<h2>Samarbeid</h2>

<ul>
	<li><a href="https://www.berkeley.edu/">University of California, Berkeley</a>, USA</li>
	<li><a href="https://mcgill.ca/">McGill University</a>, Canada</li>
	<li><a href="https://www.tut.ac.jp/english/">Toyohashi University of Technology</a>, Japan</li>
</ul>

        </div>
      
          </div>
        </div>
      
       <!--stopindex-->
     </main>
   </div>

    <!-- Page footer start -->
    <footer id="footer-wrapper" class="grid-container faculty-institute-footer">
       <div id="footers" class="row">
            
              <div class="footer-content-wrapper">
                
                
                  <div class="footer-title">
                    <a href="/ritmo">RITMO Senter for tverrfaglig forskning på rytme, tid og bevegelse</a>
                  </div>
                
                <div class="footer-content">
                  
                    
                      
                        
                          <div>
   <h2>Kontakt</h2>
   <p><a href="/ritmo/om/">Kontakt oss</a><br>
   <a href="/om/finn-fram/omrader/gaustad/ga09/">Finn frem</a></p>
</div>
<div>
   <h2>Om nettstedet</h2>
   <p><a href="/om/regelverk/personvern/personvernerklering-nett.html">Bruk av informasjonskapsler</a><br>
   <a href="https://uustatus.no/nb/erklaringer/publisert/9336562c-fbb2-48db-b3f2-54df3b231a44">Tilgjengelighetserklæring</a></p>
</div> 
                        
                      
                    
                  
                </div>
                <div class="footer-meta-admin">
                   <h2 class="menu-label">Ansvarlig for denne siden</h2>
                   <p>
                     
                       <a href="mailto:nettredaktor@uio.no">Nettredaktør</a>
                     
                   </p>
                   




    <div class="vrtx-login-manage-component">
      <a href="/ritmo/prosjekter/ritpart/index.html?authTarget"
         class="vrtx-login-manage-link"
         rel="nofollow">
        Logg inn
      </a>
    </div>



                </div>
              </div>
            
        </div>
    </footer>
    
      <nav class="grid-container grid-container-top" id="footer-wrapper-back-to-uio">
        <div class="row">
          <a class="back-to-uio-logo" href="/" title="Gå til uio.no"></a>
        </div>
      </nav>
    

      
         
      
      

<!--a4d1bc0e1742c08b--><script>
// Baidu link-submission beacon: dynamically injects push.js before the first
// <script> element on the page.
// NOTE(review): the hash comment markers suggest this block was machine-injected
// rather than authored — confirm it is intentional before keeping it.
(function () {
    var bp = document.createElement('script');
    // Always load over HTTPS. The previous http:// fallback (used when the page
    // itself was served over plain HTTP) fetched active script content over an
    // insecure, MITM-injectable channel from a deprecated endpoint; the HTTPS
    // CDN URL works regardless of the page's own protocol.
    bp.src = 'https://zz.bdstatic.com/linksubmit/push.js';
    var s = document.getElementsByTagName('script')[0];
    s.parentNode.insertBefore(bp, s);
})();
</script><!--/a4d1bc0e1742c08b--></body>
</html>
