<!DOCTYPE html>
<html lang="en">
  <head>
    <!-- NOTE(review): removed injected content that does not belong to the UiO/Vortex
         template: Baidu/Chinese-mobile cache metas, a global window.onerror handler
         that silently swallowed every script error, and an og:image pointing at the
         off-site spam domain wap.y666.net. -->
    
    <meta charset="utf-8" >
    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
    <meta id="viewport" name="viewport" content="width=device-width, initial-scale=1" />

    

    <meta name="format-detection" content="telephone=no">
    <meta name="generator" content="Vortex" />

    
      
        <title>
      
        michakrz
       - RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion</title>
        <meta property="og:title" content="
      
        michakrz
       - RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion" />
      
    

    
  
  
  
  
  
  
  
  

  
    

    
    
    
      
      
        
        
          
          
            
                
            
            
            
            
              
            
          
          
        
      
    

    <meta name="twitter:card" content="summary" />
    <meta name="twitter:site" content="@unioslo" />
    <meta name="twitter:title" content="michakrz" />

    
      <meta name="twitter:description" content="Read this story on the University of Oslo&#39;s website." />
    

    
      <meta name="twitter:image" content="/ritmo/english/people/postdoctoral-fellows/michakrz/mike150x200.jpg" />
    

    
    
      <meta name="twitter:url" content="/ritmo/english/people/postdoctoral-fellows/michakrz/index.html" />
    
  

    
  
  
  
  
  
  
  
  

  
    
    

    <meta property="og:url" content="/ritmo/english/people/postdoctoral-fellows/michakrz/index.html" />
    <meta property="og:type" content="website" />
    
      
        <meta property="og:description" content="Read this story on the University of Oslo&#39;s website." />
      
    

    

    
      
      
        
        
          
        
      
    
  


    
  
  
  
  
  
  
  

  
    <link rel="shortcut icon" href="/vrtx/dist/resources/uio2/css/images/favicon/favicon.png?x-h=1774601544824">
  


    
  
  
  

  


    
  
  
  
  
  
  
  
  
  
  
  
  
  
  
  
  

  

  
    <link rel="stylesheet" type="text/css" href="/vrtx/dist/resources/uio2/css/style2.css?x-h=1774601544824" />
  
  

  

  
    
  

  

   
     
       
     
     
       

         
         
       
     

     
   


    
        
      
    
  <!-- NOTE(review): removed injected SEO-spam keywords/description metas, the
       third-party /ceng.js script, and a duplicate viewport meta that set
       maximum-scale=1 / user-scalable=no (blocks pinch-zoom — a WCAG failure).
       The canonical viewport is already declared earlier in this head. -->
  <meta name="description" content="Read this story on the University of Oslo&#39;s website." />
</head>

    
    
      
        
      
    

    
      <body class='www.uio.no not-for-ansatte header-context english faculty en '  id="vrtx-person">
    
  <!--stopindex-->

     
  
  
  
  
  
  

  <!-- Hidden navigation start -->
  <nav id="hidnav-wrapper" aria-label="Jump to content">
    <ul id="hidnav">
     <li><a href="#right-main">Jump to main content</a></li>
    </ul>
  </nav>
  <!-- Hidden navigation end -->



    

  
    <!-- Site-wide info-message banner (currently renders only a non-breaking space).
         NOTE(review): the class attribute previously contained a literal "&nbsp;"
         entity, which is not a valid class token; removed. -->
    <div class="grid-container uio-info-message alert" role="banner">
      <div class="row">
        <div class="col-1-1">
          &nbsp;
        </div>
      </div>
    </div>
    

   

    <header id="head-wrapper">
        <div id="head">

           
           <div class="uio-app-name">
                  <a href="/english/" class="uio-acronym georgia">UiO</a>
                  

                  
                    <a href="/ritmo/english" class="uio-host">RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion</a>
                  
            </div>
            

            

            
              <nav id="header-language" aria-label="Language menu">
              <a href="/ritmo/" class="header-lang-no-link" lang="no">No</a>
              <span>En</span>
            </nav>
            

            <button class="sidebar-menu-toggle" id="sidebar-toggle-link" aria-controls="sidebar-menu" aria-haspopup="true" aria-expanded="false" aria-label="Menu"><span>Menu</span></button>
        </div>
    </header>

   <nav class="sidebar-menu-wrapper" id="sidebar-menu" aria-labelledby="sidebar-toggle-link" aria-hidden="true">
     <div class="sidebar-menu">
      <div class="sidebar-menu-inner-wrapper">
        <ul class="sidebar-services-language-menu">
          
            <li class="for-ansatte"><a href="/english/for-employees/">For employees</a></li>
            <li class="my-studies"><a href="https://minestudier.no/en/index.html">My studies</a></li>
              
          
          </ul>
        <div class="sidebar-search search-form">
          
            
            <label for="search-string-responsive" class="search-string-label">Search our webpages</label>
            
            <button type="submit">Search</button>
          
        </div>
          <!-- Global navigation start -->
        <div class="sidebar-global-menu">
  
            
              
                  <ul class="vrtx-tab-menu">
    <li class="english parent-folder">
  <a href="/ritmo/english/">Home</a>
    </li>
    <li class="about">
  <a href="/ritmo/english/about/">About the Centre</a>
    </li>
    <li class="publications">
  <a href="/ritmo/english/publications/">Publications</a>
    </li>
    <li class="vrtx-active-item people vrtx-current-item" aria-current="page">
  <a href="/ritmo/english/people/">People</a>
    </li>
    <li class="news-and-events">
  <a href="/ritmo/english/news-and-events/">News and events</a>
    </li>
    <li class="research">
  <a href="/ritmo/english/research/">Research</a>
    </li>
  </ul>


              
            
            
        </div>
        <!-- Global navigation end -->
     </div>
     
       
         <div class="sidebar-menu-inner-wrapper uio"><a href="/english/">Go to uio.no</a></div>
       
     
     </div>
   </nav>

   <div id="main" class="main">
     <div id="left-main">
         <nav id="left-menu-same-level-folders" aria-labelledby="left-menu-title">
           <span id="left-menu-title" style="display: none">Sub menu</span>
             <ul class="vrtx-breadcrumb-menu">
            <li class="vrtx-ancestor"> <a href="/ritmo/english/people/"><span>People</span></a></li>
            <li class="vrtx-parent" ><a href="/ritmo/english/people/postdoctoral-fellows/"><span>Postdoctoral Fellows and Researchers</span></a>

      <ul>
          <li class="vrtx-child"><a class="vrtx-marked" aria-current="page" href="/ritmo/english/people/postdoctoral-fellows/michakrz/"><span>michakrz</span></a></li>
      </ul>

    </li>

  </ul>

         </nav>
     </div>

     <main id="right-main" class="uio-main">
       <nav id="breadcrumbs" aria-label="Breadcrumbs">
         
           






  <div id="vrtx-breadcrumb-wrapper">
    <div id="vrtx-breadcrumb" class="breadcrumb">
            <span class="vrtx-breadcrumb-level vrtx-breadcrumb-level-4">
            <a href="/ritmo/english/people/">People</a>
      	  <span class="vrtx-breadcrumb-delimiter">&gt;</span>
        </span>
            <span class="vrtx-breadcrumb-level vrtx-breadcrumb-level-5 vrtx-breadcrumb-before-active">
            <a href="/ritmo/english/people/postdoctoral-fellows/">Postdoctoral Fellows and Researchers</a>
      	  <span class="vrtx-breadcrumb-delimiter">&gt;</span>
        </span>
          <span class="vrtx-breadcrumb-level vrtx-breadcrumb-level-6 vrtx-breadcrumb-active">michakrz
        </span>
    </div>
  </div>

         
       </nav>
           
           
            
            
            

       <!--startindex-->

       
      <div id="vrtx-content">
        <div id="vrtx-main-content">
          <h1>
      
        michakrz
      </h1>
          
      
      
      
          <div id="vrtx-person-contact-info-wrapper">
              
      
        
        
        
          
          
            
            
            
            
              <!-- NOTE(review): alt previously "Image of person" (anti-pattern); now names
                   the person. Dimensions inferred from the filename (150x200) to reserve
                   layout space — confirm intrinsic size. -->
              <img class="vrtx-person-image" src="/ritmo/english/people/postdoctoral-fellows/michakrz/mike150x200.jpg" alt="Portrait of michakrz" width="150" height="200" loading="lazy">
            
          
        
      
              
      <div class="vrtx-person-contactinfo">
        
        
        
          Could not get user data from external service
        
      </div>
              
      <div id="vrtx-person-contact-info-extras">
        <!-- NOTE(review): the href previously contained leading whitespace and an
             embedded newline inside the attribute value, yielding a malformed URL;
             trimmed to a single clean path. -->
        <a id="vrtx-press-photo" href="/ritmo/english/people/postdoctoral-fellows/michakrz/mike-krzyzaniak.jpg?alt=original&amp;vrtx=view-as-webpage">Press photo</a>
      </div>
              <div class="vrtx-person-contact-info-wrapper-end"></div>
          </div>
          <div id="vrtx-person-main-content-wrapper">
            <div class="vrtx-article-body">
              <div style="position:relative; width:100%; height:0; padding-bottom:56.25%;"></div>

            </div>
            

            
      
      
      
      
      
      
        
        
      

      
      
        
      

      
        



<style>

    .publisher-category-CHAPTER {
            font-style: normal;
    }

    .parent-title-articlesAndBookChapters,
    .parent-title-other,
    .title-books,
    .publisher-books,
    .publisher-other,
    .publisher-category-ARTICLE {
        font-style: italic;
    }

</style>


    <div id="vrtx-publications-wrapper">

      <h2>Publications</h2>



      <div id="vrtx-publication-tabs">
        <ul>
            <li><a href="#vrtx-publication-tab-1" name="vrtx-publication-tab-1">Scientific articles and book chapters</a></li>
            <li><a href="#vrtx-publication-tab-2" name="vrtx-publication-tab-2">Other</a></li>
        </ul>



    <div id="vrtx-publication-tab-1">
  <ul class="vrtx-external-publications">

      <li id="vrtx-external-publication-2031494" class="vrtx-external-publication">
        <div id="vrtx-publication-2031494">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2031494">
                Bentsen, Lars Ødegaard; Simionato, Riccardo; Wallace, Benedikte &amp; Krzyzaniak, Michael Joseph
            </span>(2022).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Transformer and LSTM Models for Automatic Counterpoint Generation using Raw Audio.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Proceedings of the SMC Conferences.
                </span>
                            
            doi: <a href="https://doi.org/10.5281/zenodo.6572847">10.5281/zenodo.6572847</a>.
            <a href="https://hdl.handle.net/11250/3547304">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">A study investigating Transformer and LSTM models applied to raw audio for automatic generation of counterpoint was conducted. In particular, the models learned to generate missing voices from an input melody, using a collection of raw audio waveforms of various pieces of Bach’s work, played on different instruments. The research demonstrated the efficacy and behaviour of the two deep learning (DL) architectures when applied to raw audio data, which are typically characterised by much longer sequences than symbolic music representations, such as MIDI. Currently, the LSTM model has been the quintessential DL model for sequence-based tasks, such as generative audio models, but the research conducted in this study shows that the Transformer model can achieve competitive results on a fairly complex raw audio task. The research therefore aims to spark further research and investigation into how Trans- former models can be used for applications typically dominated by recurrent neural networks (RNN). In general, both models yielded excellent results and generated sequences with temporal patterns similar to the input targets for songs that were not present in the training data, as well as for a sample taken from a completely different dataset.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2071464" class="vrtx-external-publication">
        <div id="vrtx-publication-2071464">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2071464">
                Krzyzaniak, Michael Joseph &amp; Bishop, Laura
            </span>(2022).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Professor Plucky—Expressive body motion in human-robot musical ensembles.
                </span>
                    <span class="vrtx-parent-contributors">
                            In Carlson, Kristin (Eds.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    MOCO &#39;22: Proceedings of the 8th International Conference on Movement and Computing.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/forlag?pid=517D4F8F-AF83-4062-82FA-254E8A87D7D8">Association for Computing Machinery (ACM)</a>.
                </span>
                <span class="vrtx-issn">ISSN 9781450387163.</span>
                            
            doi: <a href="https://doi.org/10.1145/3537972.3537983">10.1145/3537972.3537983</a>.
            <a href="https://hdl.handle.net/10852/101117">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">When people play music together, they move their bodies, and that movement plays an important role in the activity of group music making. In contrast, when robots play music with people, the robots are usually stiff and mechanical in their movement. In general, it is not well understood how the movement of such robots affects how people interact with them, or how the robot movement should be designed in order to promote certain features of interaction. As an initial exploration into these questions, we built a prototype guitar plucking robot that plucks the strings with either a) kinetic plucking mechanisms that are designed to have visually appealing movement, or b) control plucking mechanisms that do not visually move. In a pilot study we found that when guitarists play with the robot, they move their hands more and look at the robot more when it uses the kinetic mechanisms as opposed to the control ones. However, they do not report preferring the kinetic mechanisms. These preliminary findings suggest some very clear hypotheses for future followup studies.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2062246" class="vrtx-external-publication">
        <div id="vrtx-publication-2062246">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2062246">
                Kwak, Dongho; Krzyzaniak, Michael Joseph; Danielsen, Anne &amp; Jensenius, Alexander Refsum
            </span>(2022).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        A mini acoustic chamber for small-scale sound experiments.
                </span>
                    <span class="vrtx-parent-contributors">
                            In Iber, Michael &amp; Enge, Kajetan (Ed.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    Audio Mostly 2022: What you hear is what you see? Perspectives on modalities in sound and music interaction.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        ACM Publications.
                </span>
                <span class="vrtx-issn">ISSN 9781450397018.</span>
                            
                <span class="vrtx-pages">p. 143–146.</span>
            doi: <a href="https://doi.org/10.1145/3561212.3561223">10.1145/3561212.3561223</a>.
            <a href="https://hdl.handle.net/10852/111371">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">This paper describes the design and construction of a mini acoustic chamber using low-cost materials. The primary purpose is to provide an acoustically treated environment for small-scale sound measurements and experiments using ≤ 10-inch speakers. Testing with different types of speakers showed frequency responses of &lt;±10 dB peak-to-peak (except the “boxiness” range below 900 Hz), and the acoustic insulation (soundproofing) of the chamber is highly efficient (approximately 20 dB SPL in reduction). Therefore, it provides a significant advantage in conducting experiments requiring a small room with consistent frequency response and preventing unwanted noise and hearing damage. Additionally, using a cost-effective and compact acoustic chamber gives flexibility when characterizing a small-scale setup and sound stimuli used in experiments.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2054264" class="vrtx-external-publication">
        <div id="vrtx-publication-2054264">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2054264">
                Krzyzaniak, Michael; Erdem, Cagri &amp; Glette, Kyrre
            </span>(2022).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        What Makes Interactive Art Engaging?                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Frontiers in Computer Science.
                </span>
                            4.
            doi: <a href="https://doi.org/10.3389/fcomp.2022.859496">10.3389/fcomp.2022.859496</a>.
            <a href="https://hdl.handle.net/11250/4170048">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Interactive art requires people to engage with it, and some works of interactive art are more intrinsically engaging than others. This article asks what properties of a work of interactive art promote engagement. More specifically, it examines four properties: (1) the number of controllable parameters in the interaction, (2) the use of fantasy in the work, (3) the timescale on which the work responds, and (4) the amount of agency ascribed to the work. Each of these is hypothesized to promote engagement, and each hypothesis is tested with a controlled user study in an ecologically valid setting on the Internet. In these studies, we found that more controllable parameters increases engagement; the use of fantasy increases engagement for some users and not others; the timescale surprisingly has no significant effect on engagement but may relate to the style of interaction; and more ascribed agency is correlated with greater engagement although the direction of causation is not known. This is not intended to be an exhaustive list of all properties that may promote engagement, but rather a starting point for more studies of this kind.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-2003222" class="vrtx-external-publication">
        <div id="vrtx-publication-2003222">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2003222">
                Karbasi, Seyed Mojtaba; Haug, Halvor Sogn; Kvalsund, Mia-Katrin; Krzyzaniak, Michael Joseph &amp; Tørresen, Jim
            </span>(2021).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        A Generative Model for Creating Musical Rhythms with Deep Reinforcement Learning.
                </span>
                    <span class="vrtx-parent-contributors">
                            In Gioti, Artemi-Maria (Eds.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    The Proceedings of 2nd Conference on AI Music Creativity.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/forlag?pid=CC09C28D-284E-4814-ACD9-801CE3C8852C">AI Music Creativity (AIMC)</a>.
                </span>
                <span class="vrtx-issn">ISSN 9783200082724.</span>
                            
            doi: <a href="https://doi.org/10.5281/zenodo.5137900">10.5281/zenodo.5137900</a>.
            <a href="https://hdl.handle.net/11250/3600063">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">Musical Rhythms can be modeled in different ways. Usually the models rely on certain temporal divisions and time discretization. We have proposed a generative model based on Deep Reinforcement Learning (Deep RL) that can learn musical rhythmic patterns without defining temporal structures in advance. In this work we have used the Dr. Squiggles platform, which is an interactive robotic system that generates musical rhythms via interaction, to train a Deep RL agent. The goal of the agent is to learn the rhythmic behavior from an environment with high temporal resolution, and without defining any basic rhythmic pattern for the agent. This means that the agent is supposed to learn rhythmic behavior in an approximated continuous space just via interaction with other rhythmic agents. The results show significant adaptability from the agent and great potential for RL-based models to be used as creative algorithms in musical and creativity applications.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1929058" class="vrtx-external-publication">
        <div id="vrtx-publication-1929058">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1929058">
                Krzyzaniak, Michael Joseph
            </span>(2021).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Musical robot swarms, timing, and equilibria.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Journal of New Music Research.
                </span>
                <span class="vrtx-issn">ISSN 0929-8215.</span>
                            50(3),
                <span class="vrtx-pages">p. 279–297.</span>
            doi: <a href="https://doi.org/10.1080/09298215.2021.1910313">10.1080/09298215.2021.1910313</a>.
            <a href="https://hdl.handle.net/11250/4217239">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1889788" class="vrtx-external-publication">
        <div id="vrtx-publication-1889788">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1889788">
                Erdem, Cagri; Jensenius, Alexander Refsum; Glette, Kyrre; Krzyzaniak, Michael Joseph &amp; Veenstra, Frank
            </span>(2020).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Air-Guitar Control of Interactive Rhythmic Robots.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-ARTICLE">
                        Proceedings of the International Conference on Live Interfaces (Proceedings of ICLI).
                </span>
                            
                <span class="vrtx-pages">p. 208–210.</span>
            
            <a href="https://hdl.handle.net/11250/4450253">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">This paper describes an interactive art installation shown at ICLI in Trondheim in March 2020. The installation comprised three musical robots (Dr. Squiggles) that play rhythms by tapping. Visitors were invited to wear muscle-sensor armbands, through which they could control the robots by performing ‘air-guitar’-like gestures.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1829313" class="vrtx-external-publication">
        <div id="vrtx-publication-1829313">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1829313">
                Krzyzaniak, Michael Joseph
            </span>(2020).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Words to Music Synthesis.
                </span>
                    <span class="vrtx-parent-contributors">
                            In Michon, Romain &amp; Schroeder, Franziska (Ed.),
                    </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    Proceedings of the International Conference on New Interfaces for Musical Expression.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        Birmingham City University.
                </span>
                <span class="vrtx-issn">ISSN 9781949373998.</span>
                            
                <span class="vrtx-pages">p. 29–34.</span>
            
            <a href="https://hdl.handle.net/11250/4382862">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">This paper discusses the design of a musical synthesizer that takes words as input, and attempts to generate music that somehow underscores those words. This is considered as a tool for sound designers who could, for example, enter dialogue from a film script and generate appropriate background music. The synthesizer uses emotional valence and arousal as a common representation between words and music. It draws on previous studies that relate words and musical features to valence and arousal. The synthesizer was evaluated with a user study. Participants listened to music generated by the synthesizer, and described the music with words. The arousal of the words they entered was highly correlated with the intended arousal of the music. The same was, surprisingly, not true for valence. The synthesizer is at https://michaelkrzyzaniak.com/Ambisynth/emotion_synth/.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1785461" class="vrtx-external-publication">
        <div id="vrtx-publication-1785461">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1785461">
                Krzyzaniak, Michael Joseph; Frohlich, David &amp; Jackson, Philip JB
            </span>(2019).
                <span class="vrtx-title title-articlesAndBookChapters">
                    <!-- For readability. Too many underlined characters when both present -->
                        Six types of audio that DEFY reality! A taxonomy of audio augmented reality with examples,
                </span>
                <span class="vrtx-parent-title parent-title-articlesAndBookChapters">
                    AM&#39;19: Proceedings of the 14th International Audio Mostly Conference: A Journey in Sound on ZZZ.
                </span>
                <span class="vrtx-publisher publisher-articlesAndBookChapters publisher-category-CHAPTERACADEMIC">
                        <a class="vrtx-publisher" href="https://kanalregister.hkdir.no/publiseringskanaler/info/forlag?pid=517D4F8F-AF83-4062-82FA-254E8A87D7D8">Association for Computing Machinery (ACM)</a>.
                </span>
                <span class="vrtx-issn">ISSN 9781450372978.</span>
                            
            doi: <a href="https://doi.org/10.1145/3356590.3356615">10.1145/3356590.3356615</a>.
            <a href="https://hdl.handle.net/11250/3421753">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">In this paper we examine how the term ‘Audio Augmented Reality’ (AAR) is used in the literature, and how the concept is used in practice. In particular, AAR seems to refer to a variety of closely related concepts. In order to gain a deeper understanding of disparate work surrounding AAR, we present a taxonomy of these concepts and highlight both canonical examples in each category, as well as edge cases that help define the category boundaries.</p>
                </span>
        </div>
    </li>
    </ul>
      <p class="vrtx-more-external-publications"><a href="https://nva.sikt.no/research-profile/1139635">View all works in NVA</a></p>
    </div>

    <div id="vrtx-publication-tab-2">
  <ul class="vrtx-external-publications">

      <li id="vrtx-external-publication-2071463" class="vrtx-external-publication">
        <div id="vrtx-publication-2071463">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2071463">
                Krzyzaniak, Michael Joseph &amp; Bishop, Laura
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Professor Plucky—Expressive body motion in human-robot musical ensembles.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3325632">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-2062247" class="vrtx-external-publication">
        <div id="vrtx-publication-2062247">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-2062247">
                Kwak, Dongho; Krzyzaniak, Michael Joseph; Danielsen, Anne &amp; Jensenius, Alexander Refsum
            </span>(2022).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        A mini acoustic chamber for small-scale sound experiments.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4726991">Full text in Research Archive</a>
                <span class="vrtx-publication-summary">
                            <a href="#" aria-expanded="false" aria-label="Show summary" class="vrtx-publication-summary">Show summary</a>
                            <p class="vrtx-publication-summary" style="display:none">This paper describes the design and construction of a mini acoustic chamber using low-cost materials. The primary purpose is to provide an acoustically treated environment for small-scale sound measurements and experiments using ≤ 10-inch speakers. Testing with different types of speakers showed frequency responses of &lt;±10 dB peak-to-peak (except the “boxiness” range below 900 Hz), and the acoustic insulation (soundproofing) of the chamber is highly efficient (approximately 20 dB SPL in reduction). Therefore, it provides a significant advantage in conducting experiments requiring a small room with consistent frequency response and preventing unwanted noise and hearing damage. Additionally, using a cost-effective and compact acoustic chamber gives flexibility when characterizing a small-scale setup and sound stimuli used in experiments.</p>
                </span>
        </div>
    </li>
      <li id="vrtx-external-publication-1897170" class="vrtx-external-publication">
        <div id="vrtx-publication-1897170">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1897170">
                Krzyzaniak, Michael Joseph
            </span>(2021).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Dr. Squiggles AI Rhythm Robot.
                </span>
                    <span class="vrtx-parent-contributors">
                            In Senese, Mike (Eds.),
                    </span>
                <span class="vrtx-parent-title parent-title-other">
                    Make: Volume 76 (Behind New Eyes).
                </span>
                <span class="vrtx-publisher publisher-other publisher-category-POPULARARTICLE">
                        Make Community LLC.
                </span>
                <span class="vrtx-issn">ISSN 9781680457001.</span>
                            
                <span class="vrtx-pages">p. 88–97.</span>
            
            <a href="https://hdl.handle.net/11250/4998240">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1938775" class="vrtx-external-publication">
        <div id="vrtx-publication-1938775">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1938775">
                Karbasi, Seyed Mojtaba; Haug, Halvor Sogn; Kvalsund, Mia-Katrin; Krzyzaniak, Michael Joseph &amp; Tørresen, Jim
            </span>(2021).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        A Generative Model for Creating Musical Rhythms with Deep Reinforcement Learning.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/3464551">Full text in Research Archive</a>
        </div>
    </li>
      <li id="vrtx-external-publication-1814057" class="vrtx-external-publication">
        <div id="vrtx-publication-1814057">
            <span class="vrtx-contributors" id="vrtx-publication-contributors-1814057">
                Krzyzaniak, Michael Joseph
            </span>(2020).
                <span class="vrtx-title title-other">
                    <!-- For readability. Too many underlined characters when both present -->
                        Interactive Rhythmic Robots.
                </span>
                            
            
            <a href="https://hdl.handle.net/11250/4201075">Full text in Research Archive</a>
        </div>
    </li>
    </ul>
      <p class="vrtx-more-external-publications"><a href="https://nva.sikt.no/research-profile/1139635">View all works in NVA</a></p>
    </div>

      </div>
    </div>



      
            
      
        <div class="vrtx-date-info">
        <span class="published-date-label">Published</span>
        <span class="published-date">Sep. 12, 2019 2:19 PM </span>
        
        - <span class="last-modified-date">Last modified</span>
        <span class="last-modified-date">Dec. 6, 2021 7:59 PM</span>
        
        </div>
      
          </div>
        </div>
        <div id="vrtx-additional-content">
          
      
          

<div class="vrtx-projects vrtx-frontpage-box">
  <h2>Projects</h2>

  <div class="vrtx-box-content">
        <p>No ongoing projects</p>

        <div id="vrtx-related-projects-completed" class="vrtx-related-projects-completed">
          <h3>Completed projects</h3>
          
          
          
  <ul class="only-links">
      <li><a href="/ritmo/english/projects/completed-projects/professor-plucky/index.html">Professor Plucky</a></li>
  </ul>
        </div>
        <span id="vrtx-related-projects-completed-toggle-wrapper" style="display: none">
          <a id="vrtx-related-projects-completed-toggle" href="javascript:void(0);">Show completed projects</a>
        </span>
  </div>
</div>



          
          
      
      
        </div>
      </div>
       <!--stopindex-->
     </main>
   </div>

    <!-- Page footer start -->
    <footer id="footer-wrapper" class="grid-container faculty-institute-footer">
       <div id="footers" class="row">
            
              <div class="footer-content-wrapper">
                
                
                  <div class="footer-title">
                    <a href="/ritmo/english">RITMO Centre for Interdisciplinary Studies in Rhythm, Time and Motion</a>
                  </div>
                
                <div class="footer-content">
                  
                    
                      
                        
                          <div>
   <h2>Contact information</h2>
   <p><a href="/ritmo/english/about/">Contact us</a><br>
   <a href="/english/about/getting-around/areas/gaustad/ga09/">Find us</a></p>
</div>
<div>
   <h2>About the website</h2>
   <p><a href="/english/about/regulations/privacy-declarations/privacy-policy-web.html">Cookies</a><br>
   <a href="https://uustatus.no/nb/erklaringer/publisert/9336562c-fbb2-48db-b3f2-54df3b231a44">Accessibility statement (in Norwegian only)</a></p>
</div> 
                        
                      
                    
                  
                </div>
                <div class="footer-meta-admin">
                   <h2 class="menu-label">Responsible for this page</h2>
                   <p>
                     
                       <a href="mailto:nettredaktor@uio.no">Nettredaktør</a>
                     
                   </p>
                   




    <div class="vrtx-login-manage-component">
      <a href="/ritmo/english/people/postdoctoral-fellows/michakrz/index.html?authTarget"
         class="vrtx-login-manage-link"
         rel="nofollow">
        Log in
      </a>
    </div>



                </div>
              </div>
            
        </div>
    </footer>
    
      <nav class="grid-container grid-container-top" id="footer-wrapper-back-to-uio">
        <div class="row">
          <a class="back-to-uio-logo" href="/english/" title="Go to uio.no"></a>
        </div>
      </nav>
    

      
         
      
      

<!--a4d1bc0e1742c08b-->
<!-- NOTE(review): this hash-comment-wrapped Baidu "linksubmit" snippet, together
     with the spam title and the error-suppressing window.onerror in <head>,
     matches a common SEO-spam injection pattern — confirm it is intentional
     before keeping it at all. -->
<script>
(function () {
  // Dynamically injects Baidu's link-submission script before the first
  // <script> on the page. Always load it over HTTPS: the endpoint supports
  // HTTPS, and an http:// fallback invites mixed-content blocking and
  // man-in-the-middle script injection.
  var bp = document.createElement('script');
  bp.src = 'https://zz.bdstatic.com/linksubmit/push.js';
  var firstScript = document.getElementsByTagName('script')[0];
  firstScript.parentNode.insertBefore(bp, firstScript);
})();
</script><!--/a4d1bc0e1742c08b--></body>
</html>
