@inproceedings{oai:u-fukui.repo.nii.ac.jp:00022902,
  author    = {CHEN, Shuai and MAEDA, Yoichiro and TAKAHASHI, Yasutake},
  booktitle = {Proceedings of The 6th International Conference on Soft Computing and Intelligent Systems, and The 13th International Symposium on Advanced Intelligent Systems},
  month     = {Nov},
  note      = {In research on interactive music generation, we propose a method in which the computer generates music automatically and the music is then arranged according to the human conductor's gestures before being output. In this research, the generated music is derived from chaotic sound produced in real time by a network of chaotic elements. The conductor's hand motions are detected with a Microsoft Kinect. Music theory is embedded in the algorithm, so the generated music becomes richer. Furthermore, we constructed the music generation system and conducted an experiment on generating music composed by human beings. Kobe Convention Center (Kobe Portopia Hotel), Japan, November 20-24, 2012.},
  pages     = {840--845},
  publisher = {IEEE},
  title     = {Music Conductor Gesture Recognized Interactive Music Generation System},
  volume    = {2012},
  year      = {2012}
}