{
  "summary": {
    "total_publications": 103,
    "years": {
      "2009": 6,
      "2008": 4,
      "2007": 2,
      "2006": 6,
      "2005": 1,
      "2004": 3,
      "2003": 1,
      "2001": 1,
      "2000": 4,
      "1999": 8,
      "1998": 7,
      "1997": 1,
      "1996": 2,
      "1995": 2,
      "1994": 2,
      "1993": 3,
      "1992": 1,
      "1991": 1,
      "1990": 1,
      "1989": 3,
      "1984": 2,
      "1978": 1,
      "2010": 6,
      "1979": 1,
      "2016": 7,
      "2015": 7,
      "2014": 3,
      "2013": 5,
      "2012": 4,
      "2011": 6,
      "2017": 1,
      "1975": 1
    },
    "venues": {
      "Unknown": 16,
      "NIME 2009": 3,
      "SMC 2009": 1,
      "ICMC 2008": 1,
      "NIME": 4,
      "NIME 2008": 1,
      "IASTED Signal and Image Processing": 1,
      "ICMC 2007": 1,
      "New Interfaces for Musical Expression": 4,
      "6th International Symposium on Active Noise and Vibration Control": 1,
      "Audio Engineering Society 120th Convention": 1,
      "International Computer Music Conference": 21,
      "AES 121": 1,
      "Open Sound Control Conference": 3,
      "International Conference on New Interfaces for Musical Expression": 1,
      "IEEE International Conference on Acoustics": 1,
      "Audio Engineering Society 107th Convention": 4,
      "Audio Engineering Society 104th Convention": 1,
      "Institute of Electrical and Electronics Engineers Visualization 98": 1,
      "International Computer Music Conference (ICMC)": 2,
      "5th International Conference on Signal Processing Applications and Technology": 1,
      "Fourth International Conference on Signal Processing Applications and Technology": 1,
      "Adaptive and Learning Systems": 1,
      "17th International Computer Music Conference": 1,
      "15th International Computer Music Conference": 1,
      "Identites de la Guitare Electrique": 1,
      "3rd Music and Cognition Conference": 1,
      "New Instruments for Musical Expression": 1,
      "Linux Audio Conference": 1,
      "International Society for Music Information Retrieval Conference": 1,
      "SMC2016": 1,
      "ISMIR 2016": 1,
      "FARM2016": 1,
      "Bodies of Knowledge BOK2016": 1,
      "Study Day On Computer Simulation Of Musical Creativity": 1,
      "TEI 2015": 1,
      "ICMC": 2,
      "New Instruments of Musical Expression": 1,
      "SigCHI": 1,
      "SigChi": 1,
      "NIME 2012": 1,
      "Interactive Tabletops and Surfaces": 2,
      "Audio Engineering Society": 1,
      "Understanding VISUAL Music 2011": 1,
      "Pervasive 2011": 1,
      "ICMC 2011": 1,
      "NIME 2011": 1,
      "ICMC 2010": 1,
      "International Symposium on New Frontiers in Fiber Materials Science": 1,
      "SIGCHI Workshop: Organic experiences: (re)shaping interactions with deformable displays": 1,
      "Hacking Big Data Brother: From Biometrics to Intra-action": 1,
      "Stanford HCI Seminar": 1
    },
    "with_attachments": 15,
    "with_urls": 63,
    "with_abstracts": 92
  },
  "publications": [
    {
      "id": 117,
      "title": "A Low-level Embedded Service Architecture for Rapid DIY Design of Real-time Musical Instruments",
      "journal": "NIME 2009",
      "year": "2009",
      "note": "real-time musical interface, DIY design, embedded web services, rapid prototyping, reconfigurable firmware",
      "month": "04/06/2009",
      "abstract": "An on-the-fly reconfigurable low-level embedded service architecture is presented as a means to improve scalability, improve conceptual comprehensibility, reduce human error and reduce development time when designing new sensor-based electronic musical instruments with real-time responsiveness. The implementation of the concept in a project called micro-OSC is described. Other sensor interfacing products are evaluated in the context of DIY prototyping of musical instruments. The capabilities of the micro-OSC platform are demonstrated through a set of examples including resistive sensing, mixed digital-analog systems, many-channel sensor interfaces and time-based measurement methods.",
      "url": "http://cnmat.berkeley.edu/node/6902",
      "author": "Schmeder, Andrew and Freed, Adrian",
      "source_file": "117.html",
      "year_int": 2009
    },
    {
      "id": 118,
      "title": "Features and Future of Open Sound Control version 1.1 for NIME",
      "booktitle": "NIME 2009",
      "year": "2009",
      "month": "04/06/2009",
      "abstract": "The history and future of Open Sound Control (OSC) is discussed and the next iteration of the OSC specification is introduced with discussion of new features to support NIME community activities. The roadmap to a major revision of OSC is developed.",
      "url": "http://cnmat.berkeley.edu/node/7002",
      "author": "Freed, Adrian and Schmeder, Andy",
      "source_file": "118.html",
      "venue": "NIME 2009",
      "year_int": 2009
    },
    {
      "id": 119,
      "title": "The Tactus: a Tangible, Rhythmic Grid Interface Using Found-Objects",
      "booktitle": "NIME 2009",
      "year": "2009",
      "month": "03/06/2009",
      "address": "Pittsburgh, PA",
      "abstract": "This paper describes the inspiration and implementation of a tactile, tabletop synthesizer/step sequencer. The Tactus is an expandable and inexpensive musical interface for the creation of loop-based music. An optical camera, coupled with a computer running Max/MSP/Jitter can turn almost any matrix-like object into a step sequencer. The empty cells in the gridded object are filled with a fitting, colored object; the placement of which is analogous to adding an instrument or switching on a box in a step sequencer grid. The color and column position of every element in the matrix are used as parameters for a synthesizer while the row position of that element corresponds to the moment within the loop that entry is sounded. The two dimensional array can be positioned anywhere within the camera{\\textquoteright",
      "author": "Mann, Yotam and Lubow, Jeff and Freed, Adrian",
      "source_file": "119.html",
      "venue": "NIME 2009",
      "year_int": 2009
    },
    {
      "id": 120,
      "title": "Musical Applications and Design Techniques for the Gametrak Tethered Spatial Position Controller",
      "booktitle": "SMC 2009",
      "year": "2009",
      "abstract": "The Gametrak spatial position controller has been saved from the fate of so many discontinued gaming controllers to become an attractive and increasingly popular platform for experimental musical controllers, math and science manipulatives, large scale interactive installations and as a playful tangible gaming interface that promotes inter-generational creative play and discovery . After introducing the peculiarities of the GameTrak and comparing it to related spatial position sensing systems we survey musical applications of the device. The short paper format cannot do justice to the depth and breadth of such applications, so projects have been selected based on whether they represent unusual or surprising uses of the controller or because they represent fruitful platforms for future work.",
      "author": "Freed, Adrian and McCutchen, Devin and Schmeder, Andrew and Skriver Hansen, Anne-Marie and Overholt,Dan and Burleson,Winslow and Norgaard Jensen, Camilla and Mesker,Alex",
      "source_file": "120.html",
      "venue": "SMC 2009",
      "year_int": 2009
    },
    {
      "id": 121,
      "title": "Novel and Forgotten Current-steering Techniques for Resistive Multitouch, Duotouch, and Polytouch Position Sensing with Pressure",
      "booktitle": "NIME 2009",
      "year": "2009",
      "abstract": "The recent surge in development of multitouch gesture systems focuses around capacitive and optical sensing techniques. Although interesting and widely used by the NIME community, most musical applications of touch benefit greatly from concurrent acquisition of pressure or force at the touch points. Piezoresistive materials have proven the most effective modern choice for this. At least three generations of inventors have developed sensing circuitry for resistive arrays and piezoresistive materials with much duplication of effort along the way. This paper describes the key (and largely forgotten) methods and introduces new higher-performance circuits developed for musical instrument controllers.",
      "author": "Freed, Adrian",
      "source_file": "121.html",
      "venue": "NIME 2009",
      "year_int": 2009
    },
    {
      "id": 122,
      "title": "Implementation and Applications of Open Sound Control Timestamps",
      "booktitle": "ICMC 2008",
      "year": "2008",
      "month": "25/08/2008",
      "pages": "655-658",
      "publisher": "ICMA",
      "organization": "ICMA",
      "address": "Belfast, Ireland",
      "abstract": "The background, purpose, and function of Open Sound Control (OSC) timestamps is reviewed. An analysis shows that jitter-induced noise with dispersion over the millisecond range significantly degrades real-time highresolution sensor signal streams. The design of a distributed clock synchronization and event scheduling domain over an asynchronous network is described. A realization of this model is presented, created using the new micro-OSC (uOSC) hardware platform and host software components in MaxMSP. An OSC address schema for client-server clock synchronization is documented. Two new objects for MaxMSP are introduced: OSC-timetag and OSC-schedule.",
      "author": "Schmeder, Andy and Freed, Adrian",
      "source_file": "122.html",
      "venue": "ICMC 2008",
      "year_int": 2008
    },
    {
      "id": 123,
      "title": "uOSC: The Open Sound Control Reference Platform for Embedded Devices",
      "booktitle": "NIME",
      "year": "2008",
      "month": "05/06/2008",
      "address": "Genova, Italy",
      "abstract": "A general-purpose firmware for a low-cost microcontroller is described that employs the Open Sound Control protocol over USB. The software is designed with considerations for integration in new musical interfaces and embedded devices. Features of note include stateless design, efficient floating-point support, temporally correct data handling, and protocol completeness. The hardware/software combination is compared to other solutions. A performance analysis is conducted to quantify the latency characteristics under realistic conditions.",
      "author": "Schmeder, Andy and Freed, Adrian",
      "source_file": "123.html",
      "venue": "NIME",
      "year_int": 2008
    },
    {
      "id": 124,
      "title": "Application of new Fiber and Malleable Materials for Agile Development of Augmented Instruments and Controllers",
      "booktitle": "NIME 2008",
      "year": "2008",
      "note": "CNMAT",
      "abstract": "The paper introduces new fiber and malleable materials including piezoresistive fabric, conductive heat-shrink tubing and shows techniques and examples of how they may be used for rapid prototyping and agile develop of musical instrument controllers. New implementations of well-known designs are covered as well as enhancement of existing controllers. Finally two new controllers are introduced that are made possible by these recently available new materials and construction techniques",
      "keywords": "Agile Development, Rapid Prototyping, Conductive fabric, Piezoresistive fabric, conductive heatshrink tubing, augmented instruments",
      "url": "http://www.nime.org/proceedings/2008/nime2008_107.pdf",
      "attachments": "http://www.adrianfreed.com/sites/default/files/applicationsMalleableNIME2008AdrianFreed.pdf",
      "author": "Freed, Adrian",
      "source_file": "124.html",
      "venue": "NIME 2008",
      "keywords_list": [
        "Agile Development",
        "Rapid Prototyping",
        "Conductive fabric",
        "Piezoresistive fabric",
        "conductive heatshrink tubing",
        "augmented instruments"
      ],
      "year_int": 2008
    },
    {
      "id": 125,
      "title": "Applications of Environmental Sensing for Spherical Loudspeaker Arrays",
      "booktitle": "IASTED Signal and Image Processing",
      "year": "2008",
      "address": "Hawaii",
      "abstract": "This paper surveys emerging applications of inertial and environmental sensing associated with spherical loudspeaker arrays.",
      "author": "Freed, Adrian and Schmeder, Andy and Zotter, Franz",
      "source_file": "125.html",
      "venue": "IASTED Signal and Image Processing",
      "year_int": 2008
    },
    {
      "id": 126,
      "title": "CNMAT Information Infrastructure",
      "booktitle": "ICMC 2007",
      "volume": "1",
      "year": "2007",
      "month": "27/09/2007",
      "pages": "325-328",
      "publisher": "ICMA",
      "organization": "ICMA",
      "address": "Copenhagen, Denmark",
      "abstract": "We describe the new unified information infrastructure CNMAT uses for organizing and publishing its music production, teaching, research and development and administrative activities with a focus on integration of collaboration, semantic web, and version control technologies.",
      "author": "Schmeder, Andrew and Wright, Matthew and Freed, Adrian and Campion, Edmund and Wessel, David",
      "source_file": "126.html",
      "venue": "ICMC 2007",
      "year_int": 2007
    },
    {
      "id": 127,
      "title": "A Force Sensitive Multi-touch Array Supporting Multiple 2-D Musical Control Structures",
      "booktitle": "New Interfaces for Musical Expression",
      "year": "2007",
      "month": "07/06/2007",
      "pages": "41-45",
      "publisher": "ACM",
      "organization": "ACM",
      "address": "New York",
      "abstract": "We describe the design, implementation, and evaluation with real musical use of a force sensitive multi-touch array of touchpads. Each of the touchpads supports a three dimensional representation of musical material: two spatial dimensions plus a force measurement we typically use to control dynamics. Our system uses 24 pads laid out in a ?brick wall? style array that fits nicely under the two hands. We emphasize the treatment of gestures as sub-sampled audio signals. This tight coupling of gesture with audio provides for a high degree of control intimacy.",
      "keywords": "High-resolution gestural signals, Pressure and force sensing, Touchpad, VersaPad",
      "doi": "10.1145/1279740.1279745",
      "url": "http://portal.acm.org/citation.cfm?id=1279740.1279745",
      "author": "Wessel, David and Rimas Avizienis and Freed, Adrian and Wright, Matthew",
      "source_file": "127.html",
      "venue": "New Interfaces for Musical Expression",
      "keywords_list": [
        "High-resolution gestural signals",
        "Pressure and force sensing",
        "Touchpad",
        "VersaPad"
      ],
      "year_int": 2007
    },
    {
      "id": 129,
      "title": "Active Damping Of A Vibrating String",
      "booktitle": "6th International Symposium on Active Noise and Vibration Control",
      "year": "2006",
      "month": "2006/09/18",
      "address": "Adelaide, Australia",
      "abstract": "This paper presents an investigation of active damping of the vertical and horizontal transverse modes of a rigidly-terminated vibrating string. A state-space model that emulates the behavior of the string is introduced, and we explain the theory behind band pass filter control and proportional-integral-derivative (PID) control as applied to a vibrating string. After describing the characteristics of various actuators and sensors, we motivate the choice of collocated electromagnetic actuators and a multi- axis piezoelectric bridge sensor. Integral control is shown experimentally to be capable of damping the string independently of the fundamental frequency. We consider the difference between damping the energy in only one transverse axis, versus simultaneously damping the energy in both the vertical and horizontal transverse axes. We discuss the advantages and drawbacks of various string actuators and sensors and described an experimental setup. We present waveforms of actively damped string vibrations and demonstrate that the sensors and actuators must be carefully aligned in order to obtain completely independent control of the vertical and horizontal transverse axes. Future work will involve implementing various sound effects for the electric guitar directly on a guitar string. For example, the tremolo effect consists of periodically varying the amplitude of an input signal, and so the active damping parameter could be varied periodically to achieve this effect. The waveforms corresponding to the model, the magenta poles displayed in the root loci, and the experiments are available from: http://ccrma.stanford.edu/~eberdahl/Projects/ActiveDamping/index.html",
      "url": "http://cnmat.berkeley.edu/publication/active_damping_vibrating_string",
      "author": "Berdahl, Edgar and Smith III, Julius and Freed, Adrian",
      "source_file": "129.html",
      "venue": "6th International Symposium on Active Noise and Vibration Control",
      "year_int": 2006
    },
    {
      "id": 130,
      "title": "A Compact 120 Independent Element Spherical Loudspeaker Array with Programmable Radiation Patterns",
      "booktitle": "Audio Engineering Society 120th Convention",
      "year": "2006",
      "pages": "Convention Paper No. 6783",
      "address": "Paris, France",
      "abstract": "We describe the geometric and engineering design challenges that were overcome to create a new compact, 10-inch diameter spherical loudspeaker array with integrated class-D amplifiers and a 120 independent channel digital audio interface using Gigabit Ethernet. A special hybrid geometry is used that combines the maximal symmetry of a triangular-faceted icosahedron with the compact planar packing of 6 circles on an equilateral triangle (\"billiard ball packing\"). Six custom 1.25inch drivers developed by Meyer Sound Labs are mounted on each of 20 aluminum triangular circuit boards. Class D amplifiers for the six speakers are mounted on the other side of each board. Two pentagonal circuit boards in the icosahedron employ Xilinx Spartan 3E FPGA{\\textquoteright",
      "url": "http://cnmat.berkeley.edu/publication/compact_120_independent_element_spherical_loudspeaker_array_programmable_radiation_patterns",
      "author": "Rimas Avizienis and Freed, Adrian and Kassakian, Peter and Wessel, David",
      "source_file": "130.html",
      "venue": "Audio Engineering Society 120th Convention",
      "year_int": 2006
    },
    {
      "id": 131,
      "title": "Augmenting the Cello",
      "booktitle": "New Interfaces for Musical Expression",
      "year": "2006",
      "pages": "409-413",
      "publisher": "NIME",
      "organization": "NIME",
      "address": "Paris, France",
      "abstract": "Software and hardware enhancements to an electric 6-string cello are described with a focus on a new mechanical tuning device, a novel rotary sensor for bow interaction and control strategies to leverage a suite of polyphonic sound processing effects.",
      "url": "http://cnmat.berkeley.edu/publication/augmenting_cello",
      "author": "Freed, Adrian and Uitti, Frances-Marie and Wessel, David and Zbyszynski, Michael",
      "source_file": "131.html",
      "venue": "New Interfaces for Musical Expression",
      "year_int": 2006
    },
    {
      "id": 132,
      "title": "Beyond 0-5V: Expanding Sensor Integration Architectures",
      "booktitle": "New Interfaces for Musical Expression",
      "year": "2006",
      "pages": "97-100",
      "publisher": "NIME",
      "organization": "NIME",
      "address": "Paris, France",
      "abstract": "A new sensor integration system and its first incarnation is described. As well as supporting existing analog sensor arrays a new architecture allows for easy integration of the new generation of low-cost digital sensors used in computer music performance instruments and installation art.",
      "isbn": "2-84426-314-3",
      "url": "http://cnmat.berkeley.edu/publication/beyond_0_5v_expanding_sensor_integration_architectures",
      "author": "Freed, Adrian and Rimas Avizienis and Wright, Matthew",
      "source_file": "132.html",
      "venue": "New Interfaces for Musical Expression",
      "year_int": 2006
    },
    {
      "id": 133,
      "title": "Comparing Musical Control Structures and Signal Processing Strategies for the Augmented Cello and Guitar",
      "booktitle": "International Computer Music Conference",
      "year": "2006",
      "note": "Additional files.",
      "pages": "636-642",
      "publisher": "International Computer Music Association",
      "organization": "International Computer Music Association",
      "address": "New Orleans, LA",
      "abstract": "In this paper we examine the hardware (sensors and connectivity), physical performance (placement, sensor type, tactile feedback), and musical performance (software development and control) concerns inherent to extending control to six-string chordophones in general, and specifically to the electric guitar (plucked) and electric cello (bowed).",
      "url": "http://cnmat.berkeley.edu/publication/comparing_musical_control_structures_and_signal_processing_strategies_augmented_cello_and_guitar",
      "author": "Freed, Adrian and Lee, Ahm and Schott, John and Uitti, Frances-Marie and Wright, Matthew and Zbyszynski, Michael",
      "source_file": "133.html",
      "venue": "International Computer Music Conference",
      "year_int": 2006
    },
    {
      "id": 134,
      "title": "Music MetaData Quality: a multiyear Case Study using the Music of Skip James",
      "booktitle": "AES 121",
      "year": "2006",
      "address": "San Francisco, CA",
      "abstract": "The case study reported here is an exploratory step towards developing a quantitative system for audio and music metadata quality measurement. Errors, their sources and their propagation mechanisms are carefully examined in a small but meaningful subset of music metadata centered on a single artist Skip James.",
      "url": "http://cnmat.berkeley.edu/publication/music_metadata_quality_multiyear_case_study_using_music_skip_james",
      "author": "Freed, Adrian",
      "source_file": "134.html",
      "venue": "AES 121",
      "year_int": 2006
    },
    {
      "id": 135,
      "title": "Control of VST Plug-ins Using OSC",
      "booktitle": "International Computer Music Conference",
      "year": "2005",
      "pages": "263-266",
      "publisher": "International Computer Music Association",
      "organization": "International Computer Music Association",
      "address": "Barcelona, Spain",
      "abstract": "The basic control structure of VST audio plug-ins can limit their usefulness. Control can be improved through the use of Open Sound Control by developing a flexible name space that employs multiple, intuitive parameter names (and aliases), higher-level controls and range mapping, simplifying control for the user. We will demonstrate these ideas with Max/MSP patches that repackage VST plug-ins in a more usable way and also introduce the idea that plug-in interfaces themselves can be improved by building in a well-formed OSC name space. Such a name space would enhance the longevity and flexibility of finished musical works. We will also show that when the plug-in is controlled directly with OSC atomicity and queries, control could be further improved.in",
      "url": "http://cnmat.berkeley.edu/publication/control_vst_plug_ins_using_osc",
      "author": "Zbyszynski, Michael and Freed, Adrian",
      "source_file": "135.html",
      "venue": "International Computer Music Conference",
      "year_int": 2005
    },
    {
      "id": 136,
      "title": "OSC Control of VST Plug-ins",
      "booktitle": "Open Sound Control Conference",
      "year": "2004",
      "month": "2004/07/30",
      "publisher": "CNMAT",
      "organization": "CNMAT",
      "address": "Berkeley, CA",
      "abstract": "The basic control structure of VST audio plug-ins can limit their usefulness. Control can be improved through the use of Open Sound Control by developing a flexible name space that employs multiple, intuitive parameter names (and aliases), higher-level controls and range mapping, simplifying control for the user. We will demonstrate these ideas with Max/MSP patches that repackage VST plug-ins in a more usable way and also introduce the idea that plug-in interfaces themselves can be improved by building in a well-formed OSC name space. Such a name space would enhance the longevity and flexibility of finished musical works. We will also show that when the plug-in is controlled directly with OSC atomicity and queries, control could be further improved.",
      "url": "http://cnmat.berkeley.edu/publication/osc_control_vst_plug_ins",
      "author": "Freed, Adrian and Zbyszynski, Michael",
      "source_file": "136.html",
      "venue": "Open Sound Control Conference",
      "year_int": 2004
    },
    {
      "id": 137,
      "title": "Towards a More Effective OSC Time Tag Scheme",
      "booktitle": "Open Sound Control Conference",
      "year": "2004",
      "month": "2004/07/30",
      "publisher": "CNMAT",
      "organization": "CNMAT",
      "address": "Berkeley, CA",
      "url": "http://cnmat.berkeley.edu/publication/osc_time_tags",
      "author": "Freed, Adrian",
      "source_file": "137.html",
      "venue": "Open Sound Control Conference",
      "year_int": 2004
    },
    {
      "id": 138,
      "title": "OSC and Gesture features of CNMAT{\\textquoteright",
      "booktitle": "Open Sound Control Conference",
      "year": "2004",
      "address": "Berkeley, CA",
      "url": "http://cnmat.berkeley.edu/publication/osc_and_gesture_features_cnmats_connectivity_processor",
      "author": "Rimas Avizienis and Freed, Adrian",
      "source_file": "138.html",
      "venue": "Open Sound Control Conference",
      "year_int": 2004
    },
    {
      "id": 139,
      "title": "Open Sound Control: State of the Art 2003",
      "booktitle": "International Conference on New Interfaces for Musical Expression",
      "year": "2003",
      "note": "Open Sound Control",
      "pages": "153-159",
      "publisher": "NIME",
      "organization": "NIME",
      "address": "Montreal",
      "abstract": "Open SoundControl (?OSC?) is a protocol for communication among computers, sound synthesizers, and other multimedia devices that is optimized for modern networking technology. OSC has achieved wide use in the field of computer-based new interfaces for musical expression for wide-area and local-area networked distributed music systems, inter-process communication, and even within a single application.",
      "url": "http://cnmat.berkeley.edu/publication/open_sound_control_state_art_2003",
      "author": "Wright, Matthew and Freed, Adrian and Momeni, Ali",
      "source_file": "139.html",
      "venue": "International Conference on New Interfaces for Musical Expression",
      "year_int": 2003
    },
    {
      "id": 140,
      "title": "Managing Complexity with Explicit Mapping of Gestures to Sound Control with OSC",
      "booktitle": "International Computer Music Conference",
      "year": "2001",
      "pages": "314-317",
      "publisher": "International Computer Music Association",
      "organization": "International Computer Music Association",
      "address": "Habana, Cuba",
      "url": "http://cnmat.berkeley.edu/publication/managing_complexity_explicit_mapping_gestures_sound_control_osc",
      "author": "Wright, Matthew and Freed, Adrian and Lee, Ahm and Madden, Timothy and Momeni, Ali",
      "source_file": "140.html",
      "venue": "International Computer Music Conference",
      "year_int": 2001
    },
    {
      "id": 141,
      "title": "An Open Architecture for Real-time Music Software",
      "booktitle": "International Computer Music Conference",
      "year": "2000",
      "pages": "492-495",
      "publisher": "International Computer Music Association",
      "organization": "International Computer Music Association",
      "address": "Berlin, Germany",
      "abstract": "Open Sound World (OSW) is a scalable, extensible object-oriented language that allows sound designers and musicians to process sound in response to expressive real-time control. OSW allows users to develop at different levels, including visual patching, XML editing, scripting and high-level C++. Components called transforms are combined to form programs called patches. The set of included transforms can be extended using the ?Externalizer,? a tool for writing high-level specifications of new transforms. The OSW real-time scheduler supports a uniform timing model for all components and symmetric multiprocessing.",
      "url": "http://cnmat.berkeley.edu/publication/open_architecture_real_time_music_software",
      "author": "Chaudhary, Amar and Freed, Adrian and Wright, Matthew",
      "source_file": "141.html",
      "venue": "International Computer Music Conference",
      "year_int": 2000
    },
    {
      "id": 143,
      "title": "An XML-based SDIF Stream Relationships Language",
      "booktitle": "International Computer Music Conference",
      "year": "2000",
      "pages": "186-189",
      "publisher": "International Computer Music Association",
      "organization": "International Computer Music Association",
      "address": "Berlin, Germany",
      "abstract": "We introduce the SDIF Stream Relationships Language (?SDIF-SRL?), a formal language for describing the relationships among streams in an SDIF file. SDIF-SRL is based on XML, the ?Extensible Markup Language,? an emerging standard for data modeling and representation. We describe the structure of SRL and its use in several applications.",
      "url": "http://cnmat.berkeley.edu/publication/xml_based_sdif_stream_relationships_language",
      "author": "Wright, Matthew and Chaudhary, Amar and Freed, Adrian and Khoury, Sami and Wessel, David and Momeni, Ali",
      "source_file": "143.html",
      "venue": "International Computer Music Conference",
      "year_int": 2000
    },
    {
      "id": 145,
      "title": "Scalable Connectivity Processor for Computer Music Performance Systems",
      "booktitle": "International Computer Music Conference",
      "year": "2000",
      "pages": "523-526",
      "publisher": "International Computer Music Association",
      "organization": "International Computer Music Association",
      "address": "Berlin, Germany",
      "abstract": "Standard laptop computers are now capable of sizeable quantities of sound synthesis and sound processing, but low-latency, high quality, multichannel audio I/O has not been possible without a cumbersome external card cage. CNMAT has developed a solution using the ubiquitous 100BaseT Ethernet that supports up to 10 channels of 24-bit audio, 64 channels of sample-synchronous control-rate gesture data, and 4 precisely time-stamped MIDI I/O streams. Latency measurements show that we can get signals into and back out of Max/MSP in under 7 milliseconds. The central component in the device is a field programmable gate array (FPGA). In addition to providing a variety of computer interface capabilities, the device can function as a cross-coder for a variety of protocols including GMICS. This paper outlines the motivation, design, and implementation of the connectivity processor.",
      "url": "http://cnmat.berkeley.edu/publication/scalable_connectivity_processor_computer_music_performance_systems",
      "author": "Rimas Avizienis and Freed, Adrian and Suzuki, Takahiko and Wessel, David",
      "source_file": "145.html",
      "venue": "International Computer Music Conference",
      "year_int": 2000
    },
    {
      "id": 146,
      "title": "A Fixed-Point Recursive Digital Oscillator for Additive Synthesis of Audio",
      "booktitle": "IEEE International Conference on Acoustics, Speech, and Signal Processing (ICASSP)",
      "year": "1999",
      "month": "1999/03/01",
      "address": "Phoenix, AZ",
      "abstract": "This paper summarizes our work adapting a recursive digital resonator for use on sixteen-bit fixed-point hardware. Our modified oscillator is a two-pole filter that maintains frequency precision at a cost of two additional operations per filter sample. The new filter?s error properties are expressly matched to use in the range of frequencies relevant to additive synthesis of digital audio and sinusoidal modeling of speech in order to minimize the additional computational overhead. We present the algorithm, an error analysis, a performance analysis, and measurements of an implementation on a fixed-point vector microprocessor system.",
      "url": "http://cnmat.berkeley.edu/publication/fixed_point_recursive_digital_oscillator_additive_synthesis_audio",
      "author": "Hodes, Todd and Hauser, J. and Freed, Adrian and Wawrzynek, John and Wessel, David",
      "source_file": "146.html",
      "venue": "IEEE International Conference on Acoustics, Speech, and Signal Processing (ICASSP)",
      "year_int": 1999
    },
    {
      "id": 147,
      "title": "An Open Architecture for Real-Time Audio Processing Software",
      "booktitle": "Audio Engineering Society 107th Convention",
      "year": "1999",
      "pages": "preprint #5031",
      "publisher": "Audio Engineering Society",
      "organization": "Audio Engineering Society",
      "address": "New York, NY",
      "abstract": "OSW, or \"Open Sound World,\" allows development of audio applications using patching, C++, high-level specifications and scripting. In OSW, components called \"transforms\" are dynamically configured into larger units called \"patches.\" New components can be expressed using familiar mathematical definitions without deep knowledge of C++. High-level specifications of transforms are created using the \"Externalizer,\" and are compiled and loaded into a running OSW environment. The data used by transforms can have any valid C++ type. OSW uses a reactive real-time scheduler that safely and efficiently handles multiple processors, time sources and synchronous data flows.",
      "keywords": "OSW, Open Sound World, Music Programming Language, Data Flow, Visual Programming",
      "url": "http://cnmat.berkeley.edu/publication/open_architecture_real_time_audio_processing_software",
      "author": "Chaudhary, Amar and Freed, Adrian and Wright, Matthew",
      "source_file": "147.html",
      "venue": "Audio Engineering Society 107th Convention",
      "keywords_list": [
        "OSW",
        "Open Sound World",
        "Music Programming Language",
        "Data Flow",
        "Visual Programming"
      ],
      "year_int": 1999
    },
    {
      "id": 148,
      "title": "Audio Applications of the Sound Description Interchange Format Standard",
      "booktitle": "Audio Engineering Society 107th Convention",
      "year": "1999",
      "pages": "preprint #5032",
      "publisher": "Audio Engineering Society",
      "organization": "Audio Engineering Society",
      "abstract": "The Sound Description Interchange Format (SDIF) is a recently established standard for the interchange of a variety of sound descriptions including spectral, time-domain, and higher-level models. SDIF consists of a specified data format framework and an extensible set of standard sound descriptions and their official representations. We begin with the history and goals of SDIF, followed by the specification of the data format and the standard sound descriptions. Then we describe some current applications of SDIF and conclude with a look at the future of SDIF.",
      "url": "http://cnmat.berkeley.edu/publication/audio_applications_sound_description_interchange_format_standard",
      "author": "Wright, Matthew and Chaudhary, Amar and Freed, Adrian and Khoury, Sami and Wessel, David",
      "source_file": "148.html",
      "venue": "Audio Engineering Society 107th Convention",
      "year_int": 1999
    },
    {
      "id": 149,
      "title": "Bidirectional AES/EBU Digital Audio and Remote Power over a Single Cable",
      "booktitle": "Audio Engineering Society 107th Convention",
      "year": "1999",
      "publisher": "Audio Engineering Society",
      "organization": "Audio Engineering Society",
      "address": "New York, NY",
      "abstract": "Although the AES/EBU digital audio standard has already been adapted to optical, twisted pair [1] and coaxial cables [2]. This paper explores cabling options for new and emerging applications of digital audio communications. Enabling features include remotely powering devices over the audio cable and bi-directional communications. Remote power benefits both ends of the audio reproduction chain: microphones and instrument pickups on the input end and loudspeakers and headphones on the output end. Bi-directionality is an important feature because so many audio applications have inputs and outputs that are physically proximate, e.g., headset microphones with monitoring, stage boxes, and signal processing effects. In contrast to studio situations, the aforementioned \"live\" audio applications require processing elements to be physically dispersed in locations where it may be expensive or inconvenient to provide electrical power. Note that although a guitar, for example, is primarily a source of audio, the guitarist can certainly take advantage of audio sent down a single cable to provide a monitor audio feed, or a tuning reference. Providing headset monitoring is now widely accepted practice for singers in live performance application, favoring a bi-directional audio link. There are also opportunities in digital audio applications to exploit bi-directional communication of gestures and indicator values [3]. The remote power requirement precludes optical and wireless approaches, leaving twisted pair and coaxial as the main alternatives. The computer industry has recently focussed on twisted pair solutions with USB [4] and Firewire [5]. GMICS [6], an interesting new proposal from Gibson Guitar, adopts 100BaseT Catagory 5 cable as one possible physical layer implementation for a multi-channel protocol supporting powered, bi-directional communication.\nThis paper communicates results of exploratory experiments with other physical layer options: coaxial and tri-axial cable.",
      "url": "http://cnmat.berkeley.edu/publication/bidirectional_aes_ebu_digital_audio_and_remote_power_over_single_cable",
      "author": "Freed, Adrian",
      "source_file": "149.html",
      "venue": "Audio Engineering Society 107th Convention",
      "year_int": 1999
    },
    {
      "id": 150,
      "title": "Exploiting Parallelism in Real-Time Music and Audio Applications",
      "year": "1999",
      "pages": "49-54",
      "publisher": "Springer Berlin",
      "organization": "Springer Berlin",
      "address": "Heidelberg",
      "abstract": "We introduce a scalable, extensible object-oriented system developed primarily for signal processing and synthesis for musical and multimedia applications. The main performance issue with these applications concerns functions of discrete-time. Novel techniques exploit fine-grain parallelism in the calculation of these functions to allow users to express them at a high-level in C++. New scheduling strategies are used to exploit symmetric multiprocessors with emphasis on special hard real-time constraints.",
      "isbn": "978-3-540-66818-3",
      "url": "http://www.springerlink.com/content/81314334t1w10x48/",
      "author": "Chaudhary, Amar and Freed, Adrian and Wessel, David",
      "source_file": "150.html",
      "year_int": 1999
    },
    {
      "id": 151,
      "title": "Musical Applications of New Filter Extensions to Max/MSP",
      "booktitle": "International Computer Music Conference",
      "year": "1999",
      "pages": "504-507",
      "publisher": "International Computer Music Association",
      "organization": "International Computer Music Association",
      "address": "Beijing, China",
      "abstract": "We introduce three new Max/MSP signal processing extensions that efficiently implement IIR filters: biquadbank~, peqbank~ and resonators~. After describing their common properties, specific features and new support objects, we conclude the paper with a summary of their musical applications.",
      "keywords": "resonances, biquads, parametric eq",
      "url": "http://cnmat.berkeley.edu/publication/musical_applications_new_filter_extensions_max_msp",
      "author": "Jehan, Tristan and Freed, Adrian and Dudas, Richard",
      "source_file": "151.html",
      "venue": "International Computer Music Conference",
      "keywords_list": [
        "resonances",
        "biquads",
        "parametric eq"
      ],
      "year_int": 1999
    },
    {
      "id": 154,
      "title": "Visualization, Editing and Spatialization of Timbral Resources using the OSE Framework",
      "booktitle": "Audio Engineering Society 107th Convention",
      "year": "1999",
      "pages": "preprint #5027",
      "publisher": "Audio Engineering Society",
      "organization": "Audio Engineering Society",
      "address": "New York, NY",
      "abstract": "OpenSoundEdit is a sound-editing system that provides a unified three-dimensional user interface to edit complex sounds composed from different representations. Sounds are mapped onto a single three-dimensional space representing time, amplitude and a view-dependent third dimension (e.g., frequency, channel number, etc.) Sounds can be edited by direct manipulation on the 3D display window or by using additional representation-specific controls. Users can also model the 3D location and movement of sounds in a room. As the user edits sounds, changes can be heard through a synthesis server, an application that generates sound in response to commands supplied by OSE. OSE is implemented using extensible, multi-platform technologies. This paper illustrates the features of OSE with examples of several sound representations.",
      "keywords": "OSE, spatialization",
      "url": "http://cnmat.berkeley.edu/publication/visualization_editing_and_spatialization_timbral_resources_using_ose_framework",
      "author": "Chaudhary, Amar and Freed, Adrian",
      "source_file": "154.html",
      "venue": "Audio Engineering Society 107th Convention",
      "keywords_list": [
        "OSE",
        "spatialization"
      ],
      "year_int": 1999
    },
    {
      "id": 155,
      "title": "Volumetric Modeling of Acoustic Fields for Musical Sound Design in a New Sound Spatialization Theatre",
      "booktitle": "International Computer Music Conference",
      "year": "1999",
      "pages": "488-491",
      "publisher": "International Computer Music Association",
      "organization": "International Computer Music Association",
      "address": "Beijing, China",
      "abstract": "A new tool for real-time visualization of acoustic sound fields has been developed for CNMATs sound spatialization theatre. Unique features of the theatre and the acoustic and volumetric modeling software are described.",
      "url": "http://cnmat.berkeley.edu/publication/volumetric_modeling_acoustic_fields_musical_sound_design_new_sound_spatialization_theatre",
      "author": "Kaup, Arnold and Freed, Adrian and Khoury, Sami and Wessel, David",
      "source_file": "155.html",
      "venue": "International Computer Music Conference",
      "year_int": 1999
    },
    {
      "id": 156,
      "title": "A 3D Graphical User Interface for Resonance Modeling",
      "booktitle": "International Computer Music Conference",
      "year": "1998",
      "publisher": "International Computer Music Association",
      "organization": "International Computer Music Association",
      "address": "Ann Arbor, Michigan",
      "abstract": "Resonance models describe a wide variety of musical sounds using perceptually-meaningful parameters. The compact representation of resonance models allows efficient storage, modification and real-time implementation. Widespread adoption of resonance models has been hampered by the lack of specialized tools to display and manipulate them. We demonstrate here a novel 3D editor designed for musicians, composers and sound designers to manipulate sounds described by resonance models. Visualization and modification of resonance parameters is based on familiar metaphors from the physical world. The editor is a component of OpenSoundEdit, a portable sound-editing framework.",
      "url": "http://cnmat.berkeley.edu/publication/3d_graphical_user_interface_resonance_modeling",
      "author": "Chaudhary, Amar and Freed, Adrian and Khoury, Sami and Wessel, David",
      "source_file": "156.html",
      "venue": "International Computer Music Conference",
      "year_int": 1998
    },
    {
      "id": 157,
      "title": "Communication of Musical Gesture using the AES/EBU Digital Audio Standard",
      "booktitle": "International Computer Music Conference",
      "year": "1998",
      "pages": "220-223",
      "publisher": "International Computer Music Association",
      "organization": "International Computer Music Association",
      "address": "Ann Arbor, Michigan",
      "abstract": "We have adapted the AES/EBU digital audio standard to the coding and transmission of transduced gestures. We discuss the advantages of the AES/EBU standard over MIDI and other candidate methods and describe alternative mappings of gestural data to the audio streams of the AES/EBU protocol. We conclude with a description of a reactive glove system and a continuous position-sensing keyboard controller using AES/EBU communications.",
      "url": "http://cnmat.berkeley.edu/publication/communication_musical_gesture_using_aes_ebu_digital_audio_standard",
      "author": "Freed, Adrian and Wessel, David",
      "source_file": "157.html",
      "venue": "International Computer Music Conference",
      "year_int": 1998
    },
    {
      "id": 159,
      "title": "Music Programming with the new Features of Standard C++",
      "booktitle": "International Computer Music Conference",
      "year": "1998",
      "pages": "244-247",
      "publisher": "International Computer Music Association",
      "organization": "International Computer Music Association",
      "address": "Ann Arbor, Michigan",
      "abstract": "Object-oriented programming using C++ classes is established practice in the general programming community and is beginning in computer music applications. However, large components of computer music systems are still commonly written in the C programming language, either because object-orientation is felt unnecessary or more often because of efficiency concerns. Such concerns are central to successful implementations of reactive performance-oriented computer music systems. By judicious use of new features of the recently established ISO Standard C++ , real-time computer music applications may be developed that are more efficient and reliable than typical C programs, easier to understand and write, and easier to optimize for a particular operating environment. This paper reviews new features of ISO C++ relevant to reactive music system programming and illustrates by example a new programming style for musical applications that exploits unique strengths of C++.",
      "url": "http://cnmat.berkeley.edu/publication/music_programming_new_features_standard_c",
      "author": "Freed, Adrian and Chaudhary, Amar",
      "source_file": "159.html",
      "venue": "International Computer Music Conference",
      "year_int": 1998
    },
    {
      "id": 160,
      "title": "New Applications of the Sound Description Interchange Format",
      "booktitle": "International Computer Music Conference",
      "year": "1998",
      "pages": "276-279",
      "publisher": "International Computer Music Association",
      "organization": "International Computer Music Association",
      "address": "Ann Arbor, Michigan",
      "abstract": "The Sound Description Interchange Format (SDIF) is a recently adopted standard that can store a variety of sound representations including spectral, time domain, and higher-level models. SDIF consists of a specified data format and a set of standard sound descriptions and their official representation. SDIF is flexible in that new sound descriptions can be represented, and new kinds of data can be added to existing sound descriptions, facilitating innovation and research. This paper describes the goals and design of SDIF and its standard frame types, followed by a review of recent SDIF work at CNMAT, IRCAM, and IUA.",
      "url": "http://cnmat.berkeley.edu/publication/new_applications_sound_description_interchange_format",
      "author": "Wright, Matthew and Chaudhary, Amar and Freed, Adrian and Wessel, David and Rodet, Xavier and Virolle, Dominique and Woehrmann, Rolf and Serra, Xavier",
      "source_file": "160.html",
      "venue": "International Computer Music Conference",
      "year_int": 1998
    },
    {
      "id": 162,
      "title": "OpenSoundEdit: An Interactive Visualization and Editing Framework for Timbral Resources",
      "booktitle": "International Computer Music Conference",
      "year": "1998",
      "publisher": "International Computer Music Association",
      "organization": "International Computer Music Association",
      "address": "Ann Arbor, Michigan",
      "abstract": "OpenSoundEdit is a sound-editing system that provides a unified three-dimensional user interface to edit complex sounds composed from different representations. Sounds are mapped onto a single three-dimensional space representing time, amplitude and a view-dependent third dimension (e.g., frequency, channel number, etc.) Sounds can be edited by direct manipulation on the 3D display window or using additional representation-specific controls. As the user edits sounds, changes can be heard through a synthesis server, an application that generates sound in response to commands supplied by OSE. OSE is implemented using extensible, multi-platform technologies. This paper illustrates the features of OSE with sinusoidal-track and resonance models.",
      "url": "http://cnmat.berkeley.edu/publication/opensoundedit_interactive_visualization_and_editing_framework_timbral_resources_0",
      "author": "Chaudhary, Amar and Freed, Adrian and Rowe, Lawrence",
      "source_file": "162.html",
      "venue": "International Computer Music Conference",
      "year_int": 1998
    },
    {
      "id": 164,
      "title": "Volumetric Modeling of Acoustic Fields in CNMAT's Sound Spatialization Theatre",
      "booktitle": "Audio Engineering Society 104th Convention",
      "year": "1998",
      "publisher": "Audio Engineering Society",
      "organization": "Audio Engineering Society",
      "address": "San Francisco, CA",
      "abstract": "A new tool for real-time visualization of acoustic sound fields has been developed for a new sound spatialization theatre. Unique features of the theatre and the acoustic and volumetric modeling software are described.",
      "url": "http://cnmat.berkeley.edu/publication/volumetric_modeling_acoustic_fields_cnmats_sound_spatialization_theatre",
      "author": "Khoury, Sami and Freed, Adrian and Wessel, David",
      "source_file": "164.html",
      "venue": "Audio Engineering Society 104th Convention",
      "year_int": 1998
    },
    {
      "id": 165,
      "title": "Volumetric Visualization of Acoustic Fields in CNMAT's Sound Spatialization Theatre",
      "booktitle": "Institute of Electrical and Electronics Engineers Visualization 98",
      "year": "1998",
      "pages": "439-442 & 562",
      "publisher": "Institute of Electrical and Electronics Engineers",
      "organization": "Institute of Electrical and Electronics Engineers",
      "address": "Research Triangle Park, NC",
      "abstract": "A new tool for real-time visualization of acoustic sound fields has been developed for a new sound spatialization theatre. The theatre is described and several applications of the acoustic and volumetric modeling software are presented.",
      "url": "http://cnmat.berkeley.edu/publication/volumetric_visualization_acoustic_fields_cnmats_sound_spatialization_theatre",
      "author": "Khoury, Sami and Freed, Adrian and Wessel, David",
      "source_file": "165.html",
      "venue": "Institute of Electrical and Electronics Engineers Visualization 98",
      "year_int": 1998
    },
    {
      "id": 170,
      "title": "Open Sound Control: A New Protocol for Communicating with Sound Synthesizers",
      "booktitle": "International Computer Music Conference (ICMC)",
      "year": "1997",
      "month": "1997",
      "pages": "101-104",
      "publisher": "International Computer Music Association (ICMA)",
      "organization": "International Computer Music Association (ICMA)",
      "address": "Thessaloniki, Hellas",
      "abstract": "Open SoundControl is a new protocol for communication among computers, sound synthesizers, and other multimedia devices that is optimized for modern networking technology. Entities within a system are addressed individually by an open-ended URL-style symbolic naming scheme that includes a powerful pattern matching language to specify multiple recipients of a single message. We provide high resolution time tags and a mechanism for specifying groups of messages whose effects are to occur simultaneously. There is also a mechanism for dynamically querying an Open SoundControl system to find out its capabilities and documentation of its features.",
      "keywords": "OSC",
      "url": "http://cnmat.berkeley.edu/publication/open_sound_control_new_protocol_communicating_sound_synthesizers",
      "attachments": "http://www.adrianfreed.com/sites/default/files/open-soundcontrol-a-new-protocol-for-communicating-with.pdf",
      "author": "Wright, Matthew and Freed, Adrian",
      "source_file": "170.html",
      "venue": "International Computer Music Conference (ICMC)",
      "keywords_list": [
        "OSC"
      ],
      "year_int": 1997
    },
    {
      "id": 172,
      "title": "Music And Audio Technology Projects To Stir Your Imagination",
      "journal": "Computer Music Journal",
      "volume": "20",
      "year": "1996",
      "note": "UCB Music ML1 .C857UCB Music MUSI F204UCD Shields ML1 C64 StacksUCI Main Lib [Microfilm] S 000405 Curr Per RmUCI Main Lib ML 1 C857UCLA Music ML 1 C857UCLA Music ML 1 C857UCR Rivera ML1 C63UCSB Arts Lib ML1 .C857 MusicUCSC McHenry ML1.C66UCSD Music ML 1 C857 Current PeriodicalsCSU Dom Hills ML1 .C857CSU Hayward ML 1 .C857CSU Humboldt ML1.C857CSU LongBeach ML1.C857CSU Los Ang No call numberCSU Northridg No call numberCSU Pomona ML1.C857CSU Sacrmento No call numberCSU San Bern No call numberCSU San Diego No call numberCSU San Fran No call numberCSU San Jose ML1.C857CSU San Luis ML 1 .C857CSU Stanisls No call numberSTAN Math/CS ML1 .C7385 SerialsSTAN Music ML1 .C7385 RefSTAN Music ML1 .C7385 CCRMA - Contact Music ReferenceNews ItemSUM",
      "pages": "4-4",
      "keywords": "Engineering, Computing & Technology (TECH)",
      "url": "http://cnmat.berkeley.edu/publication/music_and_audio_technology_projects_stir_your_imagination",
      "author": "Freed, Adrian",
      "source_file": "172.html",
      "keywords_list": [
        "Engineering",
        "Computing & Technology (TECH)"
      ],
      "year_int": 1996
    },
    {
      "id": 173,
      "title": "Bring Your Own Control Additive Synthesis",
      "booktitle": "International Computer Music Conference",
      "year": "1995",
      "pages": "303-306",
      "publisher": "International Computer Music Association",
      "organization": "International Computer Music Association",
      "address": "Banff, Canada",
      "abstract": "Twenty years have passed since the first digital signal processing systems were used for real-time additive synthesis of sound [Chamberlin 76, DiGiugno 76]. This year, new algorithmic and implementation developments have resulted in passing the symbolic milestone of 1000 sinusoidal partials at 44.1kHz sampling rate on a desktop computer (SGI Power Indigo 2). Unlike the systems of two decades ago, this milestone was achieved with frequency and amplitude interpolated partials, and an efficient mechanism by which partials can be controlled during musical performance. This control mechanism, BYO (Bring Your Own), is the subject of this paper.",
      "url": "http://cnmat.org/~adrian/ICMC95.html",
      "author": "Freed, Adrian",
      "source_file": "173.html",
      "venue": "International Computer Music Conference",
      "year_int": 1995
    },
    {
      "id": 174,
      "title": "Improving Graphical User Interfaces For Computer Music Applications",
      "journal": "Computer Music Journal",
      "volume": "19",
      "year": "1995",
      "month": "1995",
      "pages": "4-5",
      "abstract": "This note is a plea to the computer music community to aim higher in the development of new graphical tools and better graphical user interfaces (GUIs) for computer music applications.",
      "keywords": "User Interfaces, Computer Music, 3D, GUI",
      "doi": "10.2307/3680983",
      "url": "http://www.jstor.org/stable/3680983",
      "author": "Freed, Adrian",
      "source_file": "174.html",
      "keywords_list": [
        "User Interfaces",
        "Computer Music",
        "3D",
        "GUI"
      ],
      "year_int": 1995
    },
    {
      "id": 175,
      "title": "Codevelopment of user interface, control and digital signal processing with the HTM environment",
      "booktitle": "5th International Conference on Signal Processing Applications and Technology",
      "volume": "2",
      "year": "1994",
      "note": "18-21 Oct. 1994",
      "month": "1994/04/16",
      "pages": "1179-83",
      "publisher": "DSP Associates",
      "organization": "DSP Associates",
      "address": "Dallas, TX, USA",
      "abstract": "The HTM system supports parallel development of the basic elements of DSP applications: a user interface, control structure and digital signal processing code. The user interface and control is central in many new DSP applications. To facilitate successful collaborative development between a team of specialists, HTM tools: 1) support the construction of complete system prototypes that run in real-time at the full (audio) sample rate of the application, and, 2) allow designers to use tools most productive for and familiar to them regardless of the computing platform the tools require. Until recently, general-purpose computers have been too slow to use alone for rapid prototyping of DSP algorithms and control strategies. The tough real-time and arithmetic computational performance demands of DSP applications are usually satisfied by supplementing general-purpose computers with multiple signal processors. Unfortunately, these signal processing systems are expensive and harder to program than their controlling computers. The HTM system exploits advances in recently introduced superscaler RISC workstations: increased arithmetic computational performance, compiler quality, real-time scheduling and networking performance. The HTM system components include: 1) a library of stateless signal processing vector functions, 2) a library of higher level \"unit generators\", 3) real-time resource allocation functions for the SGI workstation, 4) TCP/IP support for Opcode MAX, Matlab, and other Macintosh and UNIX clients, and, 5) a collection of example applications including a singing voice sound synthesizer",
      "url": "http://cnmat.berkeley.edu/publication/codevelopment_user_interface_control_and_digital_signal_processing_htm_environment",
      "author": "Freed, Adrian",
      "source_file": "175.html",
      "venue": "5th International Conference on Signal Processing Applications and Technology",
      "year_int": 1994
    },
    {
      "id": 176,
      "title": "Real-Time Additive Synthesis Controlled by a Mixture of Neural-Networks and Direct Manipulation of Physical and Perceptual Attributes",
      "booktitle": "International Computer Music Conference",
      "year": "1994",
      "note": "Only abstract and references appear in proceedings. \"(This paper will be provided as addenda upon receipt. (editor))\"",
      "pages": "362-363",
      "publisher": "International Computer Music Association",
      "organization": "International Computer Music Association",
      "address": "Aarhus, Denmark",
      "url": "http://cnmat.berkeley.edu/publication/real_time_additive_synthesis_controlled_mixture_neural_networks_and_direct_manipulation_physical_and",
      "author": "Freed, Adrian and Goldstein, Mark and Goodwin, Michael and Lee, Michael and McMillen, Keith and Rodet, Xavier and Wessel, David and Wright, Matthew",
      "source_file": "176.html",
      "venue": "International Computer Music Conference",
      "year_int": 1994
    },
    {
      "id": 178,
      "title": "Synthesis and control of hundreds of sinusoidal partials on a desktop computer without custom hardware",
      "booktitle": "Fourth International Conference on Signal Processing Applications and Technology",
      "volume": "2",
      "year": "1993",
      "note": "28 Sept.-1 Oct. 1993",
      "month": "1993/04/16",
      "pages": "1024-30",
      "publisher": "DSP Associates",
      "organization": "DSP Associates",
      "address": "Santa Clara, CA",
      "abstract": "This paper describes a new technique for additive synthesis, the FFT-inverse method. This technique provides an efficient method for adding colored noise to sinusoidal partials, which is needed to successfully synthesize speech and the Japanese Shakuhachi flute, for example. Before describing the details of the FFT-inverse method, additive synthesis will be compared to the popular synthesis methods: frequency modulation and digital sampling",
      "url": "http://cnmat.berkeley.edu/publication/synthesis_and_control_hundreds_sinusoidal_partials_desktop_computer_without_custom_hardware",
      "author": "Freed, Adrian and Rodet, Xavier and Depalle, Philippe",
      "source_file": "178.html",
      "venue": "Fourth International Conference on Signal Processing Applications and Technology",
      "year_int": 1993
    },
    {
      "id": 179,
      "title": "Guidelines for signal processing applications in C",
      "journal": "C Users Journal",
      "year": "1993",
      "month": "1993",
      "pages": "85(9)",
      "abstract": "Signal processing algorithms programmed in C must be as efficient and clear as possible because of their importance to time-critical applications. Careful programming and use of an optimizing compiler for a modern processor make it possible to make the algorithms efficient without sacrificing clarity. The first audio signal processing library function listed uses C's",
      "url": "http://collaboration.cmc.ec.gc.ca/science/rpn/biblio/ddj/Website/articles/CUJ/1993/9309/freed/freed.htm",
      "author": "Freed, Adrian",
      "source_file": "179.html",
      "year_int": 1993
    },
    {
      "id": 180,
      "title": "Performance, Synthesis and Control of Additive Synthesis on a Desktop Computer Using FFT-1",
      "booktitle": "International Computer Music Conference",
      "volume": "19",
      "year": "1993",
      "pages": "98-101",
      "publisher": "International Computer Music Association",
      "organization": "International Computer Music Association",
      "address": "Waseda University",
      "url": "http://cnmat.berkeley.edu/publication/performance_synthesis_and_control_additive_synthesis_desktop_computer_using_fft_1",
      "author": "Freed, Adrian and Rodet, Xavier and Depalle, Philippe",
      "source_file": "180.html",
      "venue": "International Computer Music Conference",
      "year_int": 1993
    },
    {
      "id": 181,
      "title": "Neural networks for simultaneous classification and parameter estimation in musical instrument control",
      "booktitle": "Adaptive and Learning Systems",
      "series": "Proceedings of the SPIE - The International Society for Optical Engineering",
      "volume": "1706",
      "year": "1992",
      "month": "1992/04/16",
      "pages": "244-55",
      "address": "Orlando, FL, USA",
      "abstract": "In this report we present our tools for prototyping adaptive user interfaces in the context of real-time musical instrument control. Characteristic of most human communication is the simultaneous use of classified events and estimated parameters. We have integrated a neural network object into the MAX language to explore adaptive user interfaces that considers these facets of human communication. By placing the neural processing in the context of a flexible real-time musical programming environment, we can rapidly prototype experiments on applications of adaptive interfaces and learning systems to musical problems. We have trained networks to recognize gestures from a Mathews radio baton, Nintendo Power GloveTM, and MIDI keyboard gestural input devices. In one experiment, a network successfully extracted classification and attribute data from gestural contours transduced by a continuous space controller, suggesting their application in the interpretation of conducting gestures and musical instrument control. We discuss network architectures, low-level features extracted for the networks to operate on, training methods, and musical applications of adaptive techniques.",
      "keywords": "machine learning, neural networks",
      "doi": "10.1117/12.139949",
      "url": "https://www.spiedigitallibrary.org/conference-proceedings-of-spie/1706/1/Neural-networks-for-simultaneous-classification-and-parameter-estimation-in-musical/10.1117/12.139949.short?SSO=1",
      "author": "Lee, Michael and Freed, Adrian and Wessel, David",
      "source_file": "181.html",
      "venue": "Adaptive and Learning Systems",
      "keywords_list": [
        "machine learning",
        "neural networks"
      ],
      "year_int": 1992
    },
    {
      "id": 184,
      "title": "Real-Time Neural Network Processing of Gestural and Acoustic Signals",
      "booktitle": "17th International Computer Music Conference",
      "year": "1991",
      "month": "1991",
      "pages": "277-280",
      "publisher": "International Computer Music Association",
      "organization": "International Computer Music Association",
      "address": "Montreal",
      "abstract": "A new object added to the MAX language facilitates neural computations. It makes MAX an ideal environment for rapid experiments in the application of adaptive techniques to musical problems. Networks in MAX were programmed to recognize gestures from a MIDI keyboard, a Zeta MIDI guitar, a radio drum, and Lightning. In one experiment, a network issued messages to synchronize MIDI guitar motives with images from a HyperMedia work. Configurations and musical applications of the networks are also discussed. (authors)",
      "issn": "2223-3881",
      "url": "http://hdl.handle.net/2027/spo.bbp2372.1991.064",
      "attachments": "http://www.adrianfreed.com/sites/default/files/real-time-neural-network-processing-of-gestural-and-acoustic.pdf",
      "author": "Lee, Michael and Freed, Adrian and Wessel, David",
      "source_file": "184.html",
      "venue": "17th International Computer Music Conference",
      "year_int": 1991
    },
    {
      "id": 185,
      "title": "DSP driver software for performance-oriented music synthesis systems",
      "booktitle": "International Computer Music Conference",
      "year": "1990",
      "note": "10-15 Sept. 1990",
      "month": "1990/04/16",
      "pages": "79-81",
      "publisher": "International Computer Music Association",
      "organization": "International Computer Music Association",
      "address": "Glasgow, UK",
      "abstract": "A software interface between high level musical performance environments and low level software drivers for digital signal multiprocessors is described. The authors consider the implementation of a driver using the proposed interface for low-cost, commercially available, DSP56001-based signal processors (Reson8, Sound Accelerator, AudioMedia) and the way the driver is used from within the HyperCard and MAX environments",
      "url": "http://hdl.handle.net/2027/spo.bbp2372.1990.019",
      "author": "Freed, Adrian and Gordon, Keith",
      "source_file": "185.html",
      "venue": "International Computer Music Conference",
      "year_int": 1990
    },
    {
      "id": 186,
      "title": "A Digital Signal Multiprocessor and its Musical Application",
      "booktitle": "15th International Computer Music Conference",
      "year": "1989",
      "pages": "17-20",
      "publisher": "International Computer Music Association",
      "organization": "International Computer Music Association",
      "address": "Ohio State University",
      "keywords": "resonances, reson8, macmix, DSP multiprocessor",
      "url": "http://cnmat.berkeley.edu/publication/digital_signal_multiprocessor_and_its_musical_application",
      "author": "Barriere, J-B. and Baisnee, P-F. and Freed, Adrian and Baudot, M-D.",
      "source_file": "186.html",
      "venue": "15th International Computer Music Conference",
      "keywords_list": [
        "resonances",
        "reson8",
        "macmix",
        "DSP multiprocessor"
      ],
      "year_int": 1989
    },
    {
      "id": 188,
      "title": "The Center for New Music and Audio Technologies: Studio Report 1989",
      "booktitle": "International Computer Music Conference",
      "year": "1989",
      "pages": "336-339",
      "publisher": "International Computer Music Association",
      "organization": "International Computer Music Association",
      "address": "San Francisco",
      "abstract": "The University of California at Berkeley has established a new facility for music research, composition, performance, and education. The Center for New Music and Audio Technologies, CNMAT, provides a common ground for music and audio related research activities in the departments of music, electrical engineering and computer sciences (EECS), psychology, linguistics, and architecture. The facility will provide an environment consisting of music and audio workstations, acoustically isolated studios, and an experimental performance space. CNMAT specific projects include computer architectures for sound synthesis and processing, research on music perception and cognition, the development of new media resources for music scholarship and education, and the development of software and controllers for live performance computer music. (author)",
      "url": "http://cnmat.berkeley.edu/publication/center_new_music_and_audio_technologies_cnmat_studio_report",
      "author": "Wessel, David and Felciano, Richard and Freed, Adrian and Wawrzynek, John",
      "source_file": "188.html",
      "venue": "International Computer Music Conference",
      "year_int": 1989
    },
    {
      "id": 189,
      "title": "Understanding Time in Distributed Music Systems",
      "booktitle": "International Computer Music Conference (ICMC)",
      "year": "1984",
      "address": "Paris, France",
      "abstract": "Describes a set of constraints based on psychoacoustic considerations required to build systems with musically useful latency performance properties. Introduces the basic requirements of such systems and identifies Barry Vercoe{\\textquoteright",
      "author": "Freed, Adrian",
      "editor": "CMA",
      "source_file": "189.html",
      "venue": "International Computer Music Conference (ICMC)",
      "year_int": 1984
    },
    {
      "id": 197,
      "title": "An audio oscillator using digital ICs",
      "journal": "Electronics Australia",
      "volume": "39",
      "number": "10",
      "year": "1978",
      "note": "Jan. 1978vol.39,no.10",
      "month": "01/1978",
      "pages": "53-4",
      "abstract": "This unusual audio oscillator circuit uses digital waveform synthesis to generate both sine and square waves. It has slightly higher distortion than the conventional Wien bridge configuration, but overcomes the stability problems inherent in the latter",
      "attachments": "http://www.adrianfreed.com/sites/default/files/EA 1978-01 January_text.pdf",
      "author": "Freed, A.",
      "source_file": "197.html",
      "year_int": 1978
    },
    {
      "id": 199,
      "title": "Le Blit - Video",
      "year": "1984",
      "note": "Blit",
      "publisher": "IRCAM",
      "address": "Paris, France",
      "author": "Freed, Adrian and Hawley, Michael",
      "source_file": "199.html",
      "year_int": 1984
    },
    {
      "title": "Joy of Hex and the Cordless Guitars of the Future",
      "booktitle": "Identites de la Guitare Electrique",
      "year": "2009",
      "month": "05/18/2009",
      "address": "Paris, France",
      "abstract": "My view of the primary identity of the electric guitar is as a vehicle for sonic, and stylistic imitation. I started my talk with a sound example from Hendrix{\\textquoteright",
      "author": "Freed, Adrian",
      "source_file": "231.html",
      "venue": "Identites de la Guitare Electrique",
      "year_int": 2009
    },
    {
      "title": "The Fingerboard Instruments: Reframing Lutherie without Strings",
      "booktitle": "3rd Music and Cognition Conference",
      "year": "2010",
      "month": "02/2010",
      "publisher": "Schulich School of Music, McGill University",
      "organization": "Schulich School of Music, McGill University",
      "address": "McGill University, Montreal",
      "abstract": "Recent attempts to extend organology to add useful classifications of electrophones and music controllers have rightly focussed on music performance gestures . The naive approach of classification by gestural types (strum, pluck, hit, slap etc..) fails because gestures say more about musicians and their music than their instruments. The routine development in institutions of \"extended\" techniques and the drive for uniqueness and theatricality in popular music practice results in each instrument being associated with an increasing number of different performance gestures defeating useful, differentiating classifications. Also, Hendrix{\\textquoteright",
      "isbn": "978-0-7717-0673-8",
      "url": "http://www.idmil.org/_media/publications/abstract_proceedings_mg3_final.pdf",
      "author": "Freed, Adrian",
      "source_file": "232.html",
      "venue": "3rd Music and Cognition Conference",
      "year_int": 2010
    },
    {
      "id": 336,
      "title": "CPLOT, a Graphics Package for Tektronix terminals.",
      "year": "1979",
      "publisher": "AGSM released on magnetic tape as part of AUSAM release to AUUGN members",
      "abstract": "CPLOT contains a set of C procedures, designed to make interactive graphics programs easy to write and fast to run. This manual describes the \"cplot\" graphics library which was designed with Tektronix terminals and the C language in mind. It is now firmly based on the standard i/o library and uses it internally.",
      "author": "Freed, Adrian and Elsworthy, Graeme and Grevis, Richard",
      "source_file": "336.html",
      "year_int": 1979
    },
    {
      "title": "Support Vector Machine Learning for Gesture Signal Estimation with a Piezo Resistive Fabric Touch Surface",
      "booktitle": "NIME",
      "year": "2010",
      "note": "Draft paper in submission. Do not redistribute without permission.",
      "address": "Sydney, Australia",
      "abstract": "The design of an unusually simple fabric-based touch and pressure sensor is introduced. An analysis of the raw sensor data is shown to have significant non-linearities and non-uniform noise. Using support vector machine learning and a state-dependent adaptive filter it is demonstrated that these problems can be overcome. The method is evaluated quantitatively using a statistical estimate of the instantaneous rate of information transfer. The SVM regression alone is shown to improve the gesture signal information rate by up to 20\\% with zero added latency, and in combination with filtering by 40\\% subject to a constant latency bound of 10 milliseconds.",
      "author": "Schmeder, Andrew and Freed, Adrian",
      "source_file": "378.html",
      "venue": "NIME",
      "year_int": 2010
    },
    {
      "title": "Visualizations and Interaction Strategies for Hybridization Interfaces",
      "booktitle": "New Instruments for Musical Expression",
      "year": "2010",
      "address": "Sydney, Australia",
      "abstract": "We present two complementary approaches for the visualization and interaction of dimensionally reduced data sets using hybridization interfaces. Our implementations privilege syncretic systems allowing one to explore combinations (hybrids) of disparate elements of a data set through their placement in a 2-D space. The first approach allows for the placement of data points anywhere on the plane according to an anticipated performance strategy. The contribution (weight) of each data point varies according to a power function of the distance from the control cursor. The second approach uses constrained vertex colored triangulations of manifolds with labels placed at the vertices of triangular tiles. Weights are computed by barycentric projection of the control cursor position.",
      "attachments": "http://www.adrianfreed.com/sites/default/files/rbfi.pdf",
      "author": "Freed, Adrian and MacCallum, John and Schmeder, Andy and Wessel, David",
      "source_file": "379.html",
      "venue": "New Instruments for Musical Expression",
      "year_int": 2010
    },
    {
      "title": "Best Practices for Open Sound Control",
      "booktitle": "Linux Audio Conference",
      "year": "2010",
      "address": "Utrecht, NL",
      "abstract": "The structure of the Open Sound Control (OSC) content format is introduced with historical context. The needs for temporal synchronization and dynamic range of audio control data are described in terms of accuracy, precision, bit-depth, bit-rate, and sampling frequency. Specific details are given for the case of instrumental gesture control, spatial audio control and synthesis algorithm control. The consideration of various transport mechanisms used with OSC is discussed for datagram, serial and isochronous modes. A summary of design approaches for describing audio control data is shown, and the case is argued that multi-layered information-rich representations that support multiple strategies for describing semantic structure are necessary.",
      "author": "Schmeder, Andrew and Freed, Adrian and Wessel, David",
      "source_file": "380.html",
      "venue": "Linux Audio Conference",
      "year_int": 2010
    },
    {
      "id": 384,
      "title": "MacMix Processing and Synthesis UserHooks (now known as Audio Plug-in)",
      "year": "1989",
      "month": "12/1989",
      "type": "Software Documentation",
      "abstract": "Some MacMix users have asked for hooks into MacMix so that they can write their own external functions for signal processing and synthesis. This package describes a new facility for external functions.",
      "author": "Freed, Adrian",
      "source_file": "384.html",
      "year_int": 1989
    },
    {
      "id": 402,
      "title": "Specification Mining For Machine Improvisation With Formal Specifications",
      "journal": "ACM : Computers in Entertainment (Musical Metacreation)",
      "volume": "14",
      "number": "3",
      "year": "2016",
      "month": "2016",
      "abstract": "We address the problem of mining musical specifications from a training set of songs, and using these specifications in a machine improvisation system capable of generating improvisations imitating a given style of music. Our inspiration comes from Control Improvisation, which combines learning and synthesis from formal specifications. We learn from symbolic musical data specifications based on musical and general usage patterns. We use the mined specifications to ensure that an improvised musical sequence satisfies desirable properties given a harmonic context and musical form. We present a specification mining strategy based on finite state automata and Markov chains, and apply it to the problem of supervising the improvisation of blues songs. We present an analysis of the mined specifications and compare the results of supervised and unsupervised improvisations.",
      "doi": "10.1145/2967504",
      "url": "https://dl.acm.org/citation.cfm?doid=3023312.2967504",
      "attachments": "http://www.adrianfreed.com/sites/default/files/msm16.pdf",
      "author": "Valle, Rafael and Donzé, Alexandre",
      "source_file": "402.html",
      "year_int": 2016
    },
    {
      "id": 403,
      "title": "Learning and Visualizing Music Specifications Using Pattern Graphs",
      "booktitle": "International Society for Music Information Retrieval Conference",
      "year": "2016",
      "month": "2016",
      "address": "New York",
      "abstract": "We describe a system to learn and visualize specifications from song(s) in symbolic and audio formats. The core of our approach is based on a software engineering procedure called specification mining. Our procedure extracts patterns from feature vectors and uses them to build pattern graphs. The feature vectors are created by segmenting song(s) and extracting time and frequency domain features from them, such as chromagrams, chord degree and interval classification. The pattern graphs built on these feature vectors provide the likelihood of a pattern between nodes, as well as start and ending nodes. The pattern graphs learned from a song(s) describe formal specifications that can be used for human interpretable quantitatively and qualitatively song comparison or to perform supervisory control in machine improvisation. We offer results in song summarization, song and style validation and machine improvisation with formal specifications.",
      "url": "https://18798-presscdn-pagely.netdna-ssl.com/ismir2016/wp-content/uploads/sites/2294/2016/07/280_Paper.pdf",
      "attachments": "http://www.adrianfreed.com/sites/default/files/280_Paper.pdf",
      "author": "Valle, Rafael and J. Fremont, Daniel and Akkaya, Ilge and Donze, Alexandre and Freed, Adrian and S. Seshia, Sanjit",
      "source_file": "403.html",
      "venue": "International Society for Music Information Retrieval Conference",
      "year_int": 2016
    },
    {
      "id": 404,
      "title": "Experimental Collective Experience",
      "year": "2016",
      "address": "UC Irvine",
      "abstract": "We present different examples of experimentally varying collective experience in order to understand phenomena or qualities of experience such as embodiment, sense of other, pre-individual subjectivity, rhythm and entrainment, ethico-aesthetic improvisation in unmarked performative events.This includes methodological matters such as {\\textbullet",
      "keywords": "collective, entrainment, entrainment,, environment, experience,pre-individual, gestural, media, media,, movement, neurophenomenology, of, performance, phenomenology,, philosophy, realtime, responsive, rhythm, subjectivity,non-anthropocentric, temporality, timebased",
      "author": "Sha, Xin Wei and Solomonova, Elizaveta and Freed, Adrian and Veissière",
      "source_file": "404.html",
      "keywords_list": [
        "collective",
        "entrainment",
        "entrainment",
        "environment",
        "experience",
        "pre-individual",
        "gestural",
        "media",
        "media",
        "movement",
        "neurophenomenology",
        "of",
        "performance",
        "phenomenology",
        "philosophy",
        "realtime",
        "responsive",
        "rhythm",
        "subjectivity",
        "non-anthropocentric",
        "temporality",
        "timebased"
      ],
      "year_int": 2016
    },
    {
      "id": 405,
      "title": "David Wessel’s Slabs: a case study in Preventative Digital Musical Instrument Conservation",
      "booktitle": "SMC2016",
      "year": "2016",
      "month": "2016",
      "address": "Hamburg",
      "abstract": "David Wessel{\\textquoteright",
      "author": "Freed, Adrian",
      "source_file": "405.html",
      "venue": "SMC2016",
      "year_int": 2016
    },
    {
      "id": 406,
      "title": "Learning and Visualizing Music Specifications Using Pattern Graphs",
      "booktitle": "ISMIR 2016",
      "year": "2016",
      "address": "New York",
      "abstract": "We describe a system to learn and visualize specifications from song(s) in symbolic and audio formats. The core of our approach is based on a software engineering procedure called specification mining. Our procedure extracts patterns from feature vectors and uses them to build pattern graphs. The feature vectors are created by segmenting song(s) and extracting time and frequency domain features from them, such as chromagrams, chord degree and interval classification. The pattern graphs built on these feature vectors provide the likelihood of a pattern between nodes, as well as start and ending nodes. The pattern graphs learned from a song(s) describe formal specifications that can be used for human interpretable quantitatively and qualitatively song comparison or to perform supervisory control in machine improvisation. We offer results in song summarization, song and style validation and machine improvisation with formal specifications.",
      "attachments": "http://www.adrianfreed.com/sites/default/files/280_Paper_0.pdf",
      "author": "Valle, Rafael and Fremont, Daniel J. and Akkaya, Ilge and Donzé, Alexandre and Freed, Adrian and Seshia, Sanjit S.",
      "source_file": "406.html",
      "venue": "ISMIR 2016",
      "year_int": 2016
    },
    {
      "id": 407,
      "title": "o.OM: Structured-Functional Communication between Computer Music Systems using OSC and Odot",
      "booktitle": "FARM2016",
      "year": "2016",
      "publisher": "ACM",
      "organization": "ACM",
      "address": "Nara, Japan",
      "abstract": "O.{\\textemdash",
      "author": "Bresson, Jean and MacCallum, John and Freed, Adrian",
      "source_file": "407.html",
      "venue": "FARM2016",
      "year_int": 2016
    },
    {
      "id": 408,
      "title": "Sounds from the Electrified Human Body: Reconfigurations of Embodied and Encultured Knowledge from the Development of Electrosomatophones",
      "booktitle": "Bodies of Knowledge BOK2016",
      "year": "2016",
      "month": "2016",
      "publisher": "University of California",
      "organization": "University of California",
      "address": "UC Irvine",
      "abstract": "Until the twentieth century, fundamental discoveries of electricity were experienced and articulated by integrating the living and dead flesh of human and other animal bodies into electrical circuits. Examples of this include Watson{\\textquoteright",
      "url": "https://escholarship.org/uc/item/9p66b2bj",
      "attachments": "http://www.adrianfreed.com/sites/default/files/qt9p66b2bj.pdf",
      "author": "Freed, Adrian",
      "source_file": "408.html",
      "venue": "Bodies of Knowledge BOK2016",
      "year_int": 2016
    },
    {
      "id": 409,
      "title": "Batera : Drummer Agent with Style Learning and Interpolation",
      "booktitle": "Study Day On Computer Simulation Of Musical Creativity",
      "year": "2015",
      "month": "27/06/2015",
      "address": "University of Huddersfield, UK",
      "abstract": "In this work, we address the challenge of learning a drum agent that is capable of learning styles and interpolating between them during performance, taking into account rhythmic expressivity, musical sections and phrases and drum patterns.",
      "keywords": "improvisation,, learning, machine, music,",
      "url": "https://simulationofmusicalcreativity.wordpress.com/programme/#rafa",
      "author": "Valle, Rafael and Freed, Adrian",
      "source_file": "409.html",
      "venue": "Study Day On Computer Simulation Of Musical Creativity",
      "keywords_list": [
        "improvisation",
        "learning",
        "machine",
        "music"
      ],
      "year_int": 2015
    },
    {
      "id": 410,
      "title": "Antony: A Reimagining",
      "booktitle": "International Computer Music Conference",
      "year": "2015",
      "month": "25/09/2015",
      "address": "University of North Texas, Denton, TX, USA",
      "abstract": "We present a re-realization of David Wessel{\\textquoteright",
      "author": "MacCallum, John and Goodheart, Matthew and Freed, Adrian",
      "source_file": "410.html",
      "venue": "International Computer Music Conference",
      "year_int": 2015
    },
    {
      "id": 411,
      "title": "Dynamic Message-Oriented Middleware with Open Sound Control and Odot",
      "booktitle": "International Computer Music Conference",
      "year": "2015",
      "month": "25/09/2015",
      "address": "University of North Texas, Denton, TX, USA",
      "abstract": "We present recent work on odot, a system that extends Open Sound Control and facilitates the rapid and dynamic con- struction of Message-Oriented Middleware providing an in- teroperability layer for communication between applications. Unlike traditional middleware systems, odot, when embedded in a host environment, provides a node where computation can take place, allowing middleware to take shape dynami- cally as the needs of the system develop.",
      "author": "MacCallum, John and Gottfried, Rama and Rostovtsev, Ilya and Bresson, Jean and Freed, Adrian",
      "source_file": "411.html",
      "venue": "International Computer Music Conference",
      "year_int": 2015
    },
    {
      "id": 412,
      "title": "An Accessible Platform for Exploring Haptic Interactions with Co-located Capacitive and Piezoresistive Sensors",
      "booktitle": "TEI 2015",
      "year": "2015",
      "month": "2015",
      "publisher": "ACM",
      "organization": "ACM",
      "address": "Stanford, California",
      "abstract": "This paper introduces an open research platform for exploring haptic interactions with co-located, capacitive and piezoresistive sensors. The solution uses readily available material, hardware and software components and allows for experiments on many system levels from low-level material concerns up to high-level sensor fusion software. This provides the HCI community with a platform to accelerate explorations of the many applications that have opened up of sensor fusion in haptic interaction.",
      "isbn": "978-1-4503-3305-4",
      "doi": "10.1145/2677199.2680571",
      "url": "https://dl.acm.org/citation.cfm?id=2680571",
      "attachments": "http://www.adrianfreed.com/sites/default/files/ColocatedSensorsTEI2015optim.pdf",
      "author": "Freed, Adrian and Wessel, David",
      "source_file": "412.html",
      "venue": "TEI 2015",
      "year_int": 2015
    },
    {
      "id": 413,
      "title": "Simple Synchronization for Open Sound Control",
      "booktitle": "ICMC",
      "year": "2015",
      "publisher": "ICMA",
      "organization": "ICMA",
      "address": "Texas",
      "abstract": "Clock synchronisation is a mature and important aspect of distributed computing systems. Despite the importance of accurate timing in music, there are relatively few widely-applicable synchronisation solutions available to computer music practitioners. In this paper we present a simple OSC-based synchronisation method for wired and wireless applications, which is designed to be easy to apply and is shown to offer accuracy appropriate for fine-grained music applications. The proposed solution relies on a single master sending a synchronisation message to all slaves. Empirical studies with a heterogeneous network of 17 Wi-Fi slaves and five Ethernet slaves demonstrate that each homogeneous group is able to achieve a relative synchronisation accuracy of 166 us and 100 us respectively, offset from the master time by their respective network latencies. An acoustic localisation system is implemented to demonstrate an application that requires both accurate synchronisation and benefits from wireless connectivity. The system is shown to precisely locate a sound source with a standard deviation of 1.8 mm.",
      "author": "Madgwick, Sebastian and Mitchell, Thomas and Barreto, Carlos and Freed, Adrian",
      "source_file": "413.html",
      "venue": "ICMC",
      "year_int": 2015
    },
    {
      "id": 414,
      "title": "Symbolic Music Similarity using Neuronal Periodicity and Dynamic Programming",
      "journal": "Mathematics and Computation in Music",
      "year": "2015",
      "publisher": "Springer",
      "address": "London, UK",
      "abstract": "We introduce NP-MUS, a symbolic music similarity algorithm tailored for polyphonic music with continuous representations of pitch and duration. The algorithm uses dynamic programming and a cost function that relies on a mathematical model of tonal fusion based on neuronal periodicity detection mechanisms. This paper reviews the general requirements of melodic similarity and offers a similarity method that better addresses contemporary and non-traditional music. We provide experiments based on monophonic and polyphonic excerpts inspired by spectral music and Iannis Xenakis.",
      "author": "Valle, Rafael and Freed, Adrian",
      "editor": "Springer",
      "source_file": "414.html",
      "year_int": 2015
    },
    {
      "id": 415,
      "title": "New Tools for Aspect-Oriented Programming in Music and Media Programming Environments",
      "booktitle": "International Computer Music Conference",
      "year": "2014",
      "month": "14/09/2014",
      "address": "Athens, Greece",
      "abstract": "Media/arts programming is often experimental and exploratory in nature and requiring a flexible development environment to enable continually changing requirements and to facilitate iterative design in which the development of software impacts the design of a work of art, which in turn produces new requirements for the software. We discuss agile development as it relates to media/arts programming. We present aspect-oriented programming and its implementation in Max/MSP using Open Sound Control and the odot library as tool for mobilizing the benefits of agile development.",
      "author": "MacCallum, John and Freed, Adrian and Wessel, David",
      "source_file": "415.html",
      "venue": "International Computer Music Conference",
      "year_int": 2014
    },
    {
      "id": 416,
      "title": "Making the Most of Wifi: Optimisations for Robust Wireless Live Music Performance",
      "booktitle": "New Instruments of Musical Expression",
      "year": "2014",
      "abstract": "Wireless technology is growing increasingly prevalent in the development of new interfaces for live music performance. However, with a number of different wireless technologies operating in the 2.4 GHz band, there is a high risk of interference and congestion, which has the potential to severely disrupt live performances. With its high transmission power, channel bandwidth and throughput, Wi-Fi (IEEE 802.11) presents an opportunity for highly robust wireless communications. This paper presents our preliminary work to optimise the components of a Wi-Fi system for live performance scenarios. We summarise the manufacture and testing of a prototype directional antenna that is designed to maximise sensitivity to a performer’s",
      "author": "Mitchell, Thomas and Madgwick, Sebastian and Rankine, Simon and Hilton, Geoffrey and Freed, Adrian and Nix, Andrew",
      "source_file": "416.html",
      "venue": "New Instruments of Musical Expression",
      "year_int": 2014
    },
    {
      "id": 417,
      "title": "o.io: a Unified Communications Framework for Music, Intermedia and Cloud Interaction",
      "booktitle": "ICMC",
      "year": "2014",
      "publisher": "IMCA",
      "organization": "IMCA",
      "address": "Athens, Greece",
      "abstract": "We present work on the {\\textquotedblleft",
      "keywords": "Control,, Cyber-physical, Device, Discovery, Engineering, Franca,, Integration,, Lingua, Open, OSC,, Protocol,, Software, Sound, Systems,",
      "author": "Freed, Adrian and DeFilippo, David and Gottfried , Rama and MacCallum, John and Lubow, Jeff and Razo, Derek and Rostovtsev, Ilya and Wessel, David",
      "source_file": "417.html",
      "venue": "ICMC",
      "keywords_list": [
        "Control",
        "",
        "Cyber-physical",
        "Device",
        "Discovery",
        "Engineering",
        "Franca",
        "",
        "Integration",
        "",
        "Lingua",
        "Open",
        "OSC",
        "",
        "Protocol",
        "",
        "Software",
        "Sound",
        "Systems",
        ""
      ],
      "year_int": 2014
    },
    {
      "id": 418,
      "title": "Agile Interface Development using OSC Expressions and Process Migration",
      "booktitle": "NIME",
      "year": "2013",
      "address": "Daejeon Korea",
      "abstract": "This paper introduces {\\textquotedblleft",
      "keywords": "Functional Programming, Gesture, Homoiconicity, Process Migration",
      "author": "MacCallum, John and Freed, Adrian and Wessel, David",
      "source_file": "418.html",
      "venue": "NIME",
      "keywords_list": [
        "Functional Programming",
        "Gesture",
        "Homoiconicity",
        "Process Migration"
      ],
      "year_int": 2013
    },
    {
      "id": 419,
      "title": "Colocated Surface Sound Interaction",
      "booktitle": "SigCHI",
      "year": "2013",
      "address": "Paris, France",
      "abstract": "We present three related schemes for colocating sensing and sound actuation on flat surfaces One uses conductive paper to create a musical instrument, another uses magnets mounted in gloves and printed conductors to form planar loudspeaker arrays. Finally we show how conductive and resistive fabrics can be integrated with loudspeaker drivers.",
      "keywords": "Flexible Audio Array, Interactive Electronics, Paper Speakers, Sensor Actuators.",
      "author": "Rowland, Jess and Freed, Adrian",
      "source_file": "419.html",
      "venue": "SigCHI",
      "keywords_list": [
        "Flexible Audio Array",
        "Interactive Electronics",
        "Paper Speakers",
        "Sensor Actuators."
      ],
      "year_int": 2013
    },
    {
      "id": 420,
      "title": "Sound Design as Human Matter Interaction",
      "booktitle": "SigChi",
      "year": "2013",
      "month": "2013",
      "publisher": "ACM",
      "organization": "ACM",
      "address": "Paris, France",
      "abstract": "Recently, terms like \"material computation\" or \"natural computing\" in foundations of computer science and engineering, and \"new materiality\" in cultural studies signal a broader turn to conceptions of the world that are not based on solely human categories. While respecting the values of human-centered design, how can we begin to think about the design of responsive environments and computational media while paying as much attention to material qualities like elasticity, density, wear, and tension as to social and cognitive schema? This presumes understanding computation as a potential property of matter in a non-reductionist way that plausibly spans formal divides between symbolic-semiotic, social, and physical processes. We begin this in the concrete practices of computational sound and sound design.",
      "isbn": "978-1-4503-1952-2",
      "doi": "10.1145/2468356.2468718",
      "url": "https://dl.acm.org/citation.cfm?id=2468718",
      "attachments": "http://adrianfreed.com/sites/default/files/Sha_AltChi-198smaller.pdf",
      "author": "Wei, Sha Xin and Freed, Adrian, Naveb Navid",
      "source_file": "420.html",
      "venue": "SigChi",
      "year_int": 2013
    },
    {
      "id": 421,
      "title": "{\\textquotedblleft",
      "booktitle": "NIME",
      "year": "2013",
      "address": "Daejeon Korea",
      "abstract": "This paper addresses the problem that most electrophones and computer-based musical instruments are ephemera lasting long enough to signal academic and technical prowess. They rarely are used more than in a few musical performances. We offer a case study that suggests that longevity of use depends on stabilizing the interface and innovating the implementation to maintain the required stability of performance for players.",
      "keywords": "Best practices, controller, Critical Organology, Fingerboard, Recrudescence, Unobtainium",
      "author": "Freed, Adrian and Uitti, Frances-Marie and Mansfield, Sam and MacCallum, John",
      "source_file": "421.html",
      "venue": "NIME",
      "keywords_list": [
        "Best practices",
        "controller",
        "Critical Organology",
        "Fingerboard",
        "Recrudescence",
        "Unobtainium"
      ],
      "year_int": 2013
    },
    {
      "id": 422,
      "title": "The Paper FingerPhone: a case study of Musical Instrument Redesign for Sustainability",
      "booktitle": "NIME 2012",
      "year": "2012",
      "month": "20/05/2012",
      "publisher": "NIME",
      "organization": "NIME",
      "address": "Ann Arbor, Michigan",
      "abstract": "Recyclable carbon-loaded conductive paper and a used pizza box can be used to replace the plastic, metal, and fiber glass components of the classic Stylophone musical toy.",
      "isbn": "978-3-319-47214-0",
      "doi": "10.1007/978-3-319-47214-0_25",
      "url": "www.nime.org/proceedings/2012/nime2012_264.pdf",
      "attachments": "http://www.adrianfreed.com/sites/default/files/264_Final_Manuscript.pdf",
      "author": "Freed, Adrian",
      "source_file": "422.html",
      "venue": "NIME 2012",
      "year_int": 2012
    },
    {
      "id": 423,
      "title": "Flexible Surfaces for Interactive Audio",
      "booktitle": "Interactive Tabletops and Surfaces",
      "year": "2012",
      "month": "11/14/2012",
      "publisher": "ITS",
      "organization": "ITS",
      "address": "Cambridge, MA",
      "abstract": "We present here flat flexible audio speaker surface arrays, which are transparent, formed to various environments, and allow for user interaction. These speaker arrays provide an alternative to traditional models of sound reproduction, which often involve discreet point source systems and bulky hardware passively received by the user. The surface array system opens up new possibilities for acoustic spaces, creativity, and sound interactivity",
      "keywords": "Flexible Audio Array, Interactive Electronics, Paper Speakers",
      "author": "Rowland, Jess and Freed, Adrian",
      "source_file": "423.html",
      "venue": "Interactive Tabletops and Surfaces",
      "keywords_list": [
        "Flexible Audio Array",
        "Interactive Electronics",
        "Paper Speakers"
      ],
      "year_int": 2012
    },
    {
      "id": 424,
      "title": "Flexible Surfaces for Interactive Audio",
      "booktitle": "Interactive Tabletops and Surfaces",
      "year": "2012",
      "month": "1/14/2012",
      "address": "Cambridge, MA",
      "author": "Rowland, Jess and Freed, Adrian",
      "source_file": "424.html",
      "venue": "Interactive Tabletops and Surfaces",
      "year_int": 2012
    },
    {
      "id": 425,
      "title": "Integration of Touch Pressure and Position Sensing with Speaker Diaphragms (Colocating Loudspeakers and Touch Interaction)",
      "booktitle": "Audio Engineering Society",
      "year": "2012",
      "publisher": "AES",
      "organization": "AES",
      "address": "San Francisco",
      "abstract": "Speaker cones and other driver diaphragms are usually too fragile to be good sites for touch interaction. This can be solved by employing new, lightweight piezoresistive e-textiles with flat, rectangular, stiff surfaces used in full-range drivers from HiWave. Good low-frequency performance of piezoresistive fabric has an advantage over piezoelectric sensing for this situation. Applications of these integrated sensor/actuators include haptic feedback user interfaces and responsive electronic percussion instruments.",
      "author": "Freed, Adrian",
      "source_file": "425.html",
      "venue": "Audio Engineering Society",
      "year_int": 2012
    },
    {
      "id": 426,
      "title": "Composability for Musical Gesture Signal Processing using new OSC-based Object and Functional Programming Extensions to Max/MSP",
      "booktitle": "New Interfaces for Musical Expression",
      "year": "2011",
      "month": "30/05/2011",
      "publisher": "NIME",
      "organization": "NIME",
      "address": "Oslo, Norway",
      "abstract": "An effective programming style for gesture signal processing is described using a new library that brings efficient run-time polymorphism, functional and instance-based object-oriented programming to Max/MSP. By introducing better support for generic programming and composability Max/MSP becomes a more productive environment for managing the growing scale and complexity of gesture sensing systems for musical instruments and interactive installations.",
      "keywords": "Composability, Delegation, Functional Programming, Gesture, Max/MSP, Object Oriented, Programming",
      "author": "Freed, Adrian and MacCallum, John and Schmeder, Andrew",
      "source_file": "426.html",
      "venue": "New Interfaces for Musical Expression",
      "keywords_list": [
        "Composability",
        "Delegation",
        "Functional Programming",
        "Gesture",
        "Max/MSP",
        "Object Oriented",
        "Programming"
      ],
      "year_int": 2011
    },
    {
      "id": 427,
      "title": "Diverse Roles of Objets Trouvés",
      "booktitle": "Understanding VISUAL Music 2011",
      "year": "2011",
      "month": "26/08/2011",
      "address": "Montreal, CA",
      "abstract": "Analysis of three examples of live, solo visual music practice prepared with CNMAT support will be presented: Cenzontle by Roberto Morales; SoundPainter by Diane Douglas and Arduino Video Installation by Sabine Gruffat. Although materially and structurally quite different these pieces have some interesting features in common: the visual and sonic matieral is controlled concurrently by a single performer and certain objets trouv{\\'e",
      "url": "http://uvm2011.hexagram.ca/presenters.html",
      "author": "Freed, Adrian",
      "source_file": "427.html",
      "venue": "Understanding VISUAL Music 2011",
      "year_int": 2011
    },
    {
      "id": 428,
      "title": "Pervasive Cameras: Making Sense of Many Angles using Radial Basis Function Interpolation and Salience Analysis",
      "booktitle": "Pervasive 2011",
      "year": "2011",
      "month": "12/06/2011",
      "address": "San Francisco, CA",
      "abstract": "This paper describes situations which lend themselves to the use of numerous cameras and techniques for displaying many camera streams simultaneously. The focus here is on the role of the {\\textquotedblleft",
      "url": "http://cnmat.org/files/attachments/pervasivecameras_formatted2.pdf",
      "attachments": "http://www.adrianfreed.com/sites/default/files/telematic.png , http://www.adrianfreed.com/sites/default/files/mango.png , http://www.adrianfreed.com/sites/default/files/salad.png , http://www.adrianfreed.com/sites/default/files/Cilantro.png",
      "author": "Mann, Yotam and Freed, Adrian",
      "source_file": "428.html",
      "venue": "Pervasive 2011",
      "year_int": 2011
    },
    {
      "id": 429,
      "title": "Dynamic, Instance-based, Object-Oriented Programming (OOP) in Max/MSP using Open Sound Control (OSC) Message Delegation",
      "booktitle": "ICMC 2011",
      "year": "2011",
      "publisher": "ICMA",
      "organization": "ICMA",
      "address": "Huddersfield, England",
      "abstract": "A new media programming style is introduced thatbrings efficient run-time polymorphism, functional andinstance-based object-oriented programming toMax/MSP and related visual dataflow languages.Examples are presented to illustrate new, unusual andeffective applications of the approach of using OSCmessages for object representations and data flow formethod delegation.",
      "author": "Freed, Adrian and MacCallum, John and Schmeder, Andy",
      "source_file": "429.html",
      "venue": "ICMC 2011",
      "year_int": 2011
    },
    {
      "id": 430,
      "title": "Robust and Reliable Fabric, Piezoresistive Multitouch Sensing Surfaces for Musical Controllers",
      "booktitle": "NIME 2011",
      "year": "2011",
      "publisher": "NIME",
      "organization": "NIME",
      "abstract": "The design space of fabric multitouch surface interaction is explored with emphasis on novel materials and construction techniques aimed towards reliable, repairable pressure sensing surfaces for musical applications.",
      "keywords": "controller, drum, etextiles, fabric, Multitouch, piezoresistive, sensor, surface, tangible",
      "author": "Roh, Jung-Sim and Freed, Adrian and Mann, Yotam and Wessel, David",
      "source_file": "430.html",
      "venue": "NIME 2011",
      "keywords_list": [
        "controller",
        "drum",
        "etextiles",
        "fabric",
        "Multitouch",
        "piezoresistive",
        "sensor",
        "surface",
        "tangible"
      ],
      "year_int": 2011
    },
    {
      "id": 431,
      "title": "Advances in the Parallelization of Musical Applications",
      "booktitle": "ICMC 2010",
      "year": "2010",
      "publisher": "ICMA",
      "organization": "ICMA",
      "author": "Battenberg, Eric and Freed, Adrian and Wessel, David",
      "source_file": "431.html",
      "venue": "ICMC 2010",
      "year_int": 2010
    },
    {
      "title": "David Wessel{\\textquoteright",
      "journal": "The Journal of the Acoustical Society of America",
      "volume": "141",
      "number": "5",
      "year": "2017",
      "month": "2017",
      "pages": "3560-3560",
      "abstract": "The main professional focus of David Wessel{\\textquoteright",
      "doi": "10.1121/1.4987551",
      "url": "https://doi.org/10.1121/1.4987551",
      "author": "Freed, Adrian",
      "source_file": "433.html",
      "year_int": 2017
    },
    {
      "id": 440,
      "title": "Languages for Real-Time Control and Sound Synthesis",
      "year": "2000",
      "abstract": "An evaluation of alternatives to Max",
      "author": "Freed, Adrian",
      "source_file": "440.html",
      "year_int": 2000
    },
    {
      "id": 441,
      "title": "Computing Engine Research",
      "year": "1996",
      "abstract": "Summary of CNMAT{\\textquoteright",
      "author": "Freed, Adrian",
      "source_file": "441.html",
      "year_int": 1996
    },
    {
      "id": 443,
      "title": "Ideas on Building Interactivity in Textiles and Related Fibers",
      "booktitle": "International Symposium on New Frontiers in Fiber Materials Science",
      "year": "2011",
      "month": "08/2011",
      "publisher": "The Fiber Society",
      "organization": "The Fiber Society",
      "address": "Charlston, South Carolina",
      "abstract": "The theme of the symposium, Fibers as Building Blocks of Advanced Material, was emphasized in a special keynote dinner talk where ideas on Building Interactivity in Textiles and Related Fibers were illustrated with different fiber and textile based musical instruments.",
      "url": "http://www.thefibersociety.org/Portals/0/Past\\%20Conferences/2011_Fall_Abstracts.pdf",
      "author": "Freed, Adrian",
      "source_file": "443.html",
      "venue": "International Symposium on New Frontiers in Fiber Materials Science",
      "year_int": 2011
    },
    {
      "id": 445,
      "title": "Sound, Vibration, and Retroaction in Deformable Displays",
      "booktitle": "SIGCHI Workshop: Organic experiences: (re)shaping interactions with deformable displays",
      "year": "2013",
      "month": "04/2013",
      "publisher": "ACM",
      "organization": "ACM",
      "address": "Paris/France",
      "abstract": "We present and discuss working designs of deformable displays that use sound, vibration and retroaction for output. Deformation is achieved using stretchable e-textiles and transducer diaphragms. Major remaining engineering challenges associated with these designs are introduced: increased temporal and spatial resolution and precision.",
      "keywords": "Colocated Sound and Interaction, Deformable display, e-textile Sensors, Interactive Electronics; Sensor Actuators",
      "isbn": "978-1-4503-1952-2",
      "url": "https://dl.acm.org/citation.cfm?id=2479639",
      "attachments": "http://www.adrianfreed.com/sites/default/files/CHI2013FreedDeformableSmaller.pdf",
      "author": "Freed, Adrian",
      "source_file": "445.html",
      "venue": "SIGCHI Workshop: Organic experiences: (re)shaping interactions with deformable displays",
      "keywords_list": [
        "Colocated Sound and Interaction",
        "Deformable display",
        "e-textile Sensors",
        "Interactive Electronics; Sensor Actuators"
      ],
      "year_int": 2013
    },
    {
      "id": 466,
      "title": "Door Bleeper",
      "journal": "Everyday Electronics",
      "year": "1975",
      "month": "10/1975",
      "attachments": "http://www.adrianfreed.com/sites/default/files/Pages from Everyday-Electronics-1975-10.pdf",
      "author": "Freed, Adrian",
      "source_file": "466.html",
      "year_int": 1975
    },
    {
      "id": 472,
      "title": "Entrainment Accounts of Intra-action for Intermedia and Metabody Cybernetics.",
      "booktitle": "Hacking Big Data Brother: From Biometrics to Intra-action",
      "year": "2015",
      "month": "07/2015",
      "address": "Madrid, Spain",
      "abstract": "I explore various formulations of the concept of entrainment as a way to model and operationalize intra-actions in intermedia performance practice. Data (Big or otherwise) in these formulations are subjugated (via OSC) to disposable utterances that serve an accounting role analogous to the roles energy, action, momentum serve in physics. An important strength of entrainment is that it requires neither telos, a stable presupposition of an arrow of time, nor causality {\\textendash",
      "url": "https://www.youtube.com/watch?v=AbXFTbLRVrM",
      "author": "Freed, Adrian",
      "source_file": "472.html",
      "venue": "Hacking Big Data Brother: From Biometrics to Intra-action",
      "year_int": 2015
    },
    {
      "id": 473,
      "title": "The Anti-Ergonomy of Instruments of Interaction",
      "booktitle": "Stanford HCI Seminar",
      "year": "2010",
      "month": "09/2010",
      "publisher": "Stanford University",
      "organization": "Stanford University",
      "address": "Stanford, CA",
      "abstract": "Antiergonomy rules. I use the idea of anti-ergonomy to pierce the veil of objects imbued with charismatic authority (holy water, iPhone, etc) and expose the often contradictory value systems of the stakeholders. A quick illustration: Let{\\textquoteright",
      "url": "https://www.youtube.com/watch?v=jbHm3DnwamM",
      "author": "Freed, Adrian",
      "source_file": "473.html",
      "venue": "Stanford HCI Seminar",
      "year_int": 2010
    }
  ]
}