p5.js/Hydra code:

// Boot a hidden p5 sketch and route its canvas into Hydra's s0 source.
let p5 = new P5()
s0.init({src: p5.canvas})
p5.hide();

// Shared scene state: particle pool plus the active visual-mode index.
let particles = [];
let numParticles = 50;
let mode = 2;

// Build one particle with random position, size, velocity and colour.
const makeParticle = () => ({
  x: p5.random(p5.width),
  y: p5.random(p5.height),
  size: p5.random(5, 20),
  speedX: p5.random(-2, 2),
  speedY: p5.random(-2, 2),
  color: p5.color(p5.random(255), p5.random(255), p5.random(255))
});

// Fill the pool up to the configured count.
while (particles.length < numParticles) {
  particles.push(makeParticle());
}

// Per-frame render loop: pick a scene from MIDI CC0 and draw it.
p5.draw = () => {
  p5.background(0);

  // Ascending CC0 thresholds map to scenes 0..4; anything >= 110 is scene 5.
  const cutoffs = [20, 40, 60, 80, 110];
  let next = cutoffs.length; // Fractal mode by default
  for (let t = 0; t < cutoffs.length; t++) {
    if (ccActual[0] < cutoffs[t]) {
      next = t;
      break;
    }
  }
  mode = next;

  // Scene dispatch table, indexed by mode:
  // 0 particles, 1 spiral, 2 grid, 3 starburst, 4 waveform, 5 fractal.
  const scenes = [
    drawParticles,
    drawSpiral,
    drawGrid,
    drawStarburst,
    drawWaveform,
    drawFractal
  ];
  const scene = scenes[mode];
  if (scene) scene();
}

// Scene 0: draw every particle, advance it, and bounce it off canvas edges.
function drawParticles() {
  p5.noStroke();
  for (const particle of particles) {
    const { speedX, speedY } = particle;
    p5.fill(particle.color);
    p5.ellipse(particle.x, particle.y, particle.size, particle.size);

    particle.x += speedX;
    particle.y += speedY;

    // Reverse the velocity component when the particle leaves the canvas.
    if (particle.x < 0 || particle.x > p5.width) particle.speedX = -speedX;
    if (particle.y < 0 || particle.y > p5.height) particle.speedY = -speedY;
  }
}

// Scene 1: animated spiral centred on the canvas; its rotation phase is
// driven by Hydra's global `time`.
//
// Fixes vs. original: the previous sample's trig was recomputed from
// scratch every iteration — it is now cached from the prior loop pass —
// and the centring translate is wrapped in push()/pop() so it no longer
// leaks into any drawing done after this call.
function drawSpiral() {
  p5.stroke(255, 0, 100);
  p5.strokeWeight(3);
  p5.noFill();
  p5.push();
  p5.translate(p5.width / 2, p5.height / 2);

  const spin = time * 0.2; // loop-invariant rotation phase
  let prevX = 0;
  let prevY = 0;
  for (let i = 0; i < 200; i++) {
    const r = i * 0.5;
    const angle = i * 0.1 + spin;
    const x = r * p5.cos(angle);
    const y = r * p5.sin(angle);
    p5.point(x, y);
    // Connect to the cached previous sample instead of recomputing it.
    if (i > 0) {
      p5.line(prevX, prevY, x, y);
    }
    prevX = x;
    prevY = y;
  }
  p5.pop();
}

// Scene 2: grid of squares whose sizes pulse as a wave radiating from the
// canvas centre, animated by the global `time`.
function drawGrid() {
  p5.stroke(0, 255, 255);
  p5.strokeWeight(2);
  const cellSize = 50;
  const centerX = p5.width / 2;
  const centerY = p5.height / 2;

  for (let y = 0; y < p5.height; y += cellSize) {
    for (let x = 0; x < p5.width; x += cellSize) {
      // Size oscillates between 10 and cellSize-5 based on radial distance.
      const wave = p5.sin(p5.dist(x, y, centerX, centerY) * 0.01 + time);
      const side = p5.map(wave, -1, 1, 10, cellSize - 5);
      p5.rect(x, y, side, side);
    }
  }
}

// Scene 3: 36 rays from a fixed inner ring (radius 100) to an outer ring
// (radius 300) whose endpoints swirl with `time`.
function drawStarburst() {
  p5.stroke(255, 255, 0);
  p5.strokeWeight(2);
  p5.translate(p5.width / 2, p5.height / 2);

  const rayCount = 36;
  const step = p5.TWO_PI / rayCount;
  const swirl = time * 0.5;
  for (let i = 0; i < rayCount; i++) {
    const innerAngle = i * step;
    const outerAngle = innerAngle + swirl;
    p5.line(
      100 * p5.cos(innerAngle), 100 * p5.sin(innerAngle),
      300 * p5.cos(outerAngle), 300 * p5.sin(outerAngle)
    );
  }
}

// Scene 4: one polyline across the canvas — the sum of two waves moving
// in opposite directions (sin travels left, cos travels right).
function drawWaveform() {
  p5.stroke(0, 255, 0);
  p5.strokeWeight(3);
  p5.noFill();

  const midY = p5.height / 2;
  p5.beginShape();
  for (let x = 0; x < p5.width; x += 10) {
    const primary = p5.sin(x * 0.02 + time) * 100;
    const secondary = p5.cos(x * 0.01 - time * 0.5) * 50;
    p5.vertex(x, midY + primary + secondary);
  }
  p5.endShape();
}

// Scene 5: recursive branching tree rooted at the canvas centre.
function drawFractal() {
  p5.stroke(255);
  p5.noFill();
  p5.translate(p5.width / 2, p5.height / 2);
  const rootLength = 100;
  const rootAngle = 0;
  const maxDepth = 8;
  drawBranch(rootLength, rootAngle, maxDepth);
}

// Recursively draws one branch segment, then its two animated children.
//
// len   - segment length in pixels (shrinks by 0.7 per level)
// depth - remaining recursion levels; also used as the stroke weight
// angle - rotation (radians) applied before drawing this segment
//
// NOTE(review): each child receives the parent's full `angle` plus a
// time-driven wobble, and that whole angle is re-applied via p5.rotate()
// inside the child's own push() frame — so rotations compound down the
// tree. This may be the intended look; confirm it isn't meant to pass
// only the wobble delta.
function drawBranch(len, angle, depth) {
  if (depth <= 0) return;

  // Thicker strokes and a colour shift for shallower (earlier) branches.
  p5.strokeWeight(depth);
  p5.stroke(255 - depth * 30, depth * 30, 150);

  p5.push();
  p5.rotate(angle);
  p5.line(0, 0, 0, -len);
  p5.translate(0, -len);

  // Two children wobble out of phase (sin vs. cos of the same clock).
  let t = time * 0.5;
  drawBranch(len * 0.7, angle + p5.sin(t) * 0.5, depth - 1);
  drawBranch(len * 0.7, angle - p5.cos(t) * 0.5, depth - 1);
  p5.pop();
}
src(s0).modulate(noise(5,0.1),0.1).blend(osc(15,0.2,()=>ccActual[0]/127).hue(()=>ccActual[0]/20),0.3).out()

TidalCycles code:


-- Sweep MIDI CC0 through 0..127 over 4 cycles; the p5/Hydra sketch reads
-- this controller (ccActual[0]) to select the visual mode.
d1 $ ccv (slow 4 "0 25 50 75 100 127")
  # ccn "0"
  # s "midi"

-- Melodic layer: arpeggiated 7th chords, a slow filtered bassline, and an
-- occasional scrambled percussion texture.
d2 $ stack [
  -- Arpeggios over a 4-cycle chord progression; gain follows the same
  -- 0..127 ramp that drives CC0.
  n (arp "<up down diverge>" (slow 4 $ "a'min7 c'maj7 e'min7 g'maj7"))
    # s "arpy"
    # gain (slow 4 $ range 0.5 0.9 $ "0 25 50 75 100 127" / 127)
    # room 0.3 # size 0.5,
  -- Rising bass notes with an opening low-pass filter.
  n (slow 4 $ "a2 c3 e3 g3 c4 e4")
    # s "jvbass"
    # lpf (slow 4 $ range 300 2000 $ "0 25 50 75 100 127")
    # gain 0.8,
    
  -- Shuffled hits, doubled in speed every third cycle, panning slowly.
  every 3 (fast 2) $ n (slow 2 $ scramble 8 $ run 8)
    # s "east"
    # gain 0.7
    # pan (slow 8 $ sine)
]

-- Ambient pad, band-passed (300 Hz high-pass under a sweeping low-pass).
d3 $ slow 8 $ s "padlong"
    # gain 0.6
    # lpf (slow 4 $ range 500 5000 $ "0 25 50 75 100 127")
    # hpf 300

-- Drum pattern: reversed in one ear every 4th cycle, doubled in speed on
-- cycles 6-7 of each 8; routed to orbit 1 so its reverb is independent.
d4 $ every 4 (jux rev) $ whenmod 8 6 (fast 2) $
    n (slow 2 $ "0 [~ 1] 2 [3 4]")
    # s "feel"
    # gain 0.75
    # room 0.2
    # orbit 1

-- Glitch texture: 8 hits per cycle, boosted every other cycle, with a
-- slowly drifting sample rate and random panning; cut 1 chokes overlaps.
d5 $ every 2 (# gain 1.5) $
    s "glitch:5*8"
    # gain (slow 2 $ range 0 0.8 $ "0 25 50 75 100 127" / 127)
    # speed (range 0.5 1.5 $ slow 16 sine)
    # pan (slow 3 $ rand)
    # cut 1

-- Teardown cues: mute individual channels first, then hush stops everything.
d2 silence 
d3 silence
d5 silence
hush

Link to demo video. (Apologies for the visible keystrokes — my SuperCollider/Pulsar setup has been very laggy and buggy, and since I was not able to record from SuperCollider, I had to rely on a QuickTime screen recording.)

Thank you!

Following the words of the famed producer Kanye West, I wanted to incorporate a lot of the human voice into my project. My overall theme was a transition from happy, serene sounds with the backdrop of the phrase “I love live coding” spelled out, into a transitional countdown, and then a descent into madness with the phrase “pain” spelled out on repeat, ending with the word “help” spelled out rather lifelessly. 

The visuals then had to match this sequence: I wanted the initial scenery to be colorful with pretty shapes. For the transition, I wanted the shapes to get a bit more distorted, and for the number of shapes to match the countdown so you can both see and hear the countdown for the beat drop. Right after comes a brief moment of serenity before a descent into chaos, utilizing a lot of dark sample libraries as a backdrop to the prominent “pain” repeated throughout. 

Making this project was a lot of fun and I am excited to see everyone’s project.

Tidalcycles:


setcps (135/60/4)  -- 1. Set global tempo


-- Opening scene: bright "I love live coding" alphabet voice, major-key
-- arpeggios, light percussion and ambient textures. Most gains drift on a
-- slow 64-cycle sine so the scene evolves without intervention.
happyComplex = do {
  hush;
  -- Spells "I love live coding" with the alphabet samples.
  -- NOTE(review): the "~~" token near the end lacks a space — confirm it
  -- parses as two rests rather than being a typo for "~ ~".
  d1 $ slow 4 $ s "alphabet" <| n "~ ~ 8 ~ ~ 11 14 21 4 ~ ~ 11 8 21 4 ~~ 2 14 3  8 13 6 ~ ~"
       # speed "1.2"
       # gain (range 1.5 1 (slow 64 sine))   -- starts loud then fades over time
       # room "0.1"
       # pan (slow 8 "0 1");
  -- Two arpy layers in C major: a fast up-down arpeggio over a euclidean chord.
  d2 $ stack [
         fast 2 $ s "arpy" >| note (arp "updown" (scale "major" ("[0,2,4,6]" + "c5")))
              # gain (range 1.2 0.8 (slow 64 sine)),
         s "arpy" >| note (scale "major" ("[<-7 -5 -3 -1>,0,2](3,8)" + "c5"))
              # gain (range 1.2 0.8 (slow 64 sine))
       ];
  -- Light percussion: gentle claps and hi-hats.
  d3 $ stack [ s "~ cp" # room 0.5,
              fast 2 $ s "hh*2 hh*2 hh*2 <hh*6 [hh*2]!3>"
                   # gain (range 1 0.5 (slow 64 sine))
                   # room 0.7
            ];
  -- Ambient textures: acoustic drum loop and soft piano.
  d4 $ loopAt 8 $ chop 100  $ s "bev:1"  # room 0.8 # legato 12; -- # gain (range 1 0.3 (slow 64 sine));
  d5 $ slow 8 $ s "superpiano" <| n "c d f g a c6 d6 f6"
       # gain (range 1 0.3 (slow 64 sine))
};



-- Launch the opening scene.
happyComplex

-- Drive the Hydra visuals: CC0 and CC1 ramp down 127 -> 0 each cycle.
d8 $ ccv (segment 128 (range 127 0 saw)) # ccn "0" # s "midi" -- 5.
d9 $ ccv (segment 128 (range 127 0 saw)) # ccn "1" # s "midi" -- 6.

-- Spoken countdown "3 2 1", quantised to the next cycle boundary.
d1 $ qtrigger $ slow 2 $ s "numbers" # n "<3 2 1>" # gain 1.5
--ctrl 4
d1 silence 

-- Strobe CC2 and CC0 for the drop visuals.
d10 $ ccv "0 127 0 127" # ccn "2" # s "midi" -- 8.
d11 $ ccv "<127 0 127 0>" # ccn "0" # s "midi" -- 8.

-- Launch the dark scene.
-- NOTE(review): scaryComplex2 is only defined further down the file —
-- evaluate its definition first in a live session or this cue fails.
scaryComplex2

-- "pain" spelled slowly, pitched down, with gain creeping up over 64 cycles.
d1 $ slow 2 $ s "alphabet" <| n "~ 15 0 8 13 ~"
     # speed "0.6"
     # legato 2
     # gain (range 1.3 2 (slow 64 saw))
     # room "0.1"
     # pan (slow 8 "0 1")

-- "help", spelled flatly for the ending.
-- NOTE(review): the stray leading space before `d1` can break block
-- evaluation in some editors — confirm it evaluates as intended.
 d1 $ s "alphabet" <| n "7 4 11 15" --19
   # gain 1.5  -- 21, 23, 25 change to 1, 0.5 0

-- Stop everything before switching scenes.
hush
-- Unused draft of a countdown/transition scene, kept for reference.
-- transitionComplex = do {
--   -- # hush;
--   d5 silence;
--   d1 $ qtrigger $ seqP [
--       (0.01, 1.01, s "numbers" <| n "3" # gain 1.5),
--       (1,    2,    s "off"),
--       (2,    3,    s "off"),
--       (3,    4,    s "numbers" <| n "2" # gain 1.5),
--       (4,    5,    s "off"),
--       (5,    6,    s "off"),
--       (6,    7,    s "numbers" <| n "1" # gain 1.5),
--       (7,    8,    s "off"),
--       (8,    9,    s "off")
--     ];
--   d2 $ fast 2 $ s "hh" <| n (run 6)
--        # gain 0.8
--        # speed (slow 4 (range 1 2 saw));
--   d3 $ loopAt 16 $ s "sheffield" # gain (range 0.2 0.4 (slow 32 sine)) # room 0.9;
--   xfadeIn 4 2 silence
-- };


-- Safety stop before defining the scary scene.
hush

-- Final scene: dark layered textures. Every channel comes in via clutch,
-- which crossfades from whatever that channel was playing before.
scaryComplex2 = do {
  clutch 2 $ s "ades3" <| n (run 7) # gain (range 1.2 0.8 (slow 64 sine)) # room 0.2;
  -- Distorted one-cycle loop as the rhythmic bed.
  clutch 3 $ loopAt 1 $ s "dist:1" # gain 1.0;
  -- Slow bass sweep through 30 samples, pitched down a whole tone.
  clutch 4 $ slow 8 $ s "bass1" <| n (run 30)
       # gain (range 1.0 0.7 (slow 64 sine))
       # up "-2" # room 0.3;
  -- Minor-key counterpart of the opening scene's arpeggio stack.
  clutch 5 $ stack [
         fast 2 $ s "arpy" >| note (arp "updown" (scale "minor" ("[0,2,3,5]" + "c4")))
              # gain 0.8,
         s "arpy" >| note (scale "minor" ("[0,1,3,5]" + "c4"))
              # gain 0.8
       ] # room 0.5;
  -- High-passed industrial hits cycling through 32 samples.
  clutch 6 $ slow 4 $ s "industrial" <| n (run 32) # gain 1.0 # hpf 800
};







-- Ending: "help" spelled out; gain is stepped down live per the cue notes.
d1 $ s "alphabet" <| n "7 4 11 15" --19
  # gain 1.5  -- 21, 23, 25 change to 1, 0.5 0



hush --27

Hydra:

//S1 — opening visuals: a colorama'd gradient multiplied by voronoi cells,
// rotated and displaced by noise. CC0/CC1 (from Tidal's d8/d9 ramps)
// animate the hue, cell density and noise amount. Numbered comments are
// live performance cues.

gradient(0.1)
  .colorama(0.1 + cc[0] * 0.7)         
  .mult(
    voronoi(5, 0.2 + cc[1] * 0.5, 0.7)
  )
  .modulateRotate(osc(10, 0.05, Math.PI / 2), cc[0])
  .modulate(noise(()=>(cc[0]+cc[1])*3,.1))
  //7. change to 3 -> 30 -> 300 -> 3000 
  .out(o0); // 4.

//S2 — transition visuals: layered oscillators masked by a repeating square
// grid; CC2 scales the squares and the stacked colorama calls push the hue
// shift further each evaluation.
  osc(20, 0.1, 0)
    .modulate(noise(3, 0.1))
    .add(
      osc(10, 0.1, Math.PI / 2)
        .modulateRotate(noise(2, 0.05), 0.05)
    )
    .colorama(0.1 + cc[0] * 0.1).colorama(0.1).colorama(0.1).colorama(0.1).colorama(0.1)          // Subtle hue shift modulated by CC0
    .mult(
      shape(4, 0.2 + cc[2]*0.03)
        .repeat(3,3)
        //.repeat(2,2) // 11.
        //.repeat(1,1) // 12.
        //.repeat(3000,3000) // 13.
    )
    .modulate(noise(()=>(cc[0]+cc[1]) * 3, 0.1)) // 14. change to 3
    .out(o0); //10. play with the 3


  // S3 — chaos visuals: a dense oscillator layered with luma-keyed
  // oscillators, noise-warped and multiplied by voronoi; CC2 drives the
  // rotation, warp amount and hue.
  osc(200, 0.01, 4)
    .rotate(2 + cc[2]*20)
    .layer(
      osc(30, 0, 1)
        .luma(0.1, 0.1)
        .color(0, 0, 0, 1)
    )
    .layer(
      osc(300, 0.8, 1)
        .luma(0.1, 0.01)
    )
    .modulate(noise(() => (cc[2] + cc[2]) * 20, 0.5)) // 0 1
    .colorama(0.05 + cc[2] * 0.05).colorama(0.1).colorama(0.1).colorama(0.1)          // Modulated hue via CC2
    .mult(
      voronoi(5, 3, 0.7) //17. change to 30, 300
    )
    .out(o0); //16
    

// Ending: collapse the screen to a thin thresholded line, then blank out.
shape(2, 0.01).thresh().out() //22
hush() //26


Link to video.

Ryoichi Kurokawa’s art dissects and abstracts elements from vastly different scales to evoke a sense of majesty. His work involves dissecting, “de-naturing,” and distilling phenomena into abstract sounds and images to reveal the wonders contained within.

While Kurokawa’s synaesthetic approach is intriguing, I wonder if the emphasis on uniting hearing and seeing is more about overcoming the limitations of traditional art forms than a genuine exploration of sensory experience. Perhaps the “mechanical separation” he refers to is not a flaw to be corrected, but a characteristic that allows each sense to be appreciated independently.

Kurokawa embraces technological advancements while maintaining an appreciation for older forms. He is interested in romancing both old and new technologies. His installations often feature multiple screens, creating immersive environments that explore the interplay between figuration and abstraction. However, I question whether the sheer scale and technological complexity of his installations risk overshadowing the subtleties of the natural phenomena they are meant to represent. The sublime, as Kurokawa seems to define it, may be lost in translation if technology becomes an end in itself rather than a means of enhancing our understanding of nature.

Vuo is a visual programming environment designed for artists, designers, and creative developers to build interactive media applications without traditional coding. It is especially popular in fields like live visuals, generative art, motion graphics, and real-time interactive installations.

What makes Vuo unique is its node-based interface, which allows users to create complex visual and audio-driven projects through a modular drag-and-drop system. Unlike traditional coding environments, Vuo’s event-driven architecture enables seamless real-time interactivity, making it ideal for projects that require immediate feedback and dynamic responsiveness. It also supports Syphon, OSC, MIDI, and 3D graphics, making it a versatile tool for multimedia creators.

Vuo was developed by Kosada as an alternative to Apple’s now-discontinued Quartz Composer, which was widely used for real-time graphics. Launched in 2014, Vuo was designed to be a modern, GPU-accelerated, cross-platform tool that extends beyond Quartz Composer’s capabilities. Over time, it has grown to support 3D rendering, audio-reactive visuals, MIDI control, and OSC communication, making it a powerful tool for digital artists.

Today, Vuo is widely used for live performances, interactive installations, and experimental visual art, providing an intuitive and powerful platform for creative expression. It is also popular in VJing, projection mapping, and interactive museum exhibits, making it an essential tool in modern digital art. 

Below are some images of what I implemented.

Noise-Based Image Generator

Simple Pattern

It’s fascinating how much expression can be packed into the smallest timing variations in music. This paper really opened my eyes to the idea that even simple, repetitive patterns can be incredibly expressive through subtle shifts in intensity and timing. It seems like the core of groove-based music lies in this interplay between a steady pulse and the almost imperceptible ways musicians play with it. It’s like they’re having a conversation with each other, using these tiny deviations in timing to create different moods and feelings.

I found it particularly interesting how the paper connects the backbeat, with its slight delay, to the body’s natural rhythms. The idea that the way we move – our feet anticipating a sound and our hands following it – might influence musical timing is pretty interesting. It makes you think about how much our physicality is tied to the music we create and enjoy.

The discussion of technology’s role in music was thought-provoking too. The absence of those human-like microtiming variations can be as powerful as their presence, and the strategic use of “robotic” rhythms can be musically meaningful. And the way musicians use technology to manipulate sampled recordings, giving them a sort of “pseudo-human” feel, highlights the ongoing conversation between humans and machines in music. It makes me wonder what is “soul” in music, and if it is really a powerfully embodied human presence? I’m starting to think that it might be related to these very subtle, almost invisible traces of the human body in music.

The concept of live coding, as presented in the text, feels both exciting and deeply relevant to my experiences as a computer science and math double major. The idea of writing and modifying code in real time, while making the process visible to an audience, challenges the traditional view of programming as a solitary or rigid task. Instead, it reframes coding as an improvisational and collaborative act, which resonates with how I often approach problem-solving—iteratively and creatively.


What stands out to me is how live coding emphasizes “thinking in public.” As someone familiar with AI due to my major, this reminds me of the iterative nature of training models: experimenting, adapting, and learning from feedback. Similarly, live coding invites a dialogue between the coder, the machine, and the audience. The notion of making algorithms “strange” also intrigues me—it’s a reminder to question assumptions and explore new perspectives, something I value in both my technical and mathematical work.


Ultimately, live coding feels like a bridge between technical rigor and artistic expression. It inspires me to think about how my skills could be used not just to solve problems but to create meaningful, interactive experiences that push the boundaries of what technology can do.