Someone asked us for the code of the visuals. The bulk of it is P5, the height map is inspired by Shiffman’s tutorial. The height values of the terrain are then multiplied by a factor of the distance from the center (hence the flat part in the middle). Square movement is two sine functions, one for their y positions and the other for their rotation. The sun is the shader in the class tutorial.

Hydra is used for feedback and coloring and the final transition. The transition is a Voronoi-modulated oscillator.

Inspiration for the composition of the visual was drawn from this video.

Here’s the full code. Steps:

  1. run shader
  2. run p5
  3. run for loop
  4. run draw()
  5. run o1
  6. run o3
  7. run render(o3)
  8. tinker with o3 as commented

 

 

// glowCircle — registers a custom Hydra source that renders a soft radial
// glow (the "sun"). locX/locY are in screen pixels; glowAmount controls the
// falloff radius; r/g/b tint the glow.
setFunction({
  name: 'glowCircle',
  type: 'src',
  inputs: [
    { type: 'float', name: 'locX', default: 0. },
    { type: 'float', name: 'locY', default: 0. },
    { type: 'float', name: 'glowAmount', default: 50. },
    { type: 'float', name: 'r', default: 0.6 },
    { type: 'float', name: 'g', default: 0.3 },
    { type: 'float', name: 'b', default: 0.5 },
  ],
  // Brightness falls off as 1/distance from the chosen screen point.
  glsl: `
  vec2 loc = vec2(locX,locY);
  // loc is in screen spaces, but _st is in normalized space
  float dist = glowAmount/distance(_st*resolution, loc);
  return vec4(r*dist,g*dist,b*dist,0.1);
`
})


// Full-window WEBGL p5 instance (Hydra's P5 wrapper); its canvas is piped
// into Hydra as source s0. All names below are deliberate globals so the
// draw loop and later Hydra chains can read them across live re-evaluations.
p5 = new P5({width: window.innerWidth, height:window.innerHeight, mode: 'WEBGL'})
s0.init({src: p5.canvas})
src(s0).out(0)
p5.hide();
scl = 50;              // grid cell size in pixels
w = 4200;              // terrain plane width
h = 3000;              // terrain plane depth
// NOTE(review): comment said "set m as 300" but the value is 100 — and
// draw() resets m = 100 every frame anyway, so this initial value is moot.
m = 100;
cols = w / scl;        // 84 columns
rows = h / scl         // 60 rows
flying = 0             // noise-field scroll offset, decremented per frame
terrain = []           // per-cell height map, rebuilt every frame
spikes = []            // MIDI-triggered height bumps, decayed every frame
toggle = 0             // last-seen cc[12] value (big-block trigger)
toggle2 = 0            // last-seen cc[10] value (sharp-spike trigger)
size = 3;              // big-block footprint (randomized per frame in draw)
pink = p5.color(255, 34, 240);
blue = p5.color(23, 200, 255);
neon = p5.color(10, 220, 255);
prv = [0,0,0];         // previous cc values (written in draw, never read)
ctr = [0,0,0];

// NOTE(review): presumably detaches the canvas from the DOM while Hydra
// keeps reading it via s0 — confirm this is intentional and not a leftover.
p5.remove()

//make electro sound go up with the other one
// Allocate the cols×rows height grids; everything starts completely flat.
// terrain holds the per-frame noise heights, spikes the decaying MIDI bumps.
for (var x = 0; x < cols; x++) {
  terrain[x] = new Array(rows).fill(0);
  spikes[x] = new Array(rows).fill(0);
}

// Per-frame scene: a scrolling synthwave terrain (flat near the center line,
// amplified toward the edges), MIDI-triggered height spikes, and three
// bobbing/tumbling boxes. Reads the globals set up above plus the Hydra
// MIDI arrays cc / ccActual.
p5.draw = ()=> {
  blue = p5.color(1, 6, 40);
  m = 100;                 // max noise height before the distance multiplier
  size = p5.random(2,5);   // footprint of the next big-block spike
  fade = 0.8;              // per-frame decay factor for existing spikes
  //p5.lights();
  p5.background(blue);
  // Camera: push the plane down/back and tilt it toward the horizon.
  p5.translate(0, 300, -100);
  p5.rotateX(42*p5.PI/72);
  //p5.rotateZ(time*p5.PI / 3);
  //p5.fill(255*p5.noise(1), 190*p5.noise(1), 150 + 200*p5.noise(1), 255);
  p5.translate(-w/2, -h/2);
  p5.noStroke();
  //p5.stroke(255, 34, 240);
  //GRID — flat reference lines under the terrain mesh
  for (var i = 0; i < cols; i++)
  {
    p5.line(i*scl, 0, i*scl, h);
  }
  for (var i = 0; i < rows; i++)
  {
    p5.line(0, i*scl, w, i*scl);
  }
  //p5.noStroke();
  // Scroll the noise field to make the terrain "fly" toward the camera.
  flying -= 0.03;
  var yoff = flying;
  for (var y = 0; y < rows; y++) {
    var xoff = 0;
    for (var x = 0; x < cols; x++) {
      // Base height from Perlin noise, plus any active MIDI spike.
      terrain[x][y] = p5.map(p5.noise(xoff, yoff), 0, 1, 0, m) + spikes[x][y];
      spikes[x][y] *= fade;  // spikes relax back to flat over time
      //
      xoff += 0.03;
    }
    yoff += 0.04;
  }
  //big blocks — cc[12] edge-triggers a size×size raised block near the center
  let cn = 12;
  if (cc[cn] != toggle){
    toggle = cc[cn];
    x = p5.int(p5.random(0.4, 0.6)*cols);
    y = p5.int(p5.random(1)*rows);
    x = p5.constrain(x, 1, cols-size-2);
    y = p5.constrain(y, 1, rows-size-2);
    //spike it up
    for(let i = 1; i < size; i++)
    {
      for(let j =1; j< size; j++)
      {
        // (fixed: this first assignment was duplicated on two identical
        // lines in the original — the dead copy has been removed)
        spikes[x+i][y] = ccActual[cn]*55;
        spikes[x+i][y+j] = ccActual[cn]*55;
        spikes[x][y+j] = ccActual[cn]*55;
      }
    }
  }
  //sharp spikes — cc[10] edge-triggers a single-cell spike anywhere
  let cn2 = 10;
  if (cc[cn2] != toggle2){
    toggle2 = cc[cn2];
    x = p5.int(p5.random(0.4, 0.6)*cols);
    y = p5.int(p5.random(1)*rows);
    //spike it up
    spikes[x][y] = 105*ccActual[cn2];
  }
  //terrain — filled triangle strips, then neon ridge lines on top
  for (var y = 0; y < rows - 1; y++) {
    //left side
    p5.fill(blue);
    //p5.noFill();
    //p5.stroke(pink);
    p5.noStroke();
    p5.beginShape(p5.TRIANGLE_STRIP);
    for (var x = 0; x < cols-1; x++) {
      // Height multiplier grows quadratically with distance from the center
      // column — this keeps the middle strip flat.
      let dist = p5.pow(x-cols/2,2)/20;
      p5.vertex(x * scl, y * scl, terrain[x][y]*dist);
      p5.vertex(x * scl, (y + 1) * scl, terrain[x][y + 1]*dist);
    }
  	p5.endShape();
    for (var x = 0; x < cols-1; x++) {
      p5.strokeWeight(10);
      p5.stroke(pink);
      if (x%10==0)
      {
        p5.stroke(neon);  // every 10th ridge line gets the accent color
      }
      let dist = p5.pow(x-cols/2,2)/20;
      p5.line(x*scl, y*scl, terrain[x][y]*dist, x*scl, (y+1)*scl, terrain[x][y+1]*dist);
      //p5.line(x*scl, y*scl, terrain[x][y]*dist, (x+1)*scl, (y)*scl, terrain[x+1][y]*dist);
    }
  }
  //translate
  p5.strokeWeight(5);
  p5.stroke(neon);
  p5.fill(pink);
  //central box — the three boxes bob and tumble on phase-shifted sines
  p5.push();
  p5.translate(w/2,2300, 70 + 40*p5.cos(flying*7-3));
  p5.rotateX(-flying*3);
  p5.box(50 + ccActual[13]*0);  // NOTE(review): *0 makes the cc term a no-op
  prv[0]=cc[12];                // NOTE(review): prv is written but never read
  p5.pop();
  //box left
  p5.push();
  p5.strokeWeight(7);
  p5.translate(w/2-100,1700, 100 + 60*p5.cos(flying*7-1));
  p5.rotateX(-flying*3 - 1);
  p5.box(50 + ccActual[13]*0);
  prv[1]=cc[11];
  p5.pop();
  //box right
  p5.strokeWeight(10);
  p5.push();
  p5.translate(w/2-60,100, 80 + 60*p5.cos(flying*7 - 6));
  p5.rotateX(-flying*3);
  p5.box(50 + ccActual[13]*0);
  p5.pop();
  //box left2
  //box center
}

//o0: raw p5 canvas, untouched
src(s0).out(o0)

//o1: dark noisy oscillator masked by the p5 scene minus a voronoi-modulated
//osc, re-added and inverted — the "drop" look. (//MY GPU CANTTT)
osc(1,2,0).color(10, 10,10).brightness(-100).modulate(noise()).mask(src(o0).sub(osc(0.9, 0.4).modulate(voronoi(20,10), 0.9))).add(src(o0)).invert().out(o1)

//final output [MAIN VISUAL]: p5 scene with the glowCircle sun layered on
//top (size driven live by MIDI cc 13), feedback-blended with itself via o3.
//option1: modulate by o0, become crystalline/transparent: v pretty
//option2: blend multiple times o3
//option3: switch source to o1, this is the main function then blend with o3.
src(o0).layer(glowCircle(31*p5.width/70, 7*p5.height/24, ()=> ccActual[13]*100+2500, ()=>0.3, 0.1 ,0.06)).blend(o3).out(o3)

render(o3)

//performance cue sheet:
//black screen
//track 1 => automatically switches
//build up => bring the sun out || sun moves
//drop the beat => sun automatically detects that
//on drop => o1
//modulate o(0) blend o3
//o1 source then hush
hush()


















 

I think I learned a lot in this reading, I never knew about the Fluxus movement, and there were a lot of names that I had to google–in a good way, a way that made me feel less alone in the practice of new media arts.

It’s also interesting to me how many musicians came from Art School, and how art and music social bubbles have intermixed internationally. In fact, the reading even mentions that people didn’t always choose to wear multiple hats by choice, but rather because the market and economy dictated it. These economic constraints and the emergence of dadaism left a very noticeable effect on individuals who joined the Fluxus movement. A lot of the artists that were mentioned in the reading made anti-art intermedia pieces that were concise and short, sometimes humorous with less focus on the aesthetic than the message. 

I don’t think there’s much to agree or disagree with in this reading. It’s like a hyperlink framework of names relevant to what we do in class, which gives us history and therefore purpose.

 

Visuals:

I really liked the mask code examples and how they interact with basic geometric shapes, like encasing chaos in one small orderly container. So I chose to use triangles with very chaotic (noise, Voronoi, screen recording) fillings. This was to allow for the readability of discrete sounds by affecting the base shapes of containers (triangles!) and complexity by using more fluid cc values to affect noise and other chaotic textures of the shapes.

Edit: After the first day of performances, I changed a lot of my visuals to be more minimalistic and readable in the ways that they interact with the music. I also had to dump many of my previous visuals without looking back, simply because they did not fit the mood of the musical score.

Music:

I think this was the hardest part for me. I had never learned to play an instrument or done anything music-related, so I felt very behind in everything related to TidalCycles. To make up for that I’ve been going to the piano room often and trying to replicate the sounds I made on the keyboard in TidalCycles. It’s been fun and very helpful because it gave me an intuition of what I wanted to go for. The idea was to make a creepy sound, something that inspired anxiety, and contrast it with sounds that brought a soft, almost hopeful joy. Eiden had a very similar project to mine in that aspect and her performance inspired many of the changes I made to mine.

A lot of times for the music I had to borrow other people’s ears to ask what was wrong with the things that bothered me and give them names (learned about dissonant notes). I listened to some soundtracks that reflected my feelings and what I wanted to make out of this piece.

LiveCodeLab is a web-based livecoding environment for 3D visuals and sound that are created on the spot, without explicitly evaluating lines of code. The project was built on top of the CodeMirror editor by Marijn Haverbeke and the live coding framework Fluxus by the Pawful collective. A lot of the infrastructure, logic and routines are inspired by web-based computer graphics libraries like three.js and processing.js.

 

I thought this project was really interesting in the way that it removes the traditional frustrations of programming from the workflow, thus making it an optimal entry point for visual artists with no programming experience. Its ease of pickup and its simple same-window, on-the-go live tutorials make it a great tool for early prototyping. In LiveCodeLab, code is evaluated as it is typed: code that doesn’t make sense is simply not executed, while the other lines are evaluated—with no delay whatsoever.

 

The Github isn’t very well documented but I’m assuming that as opposed to Hydra it isn’t based on shaders but rather a live canvas updated frame by frame through the CPU. This makes it noticeably slow when shapes become more complex (I’ve done some testing and the window crashes at 100 cubes rotating on an M1). But you have to take it for what it is: a prototyping and educational tool.

When a person holds an instrument to play, they don’t always intend to produce the resulting sounds. Sometimes they play a single random note–more so with the intent to play than the intent to create pleasant sounds–and another one follows, and another one; initial chaos that eventually becomes an orderly symphony. How can we reproduce this process through code? How can we make randomly pleasant sounds? True randomness is hard to define in reality, and I’m sure the author would argue that the very first notes played by the person in my example, aren’t truly random. He’d argue that they are rather the result of meaningful noise that loses its meaning in a musical context. A ‘random’ function, and even the seemingly natural ‘noise’, are based on randomly occurring physical phenomena–like cosmic radiation or radioactive decay. The thought that some other phenomenon that has a random frequency that resonates with music could exist is fascinating, and I would love to see its applications.

 

In the meanwhile anything generated randomly on Tidal (even with noise) sounds awful lol.

I still wonder what the point of live coding is. The authors seem thrilled by the concept because it resolves an issue of ‘authorship’. In other words, live coding puts the human in a position where it is obvious that they, and not the software, are the artist. I’m not sure if the authors really care about authorship as much as recognition though. Most artists wouldn’t mind a little pat on the back for all the tedious work that led to the final result. But the whole point of art to me is to make the audience wonder about the journey that led to the final artwork. We stand in front of art and think: what does this mean, how was this drawn? And that thinking process, this series of questions, is what we artists provide our audiences. I’m not sure if it’s worth giving that up just so that the audience understands how hard it is to create our art pieces or realizes that it’s us and not the software that’s drawing stuff.

 

The article also frames live coding as a novel way of self-expression because of the different notion of time. But I believe these same creative incentives can be reproduced in live performance without the need for risk-taking or literal coding–which I find optimistic at best and elitist at worst considering most people wouldn’t understand the code anyway.