Final Project Update

For the final project, I have been working on creating a “personal diary simulator”. I chose the horror short story “The Horla” by Guy de Maupassant to build my project around. I used a text version and an audio reading of the story that I found online.

When the user runs the file, the sketch will run in full screen. The background is a yellowish color. The date of the current diary entry will be displayed at the top of the screen, and the text of the entry will start appearing, letter by letter, in the middle of the screen. The audio reading of the entry will also start playing.
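
At its core, the text display is a typewriter effect driven by frameCount. A minimal standalone sketch of just that piece (the entry string, sizes, and positions here are placeholders, not the project’s actual layout):

// Minimal typewriter effect: reveal one more character every other frame.
String entry = "8th May. What a lovely day!";  // placeholder text
int letterIndex = 0;

void setup() {
  size(600, 200);
  textSize(25);
  fill(0);
}

void draw() {
  background(229, 229, 144);
  if (frameCount % 2 == 0 && letterIndex < entry.length()) {
    letterIndex++;
  }
  text(entry.substring(0, letterIndex), 40, 100);
}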

I have been working on three types of interactions in the project:

Camera-based interaction: 

I used frame differencing for this interaction. The camera will be set up pointing downwards, facing a table. The interaction will be triggered when the user places their hand over the table and/or moves it. When that happens, a red strip (rectangle) will appear on the left side of the screen and keep expanding in height as long as the user’s hand is over the table. If the user removes their hand, the strip will start shrinking. If the strip grows all the way to the bottom of the screen, the background will turn red. When the user then removes their hand, the screen will slowly transition from red back to the original yellow color.

To implement: When the red screen is triggered, the audio reading of the entry will change to a deeper/demonic voice and will start transitioning back to the normal voice as the screen’s color turns yellow.
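
One way I could approximate the voice change (just a sketch, not implemented yet) is to lower the SoundFile’s playback rate while the screen is red and ease it back to 1.0 as the fade progresses; the Sound library’s rate() changes pitch along with speed. Here redFullyOn is a hypothetical flag for the strip having reached the bottom, and fadePhase is the same kind of 0–1 value as bg_phase in the code below:

// Sketch only: tie the entry audio's playback rate to the red-screen fade.
// rate() changes speed and pitch together, so 0.6 sounds slower and deeper.
void updateVoice(SoundFile entryAudio, boolean redFullyOn, float fadePhase) {
  if (redFullyOn) {
    entryAudio.rate(0.6);                       // "demonic" voice
  } else {
    entryAudio.rate(lerp(0.6, 1.0, fadePhase)); // ease back to normal as the screen turns yellow
  }
}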

Selecting a diary entry:

In this interaction, the user will be able to move through the different entries of the diary. I’m still thinking of the best way to implement it. I don’t want it to unintentionally trigger the camera-based interaction, so I want to keep it separate from the table.

To implement: I have implemented the transition between the texts of different entries, but I’m having trouble implementing the audio transition. For some reason, the function for playing the audio does not work. I still have to fix this.
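
My current guess at the problem, based on the code below: entry_switch_offset keeps counting up and is never reset when the entry changes, so the entry_switch_offset == entry_switch_threshold check in playEntry() can only ever fire once. A possible fix, sketched with the same variable names (not yet tested):

// Reset the delay counter whenever the entry changes...
void switchEntry(int newEntry) {
  prev_entry = curr_entry;
  curr_entry = newEntry;
  page_start_i = 0;
  letter_i = 0;
  entry_switch_offset = 0;   // restart the short delay before the new audio plays
}

// ...then let draw() call this every frame to swap the readings.
// (With this in place, the initial play() call in setup() could be removed.)
void playEntry(SoundFile preventry, SoundFile currentry) {
  if (preventry != currentry && preventry.isPlaying()) {
    preventry.stop();
  }
  if (entry_switch_offset == entry_switch_threshold) {
    currentry.play();
  }
}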

Vibration Interaction (maybe?): 

For this interaction, I want to play an audio file of a gasp or heavy breathing whenever vibration is detected on the table. The text/sketch screen would also shake for a brief moment.

To implement: I have not started working on this interaction. I have thought of using the camera’s captured video to detect the vibration, but I think it would interfere too much with the other camera-based interaction. I’m not sure if there is a type of sensor that can help with implementing this interaction. I will think of different ways to do it.
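
In case it helps later, here is a rough sketch of the screen shake and gasp playback, assuming some sensor (for example, a piezo element on the table read over serial) can report a vibration event; the sensor side is left open, so triggerVibration() is just a hypothetical entry point:

// Sketch only: shake the screen briefly and play a gasp when vibration is reported.
int shake_frames = 0;

void triggerVibration() {
  if (shake_frames == 0) {
    shake_frames = 20;     // shake for about a third of a second at 60 fps
    s_breathing.play();    // or a dedicated gasp SoundFile
  }
}

void applyShake() {
  // call at the top of draw(), before anything is drawn
  if (shake_frames > 0) {
    translate(random(-8, 8), random(-8, 8));
    shake_frames--;
  }
}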

User Testing:

Code (mid-debugging, sorry!):

import processing.sound.*;
import processing.video.*;

Capture video;
int vidX, vidY;
PImage prevFrame;
float motion_threshold=38;

SoundFile s_bg, s_clock, s_breathing, s_drinkwater;
SoundFile[] entries_audio;
PImage paper;
PFont font;
String[] entries;
int entries_cnt = 39;
int paper_offset_y = 0;
int page_start_i = 0;

int letter_i = 0;

int red_level = 0;

int vid_width = 640;
int vid_height = 480;

color bg_yellow = color(229, 229, 144);
color bg_red = color(183, 0, 0);
color bg_color = bg_yellow;
color text_color = color(0);
color bar_color = bg_red;
float bg_phase = 0;
float bar_phase = 0;
float text_phase = 0;

int entry_switch_offset = 0;
int entry_switch_threshold = 10;

int prev_entry = 0;
int curr_entry = 0;

void setup() {
  //size(1000,600);
  fullScreen();
  background(bg_color);

  String[] cameras = Capture.list();
  video = new Capture(this, cameras[1]);
  video.start();
  prevFrame = createImage(vid_width, vid_height, RGB); // match the camera resolution used in detectMotion()

  paper = loadImage("paper.jpg");

  font = createFont("SyneMono-Regular.ttf", 25);
  textFont(font);

  entries_audio = new SoundFile[18];
  for (int i=0; i<18; i++) {
    entries_audio[i] = new SoundFile(this, "entries_audio/"+i+".mp3");
  }
  s_bg = new SoundFile(this, "sound/Ambience/night-crickets-ambience-on-rural-property.wav");
  s_clock = new SoundFile(this, "sound/Ambience/loopable-ticking-clock.wav");
  s_breathing = new SoundFile(this, "sound/Human/breath-male.wav");
  s_drinkwater = new SoundFile(this, "sound/Human/drink-sip-and-swallow.wav");
  entries_audio[curr_entry].play();
  //s_bg.loop(1, 0.5);
  //s_clock.loop(1, 0.1);
  //s_breathing.loop();
  //s_drinkwater.loop(0.7);

  entries = new String[entries_cnt];
  for (int i=0; i<entries_cnt; i++) {
    entries[i] = readFile("entries/"+i+".txt");
  }
}

void draw() {
  background(bg_color);
  entry_switch_offset++;
  drawPaper();
  detectMotion();
  updateEntry();
  //playEntry(entries_audio[prev_entry], entries_audio[curr_entry]);
  //if (entries_audio[prev_entry].isPlaying()) {
  //  entries_audio[prev_entry].stop();
  //}
  //if (entry_switch_offset == entry_switch_threshold) {
  //  //entry_switch_offset = 0;
  //  entries_audio[curr_entry].play();
  //  println("in if");
  //}
  //entries_audio[curr_entry].play();
  //println("offset" + " " + entry_switch_offset);
  //println("threshold" + " " + entry_switch_threshold);
}

void drawPaper() {
  imageMode(CENTER);
  //image(paper, width/2, height/2, paper.width/3, paper.height/3);
  writeText(entries[curr_entry]);
}

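// Pulls the date (everything before the first period) out of the entry for the
// header, then types the body out character by character, wrapping at the right
// margin and starting a new page once the text runs past ten rows.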
void writeText(String text) {
  int x = (width/2) - (paper.width/6) + 70;
  int y = (height/2) - (paper.height/6) + 70;
  int char_width = 0;
  int char_row = 0;
  String date = "";
  int c = 0;
  while (text.charAt(c) != '.') {
    date = date + text.charAt(c);
    c++;
  }
  if (page_start_i == 0) {
    page_start_i = c+2;
  }

  pushMatrix();
  textSize(40);
  text(date, x, 80);
  popMatrix();

  pushMatrix();
  textSize(25);
  translate(x, y + paper_offset_y);
  fill(text_color);

  if (entry_switch_offset > entry_switch_threshold) {
    if (frameCount%2 == 0 && letter_i < text.length()) {
      letter_i++;
    }
    for (int i=page_start_i; i < letter_i; i++) {
      char_width += textWidth(text.charAt(i));
      text(text.charAt(i), char_width, char_row*30);

      if (x + char_width >= (width/2) + (paper.width/6) - 160 && text.charAt(i) == ' ') {
        char_row++;
        char_width = 0;
      }
      if (text.charAt(i) == '\n') {
        char_row++;
        char_width = 0;
      }
    }

    if (char_row > 10) {
      page_start_i = letter_i;
    }
  }
  popMatrix();
}

String readFile(String path) {
  String s = "";
  String[] arr = loadStrings(path);
  for (int i=0; i<arr.length; i++) {
    s = s + '\n' + arr[i];
  }
  return s;
}

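// Frame differencing: compares the current camera frame against the previous one
// and uses the average pixel difference to grow or shrink the red strip, then
// draws the strip and updates the background color.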
void detectMotion() {
  if (video.available()) {
    prevFrame.copy(video, 0, 0, vid_width, vid_height, 0, 0, vid_width, vid_height);
    prevFrame.updatePixels();
    video.read();
  }
  video.loadPixels();
  prevFrame.loadPixels();
  loadPixels();
  float totalMotion=0;
  for (int y=0; y<vid_height; y++) {
    for (int x=0; x<vid_width; x++) {
      int loc = (video.width-x-1)+(y*vid_width);
      //println(video.width);
      color pix=video.pixels[loc];
      color prevPix=prevFrame.pixels[loc];
      float r1=red(pix);
      float g1=green(pix);
      float b1=blue(pix);
      float r2=red(prevPix);
      float g2=green(prevPix);
      float b2=blue(prevPix);
      float diff=dist(r1, g1, b1, r2, g2, b2);
      totalMotion+=diff;
    }
  }
  float avgMotion=totalMotion/(vid_width*vid_height);
  if (avgMotion>motion_threshold && frameCount%2 == 0 && red_level <= height+10) {
    red_level += 8;
  } else if (frameCount%2 == 0 && red_level >= -5) {
    red_level -= 3;
  }
  //println(avgMotion);
  video.updatePixels();
  prevFrame.updatePixels();
  updatePixels();
  pushMatrix();
  barSwitch();
  fill(bar_color);
  noStroke();
  rect(300, 0, 15, red_level);
  //text(avgMotion, 200, 200);
  popMatrix();
  bgSwitch();
}

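// Keeps the background red while the strip is at full height, then slowly fades
// the background and text color back to their normal values.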
void bgSwitch() {
  if (red_level >= height) {
    bg_color = bg_red;
    text_color = color(255);
  } else if (bg_color != bg_yellow && bg_phase<1) {
    bg_color = colorFade(bg_color, bg_red, bg_yellow, bg_phase);
    text_color = colorFade(text_color, color(255), color(0), text_phase);
    bg_phase = phaseFade(bg_phase);
    text_phase = phaseFade(text_phase);
  } else {
    bg_color = bg_yellow;
    text_color = color(0);
    bg_phase = 0;
    text_phase = 0;
  }
}

void barSwitch() {
  if (red_level >= height) {
    bar_color = bg_yellow;
  } else if (bar_color != bg_red && bar_phase<1) {
    bar_color = colorFade(bar_color, bg_yellow, bg_red, bar_phase);
    bar_phase = phaseFade(bar_phase);
  } else {
    bar_color = bg_red;
    bar_phase = 0;
  }
}

color colorFade(color curr, color from, color to, float phase) {
  if (frameCount%10 == 0) {
    return lerpColor(from, to, phase);
  }
  return curr;
}

float phaseFade(float phase) {
  if (frameCount%10 == 0) {
    return phase + 0.01;
  }
  return phase;
}

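// Left/right arrow keys move between entries. Note: this runs every frame while
// a key is held down, so a single press can skip more than one entry.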
void updateEntry() {
  if (keyPressed && keyCode == LEFT && curr_entry > 0) {
    prev_entry = curr_entry;
    curr_entry--;
    //println(curr_entry);
    page_start_i = 0;
    letter_i = 0;
  }
  if (keyPressed && keyCode == RIGHT && curr_entry < entries_audio.length - 1) { // only entries with loaded audio
    prev_entry = curr_entry;
    curr_entry++;
    //println(curr_entry);
    page_start_i = 0;
    letter_i = 0;
  }
}

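// Stops the previous entry's reading and starts the current one a few frames
// after the switch (currently disabled in draw() while debugging).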
void playEntry(SoundFile preventry, SoundFile currentry) {
  if (preventry.isPlaying()) {
    preventry.stop();
  }
  if (entry_switch_offset == entry_switch_threshold) {
    currentry.play();
  }
}
