Conveyor Belt in Cellular Automata

c

// Canvas setup and simulation state shared by RunOnce / loadImageFromFile.
// (The original declared `canvas` twice on consecutive lines — harmless in
// sloppy-mode JS but clearly a paste error; the duplicate is removed.)
var canvas = document.getElementById("canvas");
var ctx = canvas.getContext("2d");
var img;                      // <img> element the world image is loaded into
var current, prev, imageData; // current/previous generation pixel buffers

// Advance the cellular automaton by one generation and schedule the next
// tick (every 10 ms). Only the red channel (offset 0 of each RGBA pixel)
// drives the rules:
//  - a "live" cell (value 240) with 3..5 live neighbours survives unchanged;
//  - otherwise a value >= 80 decays by 80 per generation (fading trail);
//  - a faded cell (< 80) becomes live iff exactly 2 neighbours are live.
// BUG FIX: in the original, `var neighbours = 0;` had been swallowed into
// the comment on the preceding line, so `neighbours++` referenced an
// undeclared variable; it is restored as its own statement here.
function RunOnce()
{
   for (var y = 1; y < img.height - 1; ++y)
   {
      for (var x = 1; x < img.width - 1; ++x)
      {
          var n = (y * img.width + x) * 4; // pixel address (RGBA stride 4)
          var c = prev[n]; // color
          var neighbours = 0;
          // Count live (== 240) cells among the 8 surrounding pixels.
          for (var a = -1; a < 2; ++a)
              for (var b = -1; b < 2; ++b)
                 if (!(a == 0 && b == 0) && prev[((y + b) * img.width + (x + a)) * 4] == 240)
                     neighbours++;

          if (c == 240 && neighbours >= 3 && neighbours <= 5)
              continue; // live cell survives untouched

          if (c >= 80)
              current[n] = c - 80; // decay towards black
          else
              current[n] = neighbours == 2 ? 240 : 0; // birth or death
       }
    }

    ctx.putImageData(imageData, 0, 0);
    prev.set(current); // current generation becomes the next step's input
    setTimeout(function() { RunOnce(); }, 10);
}

// Handle the file-input change event: read the chosen file as a data URL,
// load it into the "world" <img> element, draw it onto the canvas, capture
// its pixels as the initial simulation state, and start the automaton.
function loadImageFromFile(event)
{
    document.getElementById("label").style.visibility = "hidden";
    img = document.getElementById("world");
    var fileReader = new FileReader();
    fileReader.onload = function(readEvent)
    {
        // The image itself loads asynchronously too; wait for it before
        // sampling pixels.
        img.onload = function()
        {
            ctx.drawImage(img, 0, 0);
            imageData = ctx.getImageData(0, 0, img.width, img.height);
            current = imageData.data;
            prev = new Uint8ClampedArray(imageData.data); // independent copy
            RunOnce();
        };
        img.src = readEvent.target.result;
    };
    fileReader.readAsDataURL(event.target.files[0]);
}
conveyor

Source image for conveyor belt example. Should be in 24bit bmp format

glider_gun

Source image for glider gun

burning_arrow

Source image for burning arrow

 

Strange Attractors

attractor

// Strange attractor renderer (Clifford-type map):
//   x' = sin(a*y) + c*cos(a*x),   y' = sin(b*x) + d*cos(b*y)
// The map is iterated 40 million times from (0, 0), accumulating how often
// each pixel is visited; the hit density is then drawn as HSL lightness
// (hue 30, darker = denser).
var canvas = document.getElementById("canvas");
var ctx = canvas.getContext("2d");
var width = 800;
var height = 800;
var pixels = Array(width).fill(0).map(_ => Array(height).fill(0.0));

// Alternative parameter sets that also produce nice attractors:
//var a = -1.4, b = 1.6, c = 1.0, d = 0.7;
//var a = 1.7, b = 1.7, c = 0.6, d = 1.2;
//var a = -1.8, b = -2.0, c = -0.5, d = -0.9;
var a = -1.4, b = 1.6, c = -1.0, d = 0.9;
//var a = 1.5, b = -1.8, c = 1.6, d = 0.9;

var x = 0, y = 0;
var max = 0.0;

// Generation pass: iterate the map and count pixel hits.
for (var i = 0; i < 40000000; ++i)
{
   var nextX = Math.sin(a * y) + c * Math.cos(a * x);
   var nextY = Math.sin(b * x) + d * Math.cos(b * y);

   // Map attractor space [-2.5, 2.5] x [-2.5, 2.5] onto pixel coordinates
   // (y axis flipped so positive y is up on screen).
   var col = Math.round((nextX + 2.5) * width / 5);
   var row = Math.round((2.5 - nextY) * height / 5);
   max = Math.max(max, ++pixels[col][row]);

   x = nextX;
   y = nextY;
}

// Drawing pass: sqrt-compressed density mapped to lightness 0..100.
for (var col = 0; col < width; ++col)
{
   for (var row = 0; row < height; ++row)
   {
       var shade = 2 * Math.round(Math.sqrt(pixels[col][row] / max) * 100.0);
       shade = Math.max(0, Math.min(shade, 100));
       ctx.fillStyle = "hsl("+ 30 + "," + 100 + "%," + (100 - shade) +"%)";
       ctx.fillRect(col, row, 1, 1);
   }
}

A Bunch of Dragon Flowers

Capture

// Julia set for c = -0.835 - 0.2321i, sampled at 300 pixels per unit over
// x in [-1.7, ...) and y in [-1, 1). Each pixel seeds z with its complex
// coordinate and iterates z <- z^2 + c; pixels whose orbit escapes are
// coloured by escape time (i counts down from 255, so late escapes are dark).
var canvas = document.getElementById("canvas");
var ctx = canvas.getContext("2d");

for (var px = 0; px < 1000; ++px)
{
   for (var py = 0; py < 600; ++py)
   {
       var c = -0.835, ci = -0.2321;
       var z = px / 300.0 - 1.7;  // real part of the seed
       var zi = py / 300.0 - 1;   // imaginary part of the seed

       for (var i = 255; i > 0; --i)
       {
           // (z + zi*j)^2 + (c + ci*j), expanded into real/imaginary parts.
           var re = z * z - zi * zi + c;
           var im = 2 * z * zi + ci;
           if (re * re + im * im > 50) // orbit escaped
           {
               ctx.fillStyle = "rgb("+ i + "," + i + "," + 255 +")";
               ctx.fillRect(px, py, 1, 1);
               break;
           }
           z = re;
           zi = im;
       }
    }
}

 

Tracing Voronoi Diagrams – The Greedy Approach

Voronoi Diagrams are pretty and useful. I use them for PCB isolation milling. This piece of code (C++/MFC) traces the Voronoi diagram of some random points. The algorithm is the simplest, lowest-performing greedy method.

// Renders the Voronoi diagram of vPoints by brute force: for every pixel of
// a 600x400 canvas, find the nearest site and paint the pixel with that
// site's colour (vColors[i] corresponds to vPoints[i]).
// Complexity is O(width * height * sites) — the simple "greedy" scan.
// Fixes vs. original: guards against empty input (vColors[0] would have been
// read out of range), uses size_t indices (no signed/unsigned mismatch),
// and drops the write-only local `pt`.
void TraceVoronoi(CDC* pDC, std::vector<CPoint>& vPoints, std::vector<COLORREF>& vColors)
{
    if (vPoints.empty())
        return; // nothing to trace

    for (int x = 0; x < 600; ++x)
    {
        for (int y = 0; y < 400; ++y)
        {
            // Larger than any squared distance on a 600x400 canvas.
            double dMinDist = 100000000;
            size_t n = 0; // index of the nearest site found so far
            for (size_t i = 0; i < vPoints.size(); ++i)
            {
                const CPoint& a = vPoints[i];
                double d = (x - a.x) * (x - a.x) + (y - a.y) * (y - a.y); // squared Euclidean
                //double d = abs(x - a.x) + abs(y - a.y); // Manhattan distance
                //double d = max(abs(x - a.x), abs(y - a.y)); // Chebyshev distance
                if (d < dMinDist)
                {
                    n = i;
                    dMinDist = d;
                }
            }
            pDC->SetPixelV(x, y, vColors[n]);
        }
    }
    // Mark the sites themselves as 3x3 black squares.
    for (size_t i = 0; i < vPoints.size(); ++i)
    {
        const CPoint& a = vPoints[i];
        pDC->FillSolidRect(a.x - 1, a.y - 1, 3, 3, 0x0);
    }
}

Output 1

Image

Output 2 (B/W)

Image

Output 3 (Manhattan Distance)

Image

Output 4 (Chebyshev Distance)

Image

Discrete Fourier Transformation (DFT) based Spectrum Tracer

DFT can be used to convert audio samples from time domain to frequency domain. That is the dancing spectrum in graphic equalizers. This piece of code (C++ with MFC) traces the spectrum of a wave file.

const double PI = 3.14159265;
const int N = 1024; // DFT point count
void TraceSpectrum(CDC* pDC)
{
char* pzFile ="D:\\in.wav";
HMMIO h = mmioOpen((LPTSTR)pzFile, NULL, MMIO_READ | MMIO_ALLOCBUF | MMIO_DENYWRITE); for (int i = 0; i < 50000; ++i)
{
double realInput[N] = { 0 }; for (int j = 0; j < N; ++j)
{
SHORT iLeft;
SHORT iRight;
mmioRead(h, (HPSTR)&iLeft, 2); // 16 bit samples
mmioRead(h, (HPSTR)&iRight, 2); // 16 bit samples realInput[j] = iLeft / 1000.0;
}
TraceDFT(realInput, pDC);
Sleep(10);
} mmioClose(h, 0);
}

void TraceDFT(double* realInput, CDC* pDC) {
double realOutput[N] = { 0 };
double imagOutput[N] = { 0 }; for (int k = 0; k < N / 2; ++k) // Output is symmetric, hence N/2
{
for (int n = 0; n < N; ++n)
{
// Theory: F(k) = sigma(T(n) * (cos(x) - j*sin(x))) Where x = 2PIkn/N double x = 2 * PI * k * n / N;
double re = realInput[n] * cos(x);
double im = realInput[n] * -sin(x);
realOutput[k] += re;
imagOutput[k] += im;
}
}

// To Decibels
double spectrum[N / 2];
for (int i = 0; i < N / 2; ++i)
{
double d = sqrt(realOutput[i] * realOutput[i] + imagOutput[i] * imagOutput[i]);
d = d < 1 ? 0 : 20 * log(d);
spectrum[i] = min(d, 200);
}
// Draw the graph
pDC->FillSolidRect(0, 0, 1000, 500, 0xffffff);
for (int i = 0; i < N / 2; ++i)
pDC->FillSolidRect(20 + i, 200 - spectrum[i], 1, spectrum[i], 0x0);
}

Output

Image

Ray Tracer: Hello world

Ray tracing is a deep and complex subject. It needs a reasonable amount of mathematics to write even a simple one. But even a simple one still generates awesome output. The basic theory is to simulate the travel of light rays from one or more light sources to the eye, subject to reflections, refractions and deflections by the various objects along the way. To reduce the computational complexity, a view plane is assumed in front of the eye (or camera), and the color of each pixel in the view is calculated based on the amount and hue of the light that may arrive there.

Image

A vector class is essential to write a ray tracer.

// Minimal 3D vector class for the ray tracer.
// Fixes vs. original: operators and accessors now take const references and
// are const member functions. The original signatures (e.g. operator+(vec&))
// could not bind temporaries, so expressions used later in this article such
// as `e + u * t` are ill-formed under standard C++ (they only compiled with
// compiler extensions). fabs() replaces abs() to avoid the int overload
// silently truncating the doubles.
class vec
{
public:
    vec()                   { x = y = z = 0; }
    vec(double _x, double _y, double _z) { x = _x; y = _y; z = _z; }

    vec operator-(const vec& rhs) const { return vec(x - rhs.x, y - rhs.y, z - rhs.z); }
    vec operator-()               const { return vec(-x, -y, -z); }
    vec operator+(const vec& rhs) const { return vec(x + rhs.x, y + rhs.y, z + rhs.z); }
    vec operator*(double t)       const { return vec(x * t, y * t, z * t); }
    double dot(const vec& r)      const { return x * r.x + y * r.y + z * r.z; }
    vec cross(const vec& r)       const { return vec(y * r.z - z * r.y, z * r.x - x * r.z
                                                     , x * r.y - y * r.x); }
    double length()               const { return sqrt(x * x + y * y + z * z); }

    // Component-wise approximate equality within eps.
    bool is_equal(const vec& r, double eps) const { return fabs(x - r.x) < eps 
                                                        && fabs(y - r.y) < eps 
                                                        && fabs(z - r.z) < eps; }

    // Scale to unit length. Undefined for the zero vector (asserted).
    void normalize()
    {
        double d = length();
        assert(d);
        x /= d;
        y /= d;
        z /= d;
    }

    // Angle in radians between v1 and v2; both must be non-zero.
    static double angle(vec v1, vec v2) { return acos(v1.dot(v2) 
                                                / (v1.length() * v2.length())); }

    double x;
    double y;
    double z;
};

Our first attempt is to trace a sphere illuminated by one omnidirectional light source. For that, this handy function is also required.

// Intersects the ray R = a + u*t with the sphere centred at S, radius r.
// Substituting the ray into |R - S|^2 = r^2 yields a quadratic in t; the
// smallest non-negative root (nearest hit in front of the ray origin) is
// returned through t.
// Returns false when the ray misses the sphere entirely or the sphere lies
// behind the origin. Improvements vs. original: A = u.u (so u no longer has
// to be unit length), the unused far root is now used as a fallback when the
// origin is inside the sphere, and hits behind the origin are rejected
// instead of silently returning a negative t.
bool IntersectLineAndSphere(vec a, vec u, vec S, double r, double& t)
{
    // Quadratic coefficients of A*t^2 + B*t + C = 0.
    double A = u.dot(u);
    double B = 2 * (u.x * (a.x - S.x) + u.y * (a.y - S.y) + u.z * (a.z - S.z));
    double C = (a.x - S.x) * (a.x - S.x) + (a.y - S.y) * (a.y - S.y) 
               + (a.z - S.z) * (a.z - S.z) - r * r;
    double D = B * B - 4 * A * C;
    if (D < 0)
        return false; // no real roots: ray misses the sphere

    double t1 = (-B + sqrt(D)) / (2 * A); // far intersection
    double t2 = (-B - sqrt(D)) / (2 * A); // near intersection (t2 <= t1)
    if (t1 < 0)
        return false; // both intersections behind the ray origin

    t = t2 >= 0 ? t2 : t1; // nearest hit at or in front of the origin
    return true;
}

Rays (straight lines) are represented in parametric form R = a + ut, i.e. x = x0 + dx * t, y = y0 + dy * t, z = z0 + dz * t, where t is the parameter that varies along the line. The above function returns the parameter value at the nearest intersection point.

Step 1

Lets forget the light source. Just check where the sphere is.

// Step 1: silhouette only. For every view-port pixel, shoot a ray from the
// eye through the pixel; paint grey (100) where it hits the sphere, black
// elsewhere. No lighting yet.
void Trace(CDC* pDC)
{
    const double PI = 3.14159;
    const double eps = 0.001;

    // Eye or camera at the origin.
    vec e(0, 0, 0);

    // View port: bottom-left corner and size in pixels, at depth z = 200.
    vec vp0(-200, -200, 200);
    int vpcx = 400;
    int vpcy = 400;

    // Sphere: centre and radius.
    vec S(0, 0, 450);
    double r = 300;

    for (int vpy = vp0.y; vpy < vp0.y + vpcy; ++vpy)
    {
        for (int vpx = vp0.x; vpx < vp0.x + vpcx; ++vpx)
        {
            // Unit direction from the eye through this view-port pixel.
            vec u(vpx - e.x, vpy - e.y, vp0.z - e.z);
            u.normalize();

            double t;
            int color = IntersectLineAndSphere(e, u, S, r, t) ? 100 : 0;

            pDC->SetPixel(vpcx / 2 - vpx, vpcy / 2 - vpy
                          , RGB(color, color, color));
        }
    }
}

Image

Step 2

Easily decorate the background with a checker board pattern.

// Step 2: same as step 1, except pixels that miss the sphere get an 8x8
// checker-board background instead of plain black. Elided lines ("...") are
// identical to step 1.
// Fix: the stray "=>" change-markers left over from the article formatting
// corrupted the code and are removed.
void Trace(CDC* pDC)
{
    ...

    for (int vpy = vp0.y; vpy < vp0.y + vpcy; ++vpy)
    {
        for (int vpx = vp0.x; vpx < vp0.x + vpcx; ++vpx)
        {
            ...

            double t;
            if (IntersectLineAndSphere(e, u, S, r, t))
            {
                 int color = 100;
                 pDC->SetPixel(vpcx / 2 - vpx, vpcy / 2 - vpy
                               , RGB(color, color, color));
            }
            else // no intersection
            {
                 // Checker pattern: compare the parity of the 8-pixel cell
                 // indices along x and y.
                 bool dark = (int)(vpx - vp0.x) / 8 % 2 
                                    != (int)(vpy - vp0.y) / 8 % 2;
                 pDC->SetPixel(vpcx / 2 - vpx, vpcy / 2 - vpy
                               , dark ? 0x0 : 0x202020); // grid
            }
        }
    }
}

Image

Step 3

The sphere is illuminated by the ambient light only. i.e. each point on sphere has a constant brightness value. That light is emitted radially. So amount of light coming from the points at boundary is less than that near the center.

// Step 3: ambient shading. Brightness falls off towards the silhouette,
// where the surface normal becomes nearly perpendicular to the viewing ray.
// Elided lines ("...") are identical to step 1.
// Fix: the stray "=>" change-markers left over from the article formatting
// corrupted the code and are removed.
void Trace(CDC* pDC)
{
    ...

    for (int vpy = vp0.y; vpy < vp0.y + vpcy; ++vpy)
    {
        for (int vpx = vp0.x; vpx < vp0.x + vpcx; ++vpx)
        {
            ...

            double t;
            if (IntersectLineAndSphere(e, u, S, r, t))
            {
                 vec vi = e + u * t; // intersection point
                 vec vn = (S - vi) * (1 / r); // surface normal
                 vn.normalize();

                 // Scale brightness by how far the ray/normal angle is from
                 // 90 degrees: head-on is bright, grazing is dark.
                 double c = (PI / 2 - abs(vec::angle(vn, u))) / (PI / 2) * 256;
                 int color = min(c / 3, 255);
                 pDC->SetPixel(vpcx / 2 - vpx, vpcy / 2 - vpy
                               , RGB(color, color, color));
            }
            else // no intersection
            {
                 ...
            }
        }
    }
}

Image

Step 4

Lights ON. Check whether the light hits the point that we are interested on. Calculate the amount of light that is reflected towards the selected pixel in the view port. Ambient light turned off to see the reflected light only. Various magic numbers are required to tune the light.

// Step 4: reflected light from one omnidirectional source, ambient light
// turned off. For each hit point a ray is cast from the light towards it;
// if the nearest sphere intersection on that ray is the hit point itself,
// the point is directly lit (not shadowed by the sphere), and brightness
// is derived from the angle between the eye ray and the reflected light
// direction. The constants (256, 500, 1.5) are hand-tuned for the look.
// Elided lines ("...") are identical to step 1.
void Trace(CDC* pDC)
{
    ...
    // Omnidirectional Light
    vec L(-400, 400, -600);
    for (int vpy = vp0.y; vpy < vp0.y + vpcy; ++vpy)
    {
        for (int vpx = vp0.x; vpx < vp0.x + vpcx; ++vpx)
        {
             // For each pixel in view port, the unit vector of ray is
             vec u(vpx - e.x, vpy - e.y, vp0.z - e.z);
             u.normalize();
             double t = 0;
             if (IntersectLineAndSphere(e, u, S, r, t))
             {
                  vec vi = e + u * t; // intersection point
                  vec vn = (S - vi) * (1 / r); // surface normal
                  vn.normalize();
                  int color = 0;
                  // Check if light hits this point.

                  // Unit vector of light
                  vec uL = vi - L;
                  uL.normalize();
                  if (IntersectLineAndSphere(L, uL, S, r, t))
                  {
                      // Nearest point the light ray meets on the sphere; the
                      // target point is lit only if that is the point itself
                      // (within eps) — otherwise the sphere shadows it.
                      vec vL1 = L + uL * t; // nearest point to the light
                      if (vL1.is_equal(vi, eps)) // light hits here
                      {
                          // reflected light: mirror of the light direction
                          // about the surface normal, 2*(uL.n)*n - uL
                          vec vr = vn * uL.dot(vn) * 2 - uL;
                          // Brightness grows as the eye ray aligns with the
                          // reflection; the polynomial sharpens the highlight.
                          double c = (PI - abs(vec::angle(u, vr))) / PI * 256;
                          c = c * c / 256 + c * c * c / 256 / 500;
                          color += min(c / 1.5, 255);
                       }
                   }
                   pDC->SetPixel(vpcx / 2 - vpx, vpcy / 2 - vpy
                                 , RGB(color, color, color));
              }
              else // no intersection
              {
                   ...
              }
         }
     }
}

Image

Step 5

Image

Ambient + Reflection  Adding more objects and light sources hugely complicates the algorithm.