Init from given files
src/barnesHut.js | 158 lines (new file)
@@ -0,0 +1,158 @@
import constant from "./constant";
import jiggle from "./jiggle";
import {x, y} from "./xy";
import {quadtree} from "d3-quadtree";

/**
 * A refinement of the existing Barnes-Hut implementation in D3 to fit the
 * use case of the project. Previously the algorithm stored the strength on
 * each internal node; now a random child is stored on the internal node and
 * the force calculation is done between the node and that internal object if
 * the two are sufficiently far away. The check for whether the nodes are far
 * away was also changed to the one described in the original Barnes-Hut paper.
 * @return {force} calculated forces.
 */
export default function() {
  var nodes,
      node,
      alpha,
      distance = constant(300),
      theta = 0.5;

  /**
   * Constructs a quadtree at every iteration and applies the forces by
   * visiting each node in the tree.
   * @param {number} _ - controls the stopping of the particle simulation.
   */
  function force(_) {
    var i, n = nodes.length, tree = quadtree(nodes, x, y).visitAfter(accumulate);
    for (alpha = _, i = 0; i < n; ++i) {
      node = nodes[i], tree.visit(apply);
    }
  }

  /**
   * Function used during the tree construction to fill out the nodes with
   * correct data. Internal nodes acquire a random child while the leaf
   * nodes take the coordinates of their stored datum.
   * @param {quadrant} quad - node representing the quadrant in the quadtree.
   */
  function accumulate(quad) {
    var q, d, children = [];

    // For internal nodes, collect the children of the four quadrants.
    if (quad.length) {
      for (var i = 0; i < 4; ++i) {
        if ((q = quad[i]) && (d = q.data)) {
          children.push(d);
        }
      }
      // Choose a random child as the representative of this quadrant.
      quad.data = children[Math.floor(Math.random() * children.length)];
      quad.x = quad.data.x;
      quad.y = quad.data.y;
    }

    // For leaf nodes, take the coordinates of the stored datum.
    else {
      q = quad;
      q.x = q.data.x;
      q.y = q.data.y;
    }
  }

  /**
   * Applies the forces for each node. If the objects are far away the
   * approximation is made; otherwise forces are calculated directly
   * between the nodes.
   * @param {quadrant} quad - node representing the quadrant in the quadtree.
   * @param {number} x1 - lower x bound of the node.
   * @param {number} _ - lower y bound of the node.
   * @param {number} x2 - upper x bound of the node.
   * @return {boolean} - true if the approximation was applied.
   */
  function apply(quad, x1, _, x2) {

    var x = quad.data.x + quad.data.vx - node.x - node.vx,
        y = quad.data.y + quad.data.vy - node.y - node.vy,
        w = x2 - x1,
        l = Math.sqrt(x * x + y * y);

    // Apply the Barnes-Hut approximation if possible.
    // Limit forces for very close nodes; randomize direction if coincident.
    if (w / l < theta) {
      if (x === 0) x = jiggle(), l += x * x;
      if (y === 0) y = jiggle(), l += y * y;
      if (quad.data) {
        l = (l - +distance(node, quad.data)) / l * alpha;
        x *= l, y *= l;
        quad.data.vx -= x;
        quad.data.vy -= y;
        node.vx += x;
        node.vy += y;
      }
      return true;
    }

    // Otherwise, process points directly.
    else if (quad.length) return;

    // Limit forces for very close nodes; randomize direction if coincident.
    if (quad.data !== node || quad.next) {
      if (x === 0) x = jiggle(), l += x * x;
      if (y === 0) y = jiggle(), l += y * y;
    }

    do if (quad.data !== node) {
      l = (l - +distance(node, quad.data)) / l * alpha;
      x *= l, y *= l;
      quad.data.vx -= x;
      quad.data.vy -= y;
      node.vx += x;
      node.vy += y;
    } while (quad = quad.next);
  }

  /**
   * Calculates the stress: the normalised difference between the
   * high-dimensional distance and the realised layout distance.
   * The lower the stress, the better the layout.
   * @return {number} - stress of the layout.
   */
  function getStress() {
    var totalDiffSq = 0, totalHighDistSq = 0;
    for (var i = 0, source, target, realDist, highDist; i < nodes.length; i++) {
      for (var j = 0; j < nodes.length; j++) {
        if (i !== j) {
          source = nodes[i], target = nodes[j];
          realDist = Math.hypot(target.x - source.x, target.y - source.y);
          highDist = +distance(nodes[i], nodes[j]);
          totalDiffSq += Math.pow(realDist - highDist, 2);
          totalHighDistSq += highDist * highDist;
        }
      }
    }
    return Math.sqrt(totalDiffSq / totalHighDistSq);
  }

  // API for initializing the algorithm, setting parameters and querying
  // metrics.
  force.initialize = function(_) {
    nodes = _;
  };

  force.distance = function(_) {
    return arguments.length ? (distance = typeof _ === "function" ? _ : constant(+_), force) : distance;
  };

  force.theta = function(_) {
    return arguments.length ? (theta = _, force) : theta;
  };

  force.stress = function() {
    return getStress();
  };

  return force;
}
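For orientation, the factory above follows the usual d3-force plugin pattern: configure distance and theta, hand the force to a simulation, and query stress() once it settles. A minimal sketch, assuming the module is exposed as d3.forceBarnesHut after bundling; that name, data, and hdDistance are not part of this commit:

var simulation = d3.forceSimulation()
    .force("barnesHut", d3.forceBarnesHut()
        .distance(function (a, b) { return hdDistance(a, b); }) // desired layout distance per pair
        .theta(0.5))                                            // approximation threshold: w / l < theta
    .nodes(data)
    .on("end", function () {
      // Lower stress means the 2D layout better preserves the given distances.
      console.log("stress:", simulation.force("barnesHut").stress());
    });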
src/constant.js | 8 lines (new file)
@@ -0,0 +1,8 @@
/**
 * @return a function that always returns the constant x.
 */
export default function(x) {
  return function() {
    return x;
  };
}
src/hybridSimulation.js | 154 lines (new file)
@@ -0,0 +1,154 @@
import { dispatch } from "d3-dispatch";
import constant from "./constant";
import interpolation from "./interpolation";
import interpolationPivots from "./interpolationPivots";

// Hybrid layout: lay out a small random sample first, interpolate the
// remaining nodes around it, then refine everything with a short full
// simulation. `props` and `norm` below are free variables the embedding
// application is expected to provide for the distance function.
export default function (nodes) {

  var hybrid,
      fullSimulation,
      distance = constant(300),
      MULTIPLIER = 50,
      PIVOTS = false,
      NUMPIVOTS = 3,
      SAMPLE_ITERATIONS = 300,
      FULL_ITERATIONS = 20,
      neighbourSize = 6,
      sampleSize = 3,
      event = dispatch("sampleTick", "fullTick", "startFull", "end");

  var sets = sampleFromNodes(nodes, nodes.length, Math.sqrt(nodes.length));
  var sample = sets.sample;
  var remainder = sets.remainder;
  var interpSubset = sampleFromNodes(sample, sample.length, Math.sqrt(sample.length)).sample;

  var sampleSimulation = d3.forceSimulation()
      .alphaDecay(1 - Math.pow(0.001, 1 / SAMPLE_ITERATIONS));

  sampleSimulation
      .force("neighbourSampling", d3.forceNeighbourSampling()
          .distance(function (s, t) {
            return distance(s, t, props, norm) * MULTIPLIER;
          })
          .neighbourSize(neighbourSize)
          .sampleSize(sampleSize))
      .nodes(sample)
      .on("tick", function () {
        event.call("sampleTick", sampleSimulation);
      })
      .on("end", ended);

  function ended() {
    if (PIVOTS) {
      interpolationPivots(sample, remainder, interpSubset, NUMPIVOTS, distance);
    } else {
      interpolation(sample, remainder, interpSubset, distance);
    }

    fullSimulation = d3.forceSimulation()
        .alphaDecay(1 - Math.pow(0.001, 1 / FULL_ITERATIONS));

    event.call("startFull", fullSimulation);

    fullSimulation
        .force("neighbourSampling", d3.forceNeighbourSampling()
            .distance(function (s, t) {
              return distance(s, t, props, norm) * MULTIPLIER;
            })
            .neighbourSize(neighbourSize)
            .sampleSize(sampleSize))
        .nodes(nodes)
        .on("tick", function () {
          event.call("fullTick", fullSimulation);
        })
        .on("end", function () {
          event.call("end", fullSimulation);
        });
  }

  return hybrid = {
    distance: function (_) {
      return arguments.length ? (distance = typeof _ === "function" ? _ : constant(+_), hybrid) : distance;
    },

    stop: function () {
      if (typeof sampleSimulation !== 'undefined') {
        sampleSimulation.stop();
      }
      if (typeof fullSimulation !== 'undefined') {
        fullSimulation.stop();
      }
      return hybrid;
    },

    pivots: function (_) {
      return arguments.length ? (PIVOTS = _, hybrid) : PIVOTS;
    },

    numPivots: function (_) {
      return arguments.length ? (NUMPIVOTS = +_, hybrid) : NUMPIVOTS;
    },

    multiplier: function (_) {
      return arguments.length ? (MULTIPLIER = +_, hybrid) : MULTIPLIER;
    },

    sampleIterations: function (_) {
      return arguments.length ? (SAMPLE_ITERATIONS = +_, hybrid) : SAMPLE_ITERATIONS;
    },

    fullIterations: function (_) {
      return arguments.length ? (FULL_ITERATIONS = +_, hybrid) : FULL_ITERATIONS;
    },

    neighbourSize: function (_) {
      return arguments.length ? (neighbourSize = +_, hybrid) : neighbourSize;
    },

    sampleSize: function (_) {
      return arguments.length ? (sampleSize = +_, hybrid) : sampleSize;
    },

    on: function (name, _) {
      return arguments.length > 1 ? (event.on(name, _), hybrid) : event.on(name);
    },

    sample: function (_) {
      return arguments.length ? (sample = _, hybrid) : sample;
    },

    remainder: function (_) {
      return arguments.length ? (remainder = _, hybrid) : remainder;
    },

    stress: function () {
      return fullSimulation.force("neighbourSampling").stress();
    }
  };

}

function sampleFromNodes(nodes, max, size) {
  var randElements = [];

  for (var i = 0; i < size; ++i) {
    var rand = nodes[Math.floor((Math.random() * max))];
    // If rand is already in the random list, ignore it and draw a new value.
    while (randElements.includes(rand)) {
      rand = nodes[Math.floor((Math.random() * max))];
    }
    randElements.push(rand);
  }

  var remainder = nodes.filter(function (node) {
    return !randElements.includes(node);
  });

  return {
    sample: randElements,
    remainder: remainder
  };
}
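As a usage sketch: the factory above takes the full node array, runs the sample simulation immediately, interpolates the remainder when it ends, and then drives the full simulation, reporting progress through the dispatched events. The import path, data, hdDistance, and redraw are assumptions of the example:

import hybridSimulation from "./hybridSimulation";

var hybrid = hybridSimulation(data)
    .distance(function (a, b) { return hdDistance(a, b); }) // high-dimensional distance per pair
    .pivots(true)                                           // use the pivot-based interpolation variant
    .numPivots(3)
    .on("sampleTick", redraw)                               // sample layout is being refined
    .on("startFull", redraw)                                // remainder has been interpolated
    .on("end", function () {
      console.log("final stress:", hybrid.stress());
    });

// hybrid.stop() halts whichever simulation is currently running.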
src/interpolation.js | 195 lines (new file)
@@ -0,0 +1,195 @@
// Brute-force interpolation: every remainder node is placed on a circle
// around its nearest neighbour in the already laid-out sample.
// `props` and `norm` are free variables expected from the embedding
// application, as in hybridSimulation.js.
export default function(sampleSet, remainderSet, interpSubset, distanceFunction) {
  var distance = distanceFunction;
  // var distance = calculateEuclideanDistance;

  for (var i = 0; i < remainderSet.length; i++) {
    var node = remainderSet[i],
        minNode = sampleSet[0],
        minDist = 0,
        sampleCache = [];

    minDist = distance(node, minNode, props, norm);

    // Find the nearest laid-out sample node in high-dimensional space.
    for (var j = 1, sample; j < sampleSet.length; j++) {
      sample = sampleSet[j];
      if ((sample !== node) && (distance(node, sample, props, norm) < minDist)) {
        minDist = distance(node, sample, props, norm);
        minNode = sample;
      }
    }

    // Cache the desired distances to the interpolation subset.
    for (var k = 0; k < interpSubset.length; k++) {
      sampleCache[k] = distance(node, interpSubset[k], props, norm);
    }
    var radius = distance(node, minNode, props, norm);
    placeNearToNearestNeighbour(node, minNode, interpSubset, sampleCache, radius);
  }
}

// Places the node on the circle of the given radius around its nearest
// neighbour, at the angle that minimises the summed distance error against
// the interpolation subset.
function placeNearToNearestNeighbour(node, minNode, sample, sampleCache, radius) {
  var dist0 = 0.0,
      dist90 = 0.0,
      dist180 = 0.0,
      dist270 = 0.0,
      lowBound = 0.0,
      highBound = 0.0;

  dist0 = sumDistToSample(node, centerPoint(0, radius, minNode.x, minNode.y), sample, sampleCache);
  dist90 = sumDistToSample(node, centerPoint(90, radius, minNode.x, minNode.y), sample, sampleCache);
  dist180 = sumDistToSample(node, centerPoint(180, radius, minNode.x, minNode.y), sample, sampleCache);
  dist270 = sumDistToSample(node, centerPoint(270, radius, minNode.x, minNode.y), sample, sampleCache);

  // Determine the closest quadrant.
  if (dist0 == dist180) {
    if (dist90 > dist270)
      lowBound = highBound = 270;
    else
      lowBound = highBound = 90;

  } else if (dist90 == dist270) {
    if (dist0 > dist180)
      lowBound = highBound = 180;
    else
      lowBound = highBound = 0;
  } else if (dist0 > dist180) {
    if (dist90 > dist270) {
      lowBound = 180;
      highBound = 270;
    } else {
      lowBound = 90;
      highBound = 180;
    }
  } else {
    if (dist90 > dist270) {
      lowBound = 270;
      highBound = 360;
    } else {
      lowBound = 0;
      highBound = 90;
    }
  }

  var angle = binarySearch(lowBound, highBound, minNode.x, minNode.y, radius, node, sample, sampleCache);
  var newPoint = centerPoint(angle, radius, minNode.x, minNode.y);

  node.x = newPoint.x;
  node.y = newPoint.y;

  // Optional relaxation towards the sample, currently disabled:
  // for (var i = 0; i < 20; i++) {
  //   var forces = sumForcesToSample(node, sample, sampleCache);
  //   node.x += forces.x;
  //   node.y += forces.y;
  // }
}

// Point on the circle of the given radius around (posX, posY) at the given
// angle in degrees.
function centerPoint(angle, radius, posX, posY) {
  var x = posX + Math.cos(toRadians(angle)) * radius;
  var y = posY + Math.sin(toRadians(angle)) * radius;

  return {
    x: x,
    y: y
  };
}

function toRadians(degrees) {
  return degrees * (Math.PI / 180);
}

// Summed absolute error between the realised distances from `point` to the
// sample and the desired (cached) high-dimensional distances.
function sumDistToSample(node, point, sample, sampleCache) {
  var total = 0.0;

  for (var i = 0; i < sample.length; i++) {
    var s = sample[i];
    var realDist = Math.hypot(s.x - point.x, s.y - point.y);
    var desDist = sampleCache[i];
    total += Math.abs(realDist - desDist);
  }

  return total;
}

// Average spring force pulling the node towards its desired distances from
// the sample. Only used by the disabled relaxation loop above.
function sumForcesToSample(node, sample, sampleCache) {
  var x = 0,
      y = 0,
      len = 0,
      dist = 0,
      force,
      SPRING_FORCE = 0.7;

  for (var i = 0, unitX, unitY; i < sample.length; i++) {
    var s = sample[i];
    if (s !== node) {
      unitX = s.x - node.x;
      unitY = s.y - node.y;

      // Normalize coordinates.
      len = Math.sqrt(unitX * unitX + unitY * unitY);
      unitX /= len;
      unitY /= len;

      var realDist = len;
      var desDist = sampleCache[i];
      dist += realDist - desDist;
      force = (SPRING_FORCE * dist);

      x += unitX * force;
      y += unitY * force;
    }
  }

  x *= (1.0 / sample.length);
  y *= (1.0 / sample.length);

  return {
    x: x,
    y: y
  };
}

// Binary search over the angle in [lb, hb] for the position on the circle
// that minimises the summed distance error.
function binarySearch(lb, hb, x, y, r, node, sample, sampleCache) {
  while (lb <= hb) {
    var mid = Math.round((lb + hb) / 2);

    if ((mid === lb) || (mid === hb)) {
      if (sumDistToSample(node, centerPoint(lb, r, x, y), sample, sampleCache) >=
          sumDistToSample(node, centerPoint(hb, r, x, y), sample, sampleCache)) {
        return hb;
      } else {
        return lb;
      }
    } else {
      var distMidLeft = sumDistToSample(node, centerPoint(mid + 1, r, x, y), sample, sampleCache);
      var distMidRight = sumDistToSample(node, centerPoint(mid - 1, r, x, y), sample, sampleCache);
      var distMid = sumDistToSample(node, centerPoint(mid, r, x, y), sample, sampleCache);

      if (distMid > distMidLeft) {
        lb = mid + 1;
      } else if (distMid > distMidRight) {
        hb = mid - 1;
      } else {
        return mid;
      }
    }
  }

  return -1;
}
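The placement step above is purely geometric: a remainder node is pinned to a circle of the desired radius around its nearest sampled neighbour, and the angle on that circle is chosen by probing the four quadrants and then binary-searching inside the best one. A small self-contained sketch of the circle geometry (the helpers mirror the ones above; the numbers are illustrative only):

function toRadians(deg) { return deg * (Math.PI / 180); }

// Point at `angle` degrees on the circle of `radius` around (posX, posY).
function centerPoint(angle, radius, posX, posY) {
  return {
    x: posX + Math.cos(toRadians(angle)) * radius,
    y: posY + Math.sin(toRadians(angle)) * radius
  };
}

// Nearest sampled neighbour at (100, 50), desired distance 20:
console.log(centerPoint(0, 20, 100, 50));   // { x: 120, y: 50 }
console.log(centerPoint(90, 20, 100, 50));  // { x: ~100, y: 70 }  (up to floating point)
console.log(centerPoint(180, 20, 100, 50)); // { x: 80, y: ~50 }
// placeNearToNearestNeighbour() keeps whichever angle minimises
// sumDistToSample() against the interpolation subset.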
src/interpolationPivots.js | 287 lines (new file)
@@ -0,0 +1,287 @@
// Pivot-based interpolation: like interpolation.js, but the nearest laid-out
// neighbour of each remainder node is found through pivot buckets instead of
// a brute-force scan. `props` and `norm` are free variables expected from the
// embedding application.
export default function(sampleSet, remainderSet, interpSubset, nPivots, distanceFunction) {
  var distance = distanceFunction;

  // Pivot-based parent finding.
  var numBuckets = Math.floor(Math.sqrt(sampleSet.length)),
      numPivots = nPivots,
      parents = [],
      maxDists = [],
      bucketWidths = [],
      pivotsBuckets = [];

  console.log("Parents, pivots=", numPivots);

  var pivots = createRandomSample(sampleSet.concat(remainderSet), sampleSet.length, numPivots);

  for (var i = 0; i < numPivots; i++) {
    pivotsBuckets[i] = [];
    for (var j = 0; j < numBuckets; j++) {
      pivotsBuckets[i][j] = [];
    }
  }

  // Pre-processing: distance of every sample node to every pivot.
  var fullDists = [];
  for (var i = 0; i < sampleSet.length; i++) {
    fullDists[i] = [];
  }

  for (var j = 0, maxDist = -1; j < numPivots; j++) {
    var c1 = pivots[j];
    for (var i = 0; i < sampleSet.length; i++) {
      var c2 = sampleSet[i];
      if (c1 !== c2) {
        var dist = distance(c1, c2, props, norm);
        if (dist > maxDist) {
          maxDist = dist;
        }
        fullDists[i][j] = dist;
      } else {
        fullDists[i][j] = 0.0001;
      }
    }
    maxDists.push(maxDist);
    bucketWidths.push(maxDist / numBuckets);
  }

  // Bin every sample node, per pivot, into the bucket matching its distance
  // to that pivot.
  for (var j = 0; j < numPivots; j++) {
    var bucketWidth = bucketWidths[j];
    for (var i = 0; i < sampleSet.length; i++) {
      var tmp = pivotsBuckets[j][Math.floor((fullDists[i][j] - 0.0001) / bucketWidth)];
      tmp.push(sampleSet[i]);
    }
  }

  for (var i = 0; i < remainderSet.length; i++) {
    var node = remainderSet[i],
        minNode = sampleSet[0],
        minDist = 0,
        sampleCache = [];

    // Pivot-based parent search: only scan the bucket that matches this
    // node's distance to each pivot.
    var clDist = Number.MAX_VALUE;
    for (var p = 0; p < numPivots; p++) {
      var comp = pivots[p];
      var bucketWidth = bucketWidths[p];
      if (node !== comp) {
        var dist = distance(node, comp, props, norm);
        var bNum = Math.floor((dist - 0.0001) / bucketWidth);
        if (bNum >= numBuckets) {
          bNum = numBuckets - 1;
        } else if (bNum < 0) {
          bNum = 0;
        }
        var bucketContents = pivotsBuckets[p][bNum];
        for (var w = 0; w < bucketContents.length; w++) {
          var c1 = bucketContents[w];
          if (c1 !== node) {
            dist = distance(c1, node, props, norm);
            if (dist <= clDist) {
              clDist = dist;
              minNode = bucketContents[w];
            }
          }
        }
      }
    }

    // Cache the desired distances to the interpolation subset and place the
    // node, as in interpolation.js.
    for (var k = 0; k < interpSubset.length; k++) {
      sampleCache[k] = distance(node, interpSubset[k], props, norm);
    }
    var radius = distance(node, minNode, props, norm);
    placeNearToNearestNeighbour(node, minNode, interpSubset, sampleCache, radius);
  }
}

// Places the node on the circle of the given radius around its nearest
// neighbour, at the angle that minimises the summed distance error against
// the interpolation subset.
function placeNearToNearestNeighbour(node, minNode, sample, sampleCache, radius) {
  var dist0 = 0.0,
      dist90 = 0.0,
      dist180 = 0.0,
      dist270 = 0.0,
      lowBound = 0.0,
      highBound = 0.0;

  dist0 = sumDistToSample(node, centerPoint(0, radius, minNode.x, minNode.y), sample, sampleCache);
  dist90 = sumDistToSample(node, centerPoint(90, radius, minNode.x, minNode.y), sample, sampleCache);
  dist180 = sumDistToSample(node, centerPoint(180, radius, minNode.x, minNode.y), sample, sampleCache);
  dist270 = sumDistToSample(node, centerPoint(270, radius, minNode.x, minNode.y), sample, sampleCache);

  // Determine the closest quadrant.
  if (dist0 == dist180) {
    if (dist90 > dist270)
      lowBound = highBound = 270;
    else
      lowBound = highBound = 90;

  } else if (dist90 == dist270) {
    if (dist0 > dist180)
      lowBound = highBound = 180;
    else
      lowBound = highBound = 0;
  } else if (dist0 > dist180) {
    if (dist90 > dist270) {
      lowBound = 180;
      highBound = 270;
    } else {
      lowBound = 90;
      highBound = 180;
    }
  } else {
    if (dist90 > dist270) {
      lowBound = 270;
      highBound = 360;
    } else {
      lowBound = 0;
      highBound = 90;
    }
  }

  var angle = binarySearch(lowBound, highBound, minNode.x, minNode.y, radius, node, sample, sampleCache);
  var newPoint = centerPoint(angle, radius, minNode.x, minNode.y);

  node.x = newPoint.x;
  node.y = newPoint.y;

  // Optional relaxation towards the sample, currently disabled:
  // for (var i = 0; i < 20; i++) {
  //   var forces = sumForcesToSample(node, sample, sampleCache);
  //   node.x += forces.x;
  //   node.y += forces.y;
  // }
}

// Point on the circle of the given radius around (posX, posY) at the given
// angle in degrees.
function centerPoint(angle, radius, posX, posY) {
  var x = posX + Math.cos(toRadians(angle)) * radius;
  var y = posY + Math.sin(toRadians(angle)) * radius;

  return {
    x: x,
    y: y
  };
}

function toRadians(degrees) {
  return degrees * (Math.PI / 180);
}

// Summed absolute error between the realised distances from `point` to the
// sample and the desired (cached) high-dimensional distances.
function sumDistToSample(node, point, sample, sampleCache) {
  var total = 0.0;

  for (var i = 0; i < sample.length; i++) {
    var s = sample[i];
    var realDist = Math.hypot(s.x - point.x, s.y - point.y);
    var desDist = sampleCache[i];
    total += Math.abs(realDist - desDist);
  }

  return total;
}

// Average spring force pulling the node towards its desired distances from
// the sample. Only used by the disabled relaxation loop above.
function sumForcesToSample(node, sample, sampleCache) {
  var x = 0,
      y = 0,
      len = 0,
      dist = 0,
      force,
      SPRING_FORCE = 0.7;

  for (var i = 0, unitX, unitY; i < sample.length; i++) {
    var s = sample[i];
    if (s !== node) {
      unitX = s.x - node.x;
      unitY = s.y - node.y;

      // Normalize coordinates.
      len = Math.sqrt(unitX * unitX + unitY * unitY);
      unitX /= len;
      unitY /= len;

      var realDist = len;
      var desDist = sampleCache[i];
      dist += realDist - desDist;
      force = (SPRING_FORCE * dist);

      x += unitX * force;
      y += unitY * force;
    }
  }

  x *= (1.0 / sample.length);
  y *= (1.0 / sample.length);

  return {
    x: x,
    y: y
  };
}

// Draws `size` distinct random elements from `nodes`, considering only the
// first `max` indices.
function createRandomSample(nodes, max, size) {
  var randElements = [];

  for (var i = 0; i < size; ++i) {
    // Stop when no new elements can be found.
    if (randElements.length >= nodes.length) {
      break;
    }
    var rand = Math.floor((Math.random() * max));
    // If the drawn element is already in the random list,
    // ignore it and draw a new value.
    while (randElements.includes(nodes[rand])) {
      rand = Math.floor((Math.random() * max));
    }
    randElements.push(nodes[rand]);
  }

  return randElements;
}

// Binary search over the angle in [lb, hb] for the position on the circle
// that minimises the summed distance error.
function binarySearch(lb, hb, x, y, r, node, sample, sampleCache) {
  while (lb <= hb) {
    var mid = Math.round((lb + hb) / 2);

    if ((mid === lb) || (mid === hb)) {
      if (sumDistToSample(node, centerPoint(lb, r, x, y), sample, sampleCache) >=
          sumDistToSample(node, centerPoint(hb, r, x, y), sample, sampleCache)) {
        return hb;
      } else {
        return lb;
      }
    } else {
      var distMidLeft = sumDistToSample(node, centerPoint(mid + 1, r, x, y), sample, sampleCache);
      var distMidRight = sumDistToSample(node, centerPoint(mid - 1, r, x, y), sample, sampleCache);
      var distMid = sumDistToSample(node, centerPoint(mid, r, x, y), sample, sampleCache);

      if (distMid > distMidLeft) {
        lb = mid + 1;
      } else if (distMid > distMidRight) {
        hb = mid - 1;
      } else {
        return mid;
      }
    }
  }

  return -1;
}
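The pivot search above trades exactness for speed: sample nodes are pre-binned by their distance to a handful of random pivots, and a remainder node only scans the one bucket its own pivot distance falls into. A minimal numeric sketch of that bucketing, using plain numbers and absolute difference in place of the project's high-dimensional distance:

var sample = [1, 2, 4, 8, 9, 15, 16];        // stand-ins for sample nodes
var pivot = 0;                                // one pivot; distance is |v - pivot|
var numBuckets = Math.floor(Math.sqrt(sample.length)); // 2
var maxDist = Math.max.apply(null, sample.map(function (v) { return Math.abs(v - pivot); }));
var bucketWidth = maxDist / numBuckets;       // 16 / 2 = 8

var buckets = [];
for (var b = 0; b < numBuckets; b++) buckets[b] = [];
sample.forEach(function (v) {
  var bNum = Math.min(numBuckets - 1, Math.floor(Math.abs(v - pivot) / bucketWidth));
  buckets[bNum].push(v);
});
// buckets -> [[1, 2, 4], [8, 9, 15, 16]]

// A query value of 5 maps to bucket 0, so only [1, 2, 4] is scanned for its
// nearest sampled element instead of the whole sample set.
var q = 5;
var qBucket = Math.min(numBuckets - 1, Math.floor(Math.abs(q - pivot) / bucketWidth));
console.log(buckets[qBucket]); // [1, 2, 4]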
src/jiggle.js | 6 lines (new file)
@@ -0,0 +1,6 @@
/**
 * @return {number} a random number.
 */
export default function() {
  return (Math.random() - 0.5) * 1e-6;
}
src/link.js | 150 lines (new file)
@@ -0,0 +1,150 @@
import constant from "./constant";
import jiggle from "./jiggle";
import {map} from "d3-collection";

/**
 * Extended link force algorithm that includes the stress metric for
 * comparisons between the different algorithms.
 * Everything else is the same as in the D3 force module.
 */

function index(d, i) {
  return i;
}

function find(nodeById, nodeId) {
  var node = nodeById.get(nodeId);
  if (!node) throw new Error("missing: " + nodeId);
  return node;
}

export default function(links) {
  var id = index,
      strength = defaultStrength,
      strengths,
      distance = constant(30),
      distances,
      nodes,
      count,
      bias,
      iterations = 1;

  if (links == null) links = [];

  function defaultStrength(link) {
    return 1 / Math.min(count[link.source.index], count[link.target.index]);
  }

  function force(alpha) {
    for (var k = 0, n = links.length; k < iterations; ++k) {
      for (var i = 0, link, source, target, x, y, l, b; i < n; ++i) {
        link = links[i], source = link.source, target = link.target;
        x = target.x + target.vx - source.x - source.vx || jiggle();
        y = target.y + target.vy - source.y - source.vy || jiggle();
        l = Math.sqrt(x * x + y * y);
        l = (l - distances[i]) / l * alpha/* * strengths[i]*/;
        x *= l, y *= l;
        target.vx -= x * (b = bias[i]);
        target.vy -= y * b;
        source.vx += x * (b = 1 - b);
        source.vy += y * b;
      }
    }
  }

  function initialize() {
    if (!nodes) return;

    var i,
        n = nodes.length,
        m = links.length,
        nodeById = map(nodes, id),
        link;

    for (i = 0, count = new Array(n); i < n; ++i) {
      count[i] = 0;
    }

    for (i = 0; i < m; ++i) {
      link = links[i], link.index = i;
      if (typeof link.source !== "object") link.source = find(nodeById, link.source);
      if (typeof link.target !== "object") link.target = find(nodeById, link.target);
      ++count[link.source.index], ++count[link.target.index];
    }

    for (i = 0, bias = new Array(m); i < m; ++i) {
      link = links[i], bias[i] = count[link.source.index] / (count[link.source.index] + count[link.target.index]);
    }

    strengths = new Array(m), initializeStrength();
    distances = new Array(m), initializeDistance();
  }

  function initializeStrength() {
    if (!nodes) return;

    for (var i = 0, n = links.length; i < n; ++i) {
      strengths[i] = +strength(links[i], i, links);
    }
  }

  function initializeDistance() {
    if (!nodes) return;

    for (var i = 0, n = links.length; i < n; ++i) {
      distances[i] = +distance(links[i], i, links);
    }
  }

  /**
   * Calculates the stress: the normalised difference between the
   * high dimensional distance and the realised layout distance.
   * The lower the stress, the better the layout.
   * @return {number} - stress of the layout.
   */
  function getStress() {
    var m = links.length,
        totalDiffSq = 0,
        totalHighDistSq = 0,
        link;
    for (var i = 0, source, target, realDist, highDist; i < m; i++) {
      link = links[i], source = link.source, target = link.target;
      realDist = Math.hypot(target.x - source.x, target.y - source.y);
      highDist = distances[i];
      totalDiffSq += Math.pow(realDist - highDist, 2);
      totalHighDistSq += highDist * highDist;
    }
    return Math.sqrt(totalDiffSq / totalHighDistSq);
  }

  force.initialize = function(_) {
    nodes = _;
    initialize();
  };

  force.links = function(_) {
    return arguments.length ? (links = _, initialize(), force) : links;
  };

  force.id = function(_) {
    return arguments.length ? (id = _, force) : id;
  };

  force.iterations = function(_) {
    return arguments.length ? (iterations = +_, force) : iterations;
  };

  force.strength = function(_) {
    return arguments.length ? (strength = typeof _ === "function" ? _ : constant(+_), initializeStrength(), force) : strength;
  };

  force.distance = function(_) {
    return arguments.length ? (distance = typeof _ === "function" ? _ : constant(+_), initializeDistance(), force) : distance;
  };

  force.stress = function() {
    return getStress();
  };

  return force;
}
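For context, the factory above is meant as a drop-in replacement for d3.forceLink with the extra stress() accessor; the reported stress is sqrt(sum (realDist - desiredDist)^2 / sum desiredDist^2) over the links. A usage sketch; the graph data, its weight property, and the global d3 wiring are assumptions of the example:

import forceLink from "./link";

var link = forceLink(graph.links)
    .id(function (d) { return d.id; })                 // same id accessor contract as d3.forceLink
    .distance(function (l) { return l.weight * 30; }); // desired on-screen length per link

var simulation = d3.forceSimulation(graph.nodes)
    .force("link", link)
    .on("end", function () {
      console.log("link stress:", link.stress()); // lower means a better fit to the desired lengths
    });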
src/neighbourSampling.js | 257 lines (new file)
@@ -0,0 +1,257 @@
import constant from "./constant";
import jiggle from "./jiggle";

/**
 * Set the node id accessor to the specified i.
 * @param {node} d - node.
 * @param {accessor} i - id accessor.
 * @return {accessor} - node id accessor.
 */
function index(d, i) {
  return i;
}

/**
 * The implementation of Chalmers' 1996 Neighbour and Sampling algorithm.
 * It uses random sampling to find the most suited neighbours from the
 * data set.
 * @return {force} calculated forces.
 */
export default function () {
  var id = index,
      neighbours = [],
      samples = new Array(),
      distance = constant(300),
      nodes,
      neighbourSize = 6,
      sampleSize = 3,
      freeness = 0.85,
      springForce = 0.7,
      dampingFactor = 0.3,
      velocity,
      multiplier = 50;

  /**
   * Calculates the forces at each iteration between the node and the
   * objects in its neighbour and sample sets.
   * @param {number} alpha - controls the stopping of the
   * particle simulation.
   */
  function force(alpha) {
    velocity = 0;
    for (var i = 0, n = nodes.length; i < n; ++i) {
      // Randomize the samples for every node.
      samples[i] = randomizeSample(i);
      // Calculate the forces between the node and its neighbours.
      for (var [keyN, valueN] of neighbours[i]) {
        setVelocity(i, keyN, valueN, alpha);
      }
      // Calculate the forces between the node and its sample set.
      for (var [keyS, valueS] of samples[i]) {
        setVelocity(i, keyS, valueS, alpha);
      }
      // Check if there are better neighbours in the sample array
      // for each node.
      findNewNeighbours(i);
    }
  }

  /**
   * Set the velocities of the source and target nodes.
   * @param {number} sourceId - source node id.
   * @param {number} targetId - target node id.
   * @param {number} dist - high dimensional distance between
   * the two nodes.
   * @param {number} alpha - controls the speed of the simulation.
   */
  function setVelocity(sourceId, targetId, dist, alpha) {
    var source, target, x, y, l;
    source = nodes[sourceId], target = nodes[targetId];
    // If x or y coordinates are not defined, add some randomness.
    x = target.x + target.vx - source.x - source.vx || jiggle();
    y = target.y + target.vy - source.y - source.vy || jiggle();
    l = Math.sqrt(x * x + y * y);
    l = (l - dist * multiplier) / l * alpha;
    x *= l, y *= l;
    velocity += x + y;
    // Set the calculated velocities for both nodes.
    target.vx -= x;
    target.vy -= y;
    source.vx += x;
    source.vy += y;
  }

  /**
   * Initialize the neighbour and sample sets at the start.
   */
  function initialize() {
    if (!nodes) return;

    // Initialize for each node a neighbour and a sample array
    // with random values.
    for (var i = 0, n = nodes.length; i < n; ++i) {
      var exclude = []; // Array that keeps the indices of nodes to ignore.
      exclude.push(i);

      var neighbs = createRandomSample(i, exclude, n, neighbourSize);
      // Sort the neighbour set by the distances.
      neighbs = new Map([...neighbs.entries()].sort(sortDistances));
      neighbours[i] = neighbs;

      exclude = exclude.concat(Array.from(neighbs.keys()));
      samples[i] = createRandomSample(i, exclude, n, sampleSize);
    }
  }

  /**
   * Compares two map entries by their values.
   * @param {object} a
   * @param {object} b
   * @return {number} - 0 if the values are equal, a positive number if b > a,
   * negative otherwise.
   */
  function sortDistances(a, b) {
    return b[1] - a[1];
  }

  /**
   * Create a map of random integers, all different, with maximum
   * value max and with size elements. No elements from exclude should
   * be included.
   * @param {number} index - index of the current node.
   * @param {array} exclude - indices of the nodes to ignore.
   * @param {number} max - maximum value.
   * @param {number} size - the number of elements in the map to return.
   * @return {map} - a map that contains random elements from the
   * data set.
   */
  function createRandomSample(index, exclude, max, size) {
    var randElements = new Map();

    for (var i = 0; i < size; ++i) {
      // Stop when no new elements can be found.
      if (randElements.size + exclude.length >= nodes.length) {
        break;
      }
      var rand = Math.floor((Math.random() * max));
      // If rand is already in the random list or in the exclude list,
      // ignore it and get a new value.
      while (randElements.has(rand) || exclude.includes(rand)) {
        rand = Math.floor((Math.random() * max));
      }
      randElements.set(rand, +distance(nodes[index], nodes[rand]));
    }

    return randElements;
  }

  /**
   * Creates a new map of random numbers to be used by the samples list.
   * @param {number} index - index of the current node.
   * @return {map} - map that contains random elements from the data set.
   */
  function randomizeSample(index) {
    // Ignore the current neighbours of the node and the node itself.
    var exclude = [index];
    exclude = exclude.concat(Array.from(neighbours[index].keys()));
    return createRandomSample(index, exclude, nodes.length, sampleSize);
  }

  /**
   * Compares the elements from the sample set to the neighbour set and
   * replaces elements of the neighbour set if better neighbours are
   * found in the sample set.
   * @param {number} index - index of the current node.
   */
  function findNewNeighbours(index) {
    var sample = samples[index];

    for (var [key, value] of sample) {
      var neighbMax = neighbours[index].entries().next().value;

      // Check if a value from the sample could be a better neighbour;
      // if so, replace it.
      if (value < neighbMax[1]) {
        neighbours[index].delete(neighbMax[0]);
        neighbours[index].set(key, value);
        neighbours[index] = new Map([...neighbours[index].entries()].sort(sortDistances));
      }
    }
  }

  /**
   * Calculates the stress: the normalised difference between the
   * high dimensional distance and the realised layout distance.
   * The lower the stress, the better the layout.
   * @return {number} - stress of the layout.
   */
  function getStress() {
    var totalDiffSq = 0, totalHighDistSq = 0;
    for (var i = 0, source, target, realDist, highDist; i < nodes.length; i++) {
      for (var j = 0; j < nodes.length; j++) {
        if (i !== j) {
          source = nodes[i], target = nodes[j];
          realDist = Math.hypot(target.x - source.x, target.y - source.y);
          highDist = +distance(source, target);
          totalDiffSq += Math.pow(realDist - highDist, 2);
          totalHighDistSq += highDist * highDist;
        }
      }
    }
    return Math.sqrt(totalDiffSq / totalHighDistSq);
  }

  /**
   * Calculates the average velocity of the force calculation at the
   * current iteration.
   * @return {number} - average velocity.
   */
  function getAvgVelocity() {
    return velocity / ((neighbourSize + sampleSize) * nodes.length);
  }

  // API for initializing the algorithm, setting parameters and querying
  // metrics.
  force.initialize = function (_) {
    nodes = _;
    initialize();
  };

  force.id = function (_) {
    return arguments.length ? (id = _, force) : id;
  };

  force.neighbourSize = function (_) {
    return arguments.length ? (neighbourSize = +_, force) : neighbourSize;
  };

  force.sampleSize = function (_) {
    return arguments.length ? (sampleSize = +_, force) : sampleSize;
  };

  force.distance = function (_) {
    return arguments.length ? (distance = typeof _ === "function" ? _ : constant(+_), force) : distance;
  };

  force.stress = function () {
    return getStress();
  };

  force.velocity = function () {
    return getAvgVelocity();
  };

  force.freeness = function (_) {
    return arguments.length ? (freeness = +_, force) : freeness;
  };

  force.nodeNeighbours = function (_) {
    return arguments.length ? neighbours[+_] : [];
  };

  force.multiplier = function (_) {
    return arguments.length ? (multiplier = +_, force) : multiplier;
  };

  return force;
}
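Usage of this force is already shown in hybridSimulation.js, where it is wired in as d3.forceNeighbourSampling. A standalone sketch along the same lines; data and hdDistance are placeholders from the caller:

var ns = d3.forceNeighbourSampling()
    .distance(function (a, b) { return hdDistance(a, b); })
    .neighbourSize(6)   // nearest neighbours kept per node
    .sampleSize(3)      // random candidates tested per iteration
    .multiplier(50);    // scales the high-dimensional distance into screen space

var simulation = d3.forceSimulation()
    .force("neighbourSampling", ns)
    .nodes(data)
    .on("tick", function () {
      console.log("avg velocity:", ns.velocity()); // convergence signal for this iteration
    })
    .on("end", function () {
      console.log("stress:", ns.stress());
    });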
src/neighbourSamplingDistance.js | 300 lines (new file)
@@ -0,0 +1,300 @@
import constant from "./constant";
import jiggle from "./jiggle";

/**
 * Set the node id accessor to the specified i.
 * @param {node} d - node.
 * @param {accessor} i - id accessor.
 * @return {accessor} - node id accessor.
 */
function index(d, i) {
  return i;
}

/**
 * The implementation of Chalmers' 1996 Neighbour and Sampling algorithm.
 * It uses random sampling to find the most suited neighbours from the
 * data set. This variant only accepts neighbours whose high-dimensional
 * distance falls within distanceRange.
 * @return {force} calculated forces.
 */
export default function () {
  var id = index,
      neighbours = [],
      samples = new Array(),
      distance = constant(300),
      distanceRange = 10,
      nodes,
      neighbourSize = 6,
      sampleSize = 3,
      freeness = 0.85,
      springForce = 0.7,
      dampingFactor = 0.3,
      velocity,
      multiplier = 50;

  /**
   * Calculates the forces at each iteration between the node and the
   * objects in its neighbour and sample sets.
   * @param {number} alpha - controls the stopping of the
   * particle simulation.
   */
  function force(alpha) {
    velocity = 0;
    for (var i = 0, n = nodes.length; i < n; ++i) {
      // Randomize the samples for every node.
      samples[i] = randomizeSample(i);
      // Calculate the forces between the node and its neighbours.
      for (var [keyN, valueN] of neighbours[i]) {
        setVelocity(i, keyN, valueN, alpha);
      }
      // Calculate the forces between the node and its sample set.
      for (var [keyS, valueS] of samples[i]) {
        setVelocity(i, keyS, valueS, alpha);
      }
      // Check if there are better neighbours in the sample array
      // for each node.
      findNewNeighbours(i);
    }
  }

  /**
   * Set the velocities of the source and target nodes.
   * @param {number} sourceId - source node id.
   * @param {number} targetId - target node id.
   * @param {number} dist - high dimensional distance between
   * the two nodes.
   * @param {number} alpha - controls the speed of the simulation.
   */
  function setVelocity(sourceId, targetId, dist, alpha) {
    var source, target, x, y, l;
    source = nodes[sourceId], target = nodes[targetId];
    // If x or y coordinates are not defined, add some randomness.
    x = target.x + target.vx - source.x - source.vx || jiggle();
    y = target.y + target.vy - source.y - source.vy || jiggle();
    l = Math.sqrt(x * x + y * y);
    l = (l - dist * multiplier) / l * alpha;
    x *= l, y *= l;
    velocity += x + y;
    // Set the calculated velocities for both nodes.
    target.vx -= x;
    target.vy -= y;
    source.vx += x;
    source.vy += y;
  }

  /**
   * Initialize the neighbour and sample sets at the start.
   */
  function initialize() {
    if (!nodes) return;

    // Initialize for each node a neighbour and a sample array
    // with random values.
    for (var i = 0, n = nodes.length; i < n; ++i) {
      var exclude = []; // Array that keeps the indices of nodes to ignore.
      exclude.push(i);

      var neighbs = createRandomNeighbours(i, exclude, n, neighbourSize);
      // Sort the neighbour set by the distances.
      neighbs = new Map([...neighbs.entries()].sort(sortDistances));
      neighbours[i] = neighbs;

      exclude = exclude.concat(Array.from(neighbs.keys()));
      samples[i] = createRandomSample(i, exclude, n, sampleSize);
    }
  }

  /**
   * Compares two map entries by their values.
   * @param {object} a
   * @param {object} b
   * @return {number} - 0 if the values are equal, a positive number if b > a,
   * negative otherwise.
   */
  function sortDistances(a, b) {
    return b[1] - a[1];
  }

  /**
   * Create a map of random neighbours whose high-dimensional distance to the
   * node lies within distanceRange, all different, with maximum value max
   * and at most size elements. No elements from exclude should be included.
   * @param {number} index - index of the current node.
   * @param {array} exclude - indices of the nodes to ignore.
   * @param {number} max - maximum value.
   * @param {number} size - the number of elements in the map to return.
   * @return {map} - a map that contains random elements from the
   * data set.
   */
  function createRandomNeighbours(index, exclude, max, size) {
    var randElements = new Map();
    var triedElements = 0;

    while ((randElements.size < size) && (randElements.size + exclude.length + triedElements < nodes.length)) {
      var rand = Math.floor((Math.random() * max));
      // If rand is already in the random list or in the exclude list,
      // ignore it and get a new value.
      while (randElements.has(rand) || exclude.includes(rand)) {
        rand = Math.floor((Math.random() * max));
      }
      var dist = +distance(nodes[index], nodes[rand]);
      if (dist <= distanceRange) {
        randElements.set(rand, dist);
      } else {
        triedElements++;
      }
    }

    return randElements;
  }

  function createRandomSample(index, exclude, max, size) {
    var randElements = new Map();

    for (var i = 0; i < size; ++i) {
      // Stop when no new elements can be found.
      if (randElements.size + exclude.length >= nodes.length) {
        break;
      }
      var rand = Math.floor((Math.random() * max));
      // If rand is already in the random list or in the exclude list,
      // ignore it and get a new value.
      while (randElements.has(rand) || exclude.includes(rand)) {
        rand = Math.floor((Math.random() * max));
      }
      randElements.set(rand, +distance(nodes[index], nodes[rand]));
    }

    return randElements;
  }

  /**
   * Creates a new map of random numbers to be used by the samples list.
   * @param {number} index - index of the current node.
   * @return {map} - map that contains random elements from the data set.
   */
  function randomizeSample(index) {
    // Ignore the current neighbours of the node and the node itself.
    var exclude = [index];
    exclude = exclude.concat(Array.from(neighbours[index].keys()));
    return createRandomSample(index, exclude, nodes.length, sampleSize);
  }

  /**
   * Compares the elements from the sample set to the neighbour set and
   * replaces elements of the neighbour set if better neighbours are
   * found in the sample set.
   * @param {number} index - index of the current node.
   */
  function findNewNeighbours(index) {
    var sample = samples[index];

    if (neighbours[index].size > 0) {
      for (var [key, value] of sample) {
        var neighbMax = neighbours[index].entries().next().value;

        // Check if a value from the sample could be a better neighbour;
        // if so, replace it.
        if (value < neighbMax[1] && value <= distanceRange) {
          neighbours[index].delete(neighbMax[0]);
          neighbours[index].set(key, value);
          neighbours[index] = new Map([...neighbours[index].entries()].sort(sortDistances));
        }
      }
    }
  }

  /**
   * Calculates the stress: the normalised difference between the
   * high dimensional distance and the realised layout distance.
   * The lower the stress, the better the layout.
   * @return {number} - stress of the layout.
   */
  function getStress() {
    var totalDiffSq = 0, totalHighDistSq = 0;
    for (var i = 0, source, target, realDist, highDist; i < nodes.length; i++) {
      for (var j = 0; j < nodes.length; j++) {
        if (i !== j) {
          source = nodes[i], target = nodes[j];
          realDist = Math.hypot(target.x - source.x, target.y - source.y);
          highDist = +distance(source, target) * multiplier;
          totalDiffSq += Math.pow(realDist - highDist, 2);
          totalHighDistSq += highDist * highDist;
        }
      }
    }
    return Math.sqrt(totalDiffSq / totalHighDistSq);
  }

  /**
   * Calculates the average velocity of the force calculation at the
   * current iteration.
   * @return {number} - average velocity.
   */
  function getAvgVelocity() {
    return velocity / ((neighbourSize + sampleSize) * nodes.length);
  }

  // Per-node neighbour list sizes, useful for inspecting how many neighbours
  // fall within distanceRange.
  function getDistributionData() {
    var d = [];
    for (var i = 0; i < nodes.length; i++) {
      d.push({ "index": i, "size": neighbours[i].size });
    }
    return { "maxSize": neighbourSize, "l": nodes.length, "distribution": d };
  }

  // API for initializing the algorithm, setting parameters and querying
  // metrics.
  force.initialize = function (_) {
    nodes = _;
    initialize();
  };

  force.id = function (_) {
    return arguments.length ? (id = _, force) : id;
  };

  force.neighbourSize = function (_) {
    return arguments.length ? (neighbourSize = +_, force) : neighbourSize;
  };

  force.sampleSize = function (_) {
    return arguments.length ? (sampleSize = +_, force) : sampleSize;
  };

  force.distance = function (_) {
    return arguments.length ? (distance = typeof _ === "function" ? _ : constant(+_), force) : distance;
  };

  force.stress = function () {
    return getStress();
  };

  force.velocity = function () {
    return getAvgVelocity();
  };

  force.freeness = function (_) {
    return arguments.length ? (freeness = +_, force) : freeness;
  };

  force.distanceRange = function (_) {
    return arguments.length ? (distanceRange = +_, force) : distanceRange;
  };

  force.multiplier = function (_) {
    return arguments.length ? (multiplier = +_, initialize(), force) : multiplier;
  };

  force.nodeNeighbours = function (_) {
    return arguments.length ? neighbours[+_] : neighbours;
  };

  force.distributionData = function () {
    return getDistributionData();
  };

  return force;
}
src/neighbourSamplingPre.js | 197 lines (new file)
@@ -0,0 +1,197 @@
import constant from "./constant";
import jiggle from "./jiggle";

/**
 * Set the node id accessor to the specified i.
 * @param {node} d - node.
 * @param {accessor} i - id accessor.
 * @return {accessor} - node id accessor.
 */
function index(d, i) {
  return i;
}

/**
 * The implementation of Chalmers' 1996 Neighbour and Sampling algorithm.
 * It uses random sampling to find the most suited neighbours from the
 * data set. This variant pre-computes the exact neighbour sets up front
 * instead of sampling during the simulation.
 * @return {force} calculated forces.
 */
export default function () {
  var id = index,
      neighbours = [],
      worst = [],
      samples = new Array(),
      distance = constant(300),
      nodes,
      neighbourSize = 6,
      sampleSize = 3,
      freeness = 0.85,
      springForce = 0.7,
      dampingFactor = 0.3,
      velocity = 0; // not accumulated in this variant; kept for the velocity() accessor

  /**
   * Calculates the forces at each iteration between the node and the
   * objects in its neighbour and sample sets.
   * @param {number} alpha - controls the stopping of the
   * particle simulation.
   */
  function force(alpha) {
    for (var i = 0, n = nodes.length; i < n; ++i) {
      // Calculate the forces between the node and its neighbours.
      for (var [keyN, valueN] of neighbours[i]) {
        setVelocity(i, keyN, valueN, alpha);
      }
      // Calculate the forces between the node and its sample set.
      // for (var [keyS, valueS] of worst[i]) {
      //   setVelocity(i, keyS, valueS, alpha);
      // }
    }
  }

  /**
   * Set the velocities of the source and target nodes.
   * @param {number} sourceId - source node id.
   * @param {number} targetId - target node id.
   * @param {number} dist - high dimensional distance between
   * the two nodes.
   * @param {number} alpha - controls the speed of the simulation.
   */
  function setVelocity(sourceId, targetId, dist, alpha) {
    var source, target, x, y, l;
    source = nodes[sourceId], target = nodes[targetId];
    // If x or y coordinates are not defined, add some randomness.
    x = target.x + target.vx - source.x - source.vx || jiggle();
    y = target.y + target.vy - source.y - source.vy || jiggle();
    l = Math.sqrt(x * x + y * y);
    l = (l - dist) / l * alpha;
    x *= l, y *= l;
    // Set the calculated velocities for both nodes.
    target.vx -= x;
    target.vy -= y;
    source.vx += x;
    source.vy += y;
  }

  /**
   * Initialize the neighbour set at the start.
   */
  function initialize() {
    if (!nodes) return;

    findNeighbours();
  }

  /**
   * Compares two map entries by their values.
   * @param {object} a
   * @param {object} b
   * @return {number} - 0 if the values are equal, a positive number if b > a,
   * negative otherwise.
   */
  function sortDistances(a, b) {
    return b[1] - a[1];
  }

  // Brute-force pre-computation of the neighbourSize nearest neighbours of
  // every node in high-dimensional space.
  function findNeighbours() {
    for (var i = 0, n = nodes.length; i < n; ++i) {
      neighbours[i] = new Map();
      for (var j = 0; j < n; j++) {
        if (i !== j) {
          var dist = +distance(nodes[i], nodes[j]);

          if (neighbours[i].size < neighbourSize) {
            neighbours[i].set(j, dist);
            neighbours[i] = new Map([...neighbours[i].entries()].sort(sortDistances));
          } else {
            var neighbMax = neighbours[i].entries().next().value;

            if (dist < neighbMax[1]) {
              neighbours[i].delete(neighbMax[0]);
              neighbours[i].set(j, dist);
              neighbours[i] = new Map([...neighbours[i].entries()].sort(sortDistances));
            }

          }
        }
      }
    }
  }

  /**
   * Calculates the stress: the normalised difference between the
   * high dimensional distance and the realised layout distance.
   * The lower the stress, the better the layout.
   * @return {number} - stress of the layout.
   */
  function getStress() {
    var totalDiffSq = 0, totalHighDistSq = 0;
    for (var i = 0, source, target, realDist, highDist; i < nodes.length; i++) {
      for (var j = 0; j < nodes.length; j++) {
        if (i !== j) {
          source = nodes[i], target = nodes[j];
          realDist = Math.hypot(target.x - source.x, target.y - source.y);
          highDist = +distance(source, target);
          totalDiffSq += Math.pow(realDist - highDist, 2);
          totalHighDistSq += highDist * highDist;
        }
      }
    }
    return Math.sqrt(totalDiffSq / totalHighDistSq);
  }

  /**
   * Calculates the average velocity of the force calculation at the
   * current iteration.
   * @return {number} - average velocity.
   */
  function getAvgVelocity() {
    return velocity / ((neighbourSize + sampleSize) * nodes.length);
  }

  // API for initializing the algorithm, setting parameters and querying
  // metrics.
  force.initialize = function (_) {
    nodes = _;
    initialize();
  };

  force.id = function (_) {
    return arguments.length ? (id = _, force) : id;
  };

  force.neighbourSize = function (_) {
    return arguments.length ? (neighbourSize = +_, force) : neighbourSize;
  };

  force.sampleSize = function (_) {
    return arguments.length ? (sampleSize = +_, force) : sampleSize;
  };

  force.distance = function (_) {
    return arguments.length ? (distance = typeof _ === "function" ? _ : constant(+_), force) : distance;
  };

  force.stress = function () {
    return getStress();
  };

  force.velocity = function () {
    return getAvgVelocity();
  };

  force.freeness = function (_) {
    return arguments.length ? (freeness = +_, force) : freeness;
  };

  force.nodeNeighbours = function (_) {
    return arguments.length ? neighbours[+_] : [];
  };

  return force;
}
src/t-sne.js | 374 lines (new file)
@@ -0,0 +1,374 @@
|
||||
import constant from "./constant";
|
||||
|
||||
/**
|
||||
* Set the node id accessor to the specified i.
|
||||
* @param {node} d - node.
|
||||
* @param {accessor} i - id accessor.
|
||||
* @return {accessor} - node id accessor.
|
||||
*/
|
||||
function index(d, i) {
|
||||
return i;
|
||||
}

/**
 * A t-SNE force for D3. It adapts the existing tsnejs implementation
 * (https://github.com/karpathy/tsnejs) to compute the solution.
 */
export default function() {
  var id = index,
      distance = constant(300),
      nodes,
      perplexity = 30,
      learningRate = 10,
      iteration = 0,
      dim = 2,
      N, // number of nodes.
      P, // probability matrix.
      Y, // solution.
      gains,
      ystep;

  /**
   * Performs one t-SNE step and adds the resulting solution coordinates to
   * the node velocities.
   */
  function force() {
    // Make a step at each iteration.
    step();
    var solution = getSolution();

    // Set the velocity for each node using the solution.
    for (var i = 0; i < nodes.length; i++) {
      nodes[i].vx += solution[i][0];
      nodes[i].vy += solution[i][1];
    }
  }

  /**
   * Draws a random number from the standard Gaussian distribution using the
   * Marsaglia polar method.
   * @return {number} random number.
   */
  function gaussRandom() {
    let u = 2 * Math.random() - 1;
    let v = 2 * Math.random() - 1;
    let r = u * u + v * v;
    if (r === 0 || r > 1) return gaussRandom();
    return u * Math.sqrt(-2 * Math.log(r) / r);
  }

  /**
   * Returns a small Gaussian random number used to initialize the solution.
   * @return {number} Gaussian sample scaled by 1e-4.
   */
  function randomN() {
    return gaussRandom() * 1e-4;
  }

  /**
   * Returns the sign of x: 1, -1 or 0.
   */
  function sign(x) {
    return x > 0 ? 1 : x < 0 ? -1 : 0;
  }

  /**
   * Create an array of length n filled with zeros.
   * @param {number} n - length of array.
   * @return {Float64Array} - array of zeros with length n.
   */
  function zeros(n) {
    if (typeof n === "undefined" || isNaN(n)) {
      return [];
    }
    return new Float64Array(n); // typed arrays are faster
  }

  /**
   * Creates a 2d array with n rows and d columns. Each entry is set to the
   * optional fill value s; if s is omitted, a small Gaussian random number is
   * used instead (matching the behaviour of tsnejs' randn2d, and matching the
   * call sites below that pass 1.0 and 0.0 as fill values).
   * @param {number} n - rows.
   * @param {number} d - columns.
   * @param {number} [s] - optional fill value.
   * @return {array} - 2d array.
   */
  function random2d(n, d, s) {
    var useFill = typeof s !== "undefined";
    var x = [];
    for (var i = 0; i < n; i++) {
      var y = [];
      for (var j = 0; j < d; j++) {
        y.push(useFill ? s : randomN());
      }
      x.push(y);
    }
    return x;
  }

  /**
   * Computes the probability matrix P from the provided data.
   * @param {array} data - nodes.
   * @param {number} perplexity - target perplexity, used to set the entropy of each row distribution.
   * @param {number} tol - tolerance for the entropy difference.
   * @return {Float64Array} - flattened N x N matrix containing probabilities.
   */
  function d2p(data, perplexity, tol) {
    N = Math.floor(data.length);
    var Htarget = Math.log(perplexity); // target entropy of distribution.
    var P1 = zeros(N * N); // temporary probability matrix.

    var prow = zeros(N); // a temporary storage compartment.
    for (var i = 0; i < N; i++) {
      var betamin = -Infinity;
      var betamax = Infinity;
      var beta = 1; // initial value of precision.
      var done = false;
      var maxtries = 50;

      // Perform binary search to find a suitable precision beta
      // so that the entropy of the distribution is appropriate.
      var num = 0;
      while (!done) {
        // Compute entropy and kernel row with beta precision.
        var psum = 0.0;
        for (var j = 0; j < N; j++) {
          var pj = Math.exp(-distance(data[i], data[j]) * beta);
          // Ignore the diagonal.
          if (i === j) {
            pj = 0;
          }
          prow[j] = pj;
          psum += pj;
        }
        // Normalize p and compute entropy.
        var Hhere = 0.0;
        for (j = 0; j < N; j++) {
          if (psum === 0) {
            pj = 0;
          } else {
            pj = prow[j] / psum;
          }
          prow[j] = pj;
          if (pj > 1e-7) {
            Hhere -= pj * Math.log(pj);
          }
        }

        // Adjust beta based on the result.
        if (Hhere > Htarget) {
          // Entropy was too high (distribution too diffuse),
          // so increase the precision for a more peaky distribution.
          betamin = beta; // move up the bounds.
          if (betamax === Infinity) {
            beta = beta * 2;
          } else {
            beta = (beta + betamax) / 2;
          }
        } else {
          // Converse case: make the distribution less peaky.
          betamax = beta;
          if (betamin === -Infinity) {
            beta = beta / 2;
          } else {
            beta = (beta + betamin) / 2;
          }
        }

        // Stopping conditions: too many tries or a good enough precision.
        num++;
        if (Math.abs(Hhere - Htarget) < tol || num >= maxtries) {
          done = true;
        }
      }

      // Copy the final prow into row i of P1.
      for (j = 0; j < N; j++) {
        P1[i * N + j] = prow[j];
      }
    }

    // Symmetrize P and normalize it to sum to 1 over all ij.
    var Pout = zeros(N * N);
    var N2 = N * 2;
    for (i = 0; i < N; i++) {
      for (j = 0; j < N; j++) {
        Pout[i * N + j] = Math.max((P1[i * N + j] + P1[j * N + i]) / N2, 1e-100);
      }
    }
    return Pout;
  }
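  // In short: for each node i the binary search above tunes the precision beta_i
  // so that the row distribution
  //   p_{j|i} = exp(-beta_i * distance(i, j)) / sum_k exp(-beta_i * distance(i, k))
  // has entropy log(perplexity), and the final symmetric matrix is
  //   P_ij = max((p_{j|i} + p_{i|j}) / (2 * N), 1e-100).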

  /**
   * Initialize a starting (random) solution.
   */
  function initSolution() {
    Y = random2d(N, dim);
    // Step gains to accelerate progress in unchanging directions.
    gains = random2d(N, dim, 1.0);
    // Momentum accumulator.
    ystep = random2d(N, dim, 0.0);
    iteration = 0;
  }

  /**
   * @return {2d array} the solution.
   */
  function getSolution() {
    return Y;
  }

  /**
   * Performs a single step (iteration) of the layout.
   * @return {number} the current cost.
   */
  function step() {
    iteration += 1;

    var cg = costGrad(Y); // Evaluate gradient.
    var cost = cg.cost;
    var grad = cg.grad;

    // Perform gradient step.
    var ymean = zeros(dim);
    for (var i = 0; i < N; i++) {
      for (var d = 0; d < dim; d++) {
        var gid = grad[i][d];
        var sid = ystep[i][d];
        var gainid = gains[i][d];

        // Compute gain update.
        var newgain = sign(gid) === sign(sid) ? gainid * 0.8 : gainid + 0.2;
        if (newgain < 0.01) {
          newgain = 0.01;
        }
        gains[i][d] = newgain;

        // Compute momentum step direction.
        var momval = iteration < 250 ? 0.5 : 0.8;
        var newsid = momval * sid - learningRate * newgain * grad[i][d];
        ystep[i][d] = newsid;

        // Do the step.
        Y[i][d] += newsid;

        // Accumulate the mean so that we can center later.
        ymean[d] += Y[i][d];
      }
    }

    // Reproject Y to have zero mean.
    for (i = 0; i < N; i++) {
      for (d = 0; d < dim; d++) {
        Y[i][d] -= ymean[d] / N;
      }
    }
    return cost;
  }
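  // The gain/momentum update above is the adaptive-gain scheme from tsnejs:
  // a gain shrinks (times 0.8) when the current gradient and the previous step
  // share a sign and grows (plus 0.2) when they differ, floored at 0.01, and
  //   ystep = momentum * ystep - learningRate * gain * grad,
  // with momentum 0.5 for the first 250 iterations and 0.8 afterwards.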

  /**
   * Calculates the cost and the gradient.
   * @param {2d array} Y - the current solution to evaluate.
   * @return {object} object that contains the cost and the gradient.
   */
  function costGrad(Y) {

    var pmul = iteration < 100 ? 4 : 1;

    // Compute current Q distribution, unnormalized first.
    var Qu = zeros(N * N);
    var qsum = 0.0;
    for (var i = 0; i < N; i++) {
      for (var j = i + 1; j < N; j++) {
        var dsum = 0.0;
        for (var d = 0; d < dim; d++) {
          var dhere = Y[i][d] - Y[j][d];
          dsum += dhere * dhere;
        }
        var qu = 1.0 / (1.0 + dsum); // Student t-distribution.
        Qu[i * N + j] = qu;
        Qu[j * N + i] = qu;
        qsum += 2 * qu;
      }
    }
    // Normalize Q distribution to sum to 1.
    var NN = N * N;
    var Q = zeros(NN);
    for (var q = 0; q < NN; q++) {
      Q[q] = Math.max(Qu[q] / qsum, 1e-100);
    }

    var cost = 0.0;
    var grad = [];
    for (i = 0; i < N; i++) {
      var gsum = new Array(dim); // Initialize gradient for point i.
      for (d = 0; d < dim; d++) {
        gsum[d] = 0.0;
      }
      for (j = 0; j < N; j++) {
        // Accumulate the cost.
        cost += -P[i * N + j] * Math.log(Q[i * N + j]);
        var premult = 4 * (pmul * P[i * N + j] - Q[i * N + j]) * Qu[i * N + j];
        for (d = 0; d < dim; d++) {
          gsum[d] += premult * (Y[i][d] - Y[j][d]);
        }
      }
      grad.push(gsum);
    }

    return {
      cost: cost,
      grad: grad
    };
  }
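  // The cost accumulated above is the P-weighted cross entropy
  //   cost = -sum_{i,j} P_ij * log(Q_ij)
  // (the P-dependent part of the KL divergence), and the gradient for point i is
  //   grad_i = sum_j 4 * (pmul * P_ij - Q_ij) * Qu_ij * (y_i - y_j),
  // where Qu is the unnormalized Student-t kernel and pmul exaggerates P by a
  // factor of 4 during the first 100 iterations ("early exaggeration").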

  /**
   * Calculates the stress of the layout: the normalized difference between
   * the high-dimensional distances and the actual distances in the layout.
   * The lower the stress, the better the layout.
   * @return {number} - stress of the layout.
   */
  function getStress() {
    var totalDiffSq = 0,
        totalHighDistSq = 0;
    for (var i = 0, source, target, realDist, highDist; i < nodes.length; i++) {
      for (var j = 0; j < nodes.length; j++) {
        if (i !== j) {
          source = nodes[i], target = nodes[j];
          realDist = Math.hypot(target.x - source.x, target.y - source.y);
          highDist = +distance(source, target);
          totalDiffSq += Math.pow(realDist - highDist, 2);
          totalHighDistSq += highDist * highDist;
        }
      }
    }
    return Math.sqrt(totalDiffSq / totalHighDistSq);
  }

  // API for initializing the algorithm, setting parameters and querying
  // metrics.
  force.initialize = function(_) {
    nodes = _;
    N = nodes.length;
    // Initialize the probability matrix.
    P = d2p(nodes, perplexity, 1e-4);
    initSolution();
  };

  force.id = function(_) {
    return arguments.length ? (id = _, force) : id;
  };

  force.distance = function(_) {
    return arguments.length ? (distance = typeof _ === "function" ? _ : constant(+_), force) : distance;
  };

  force.stress = function() {
    return getStress();
  };

  force.learningRate = function(_) {
    return arguments.length ? (learningRate = +_, force) : learningRate;
  };

  force.perplexity = function(_) {
    return arguments.length ? (perplexity = +_, force) : perplexity;
  };

  return force;
}
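// A minimal usage sketch for the t-SNE force (kept as a comment; the d3-force
// wiring is the standard forceSimulation API, and the distance accessor below
// is an illustrative assumption):
//
//   import {forceSimulation} from "d3-force";
//   import tsne from "./t-sne";
//
//   const simulation = forceSimulation(nodes)
//       .force("tsne", tsne()
//           .perplexity(30)
//           .learningRate(10)
//           .distance((a, b) => highDimDistance(a, b))); // user-supplied metric
//
//   simulation.on("tick", () =>
//       console.log("stress:", simulation.force("tsne").stress()));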