24 Commits

Author SHA1 Message Date
Pitchaya Boonsarngsuk
7ac1fb1ebc แก้ ลืมเปลี่ยนจำนวน its ใน js 2018-03-22 18:00:21 +00:00
Pitchaya Boonsarngsuk
e6073a86d3 Lower range of iterations 2018-03-22 17:44:00 +00:00
Pitchaya Boonsarngsuk
fa5d34f96e แก้ เผลอคอมเมนต์เกิน 2018-03-22 16:53:39 +00:00
Pitchaya Boonsarngsuk
7c2900653e แก้ coding style ภาค 12 2018-03-22 16:47:10 +00:00
Pitchaya Boonsarngsuk
d59e4066d3 แก้ coding style ภาค 11 2018-03-22 16:44:57 +00:00
Pitchaya Boonsarngsuk
7b322b3ea8 แก้ตัวปิดบรรทัด 2018-03-22 16:42:28 +00:00
Pitchaya Boonsarngsuk
b2f5993513 แก้ coding style ภาค 9 (reverted from commit 400eab7e80) 2018-03-22 16:40:35 +00:00
Pitchaya Boonsarngsuk
cd0f3687cb แก้ coding style ภาค 10 2018-03-22 16:40:27 +00:00
Pitchaya Boonsarngsuk
400eab7e80 แก้ coding style ภาค 9 2018-03-22 16:40:14 +00:00
Pitchaya Boonsarngsuk
294cc7724e เพิ่ม script eslint fix 2018-03-22 16:36:27 +00:00
Pitchaya Boonsarngsuk
0d1fa385f8 แก้ coding style ภาค 8 2018-03-22 16:35:48 +00:00
Pitchaya Boonsarngsuk
93af0e646a แก้ coding style ภาค 7 2018-03-22 16:35:00 +00:00
Pitchaya Boonsarngsuk
684878b1fd แก้ coding style ภาค 6 2018-03-22 16:30:41 +00:00
Pitchaya Boonsarngsuk
21ee710468 แก้ coding style ภาค 5 2018-03-22 16:29:10 +00:00
Pitchaya Boonsarngsuk
8e34697d89 แก้ coding style ภาค 4 2018-03-22 16:22:43 +00:00
Pitchaya Boonsarngsuk
f3a6656c8f แก้ coding style ภาค 3 2018-03-22 16:17:48 +00:00
Pitchaya Boonsarngsuk
0cdd927444 แก้ coding style ภาค 2 2018-03-22 16:12:25 +00:00
Pitchaya Boonsarngsuk
2256af7448 แก้ coding style 2018-03-22 16:02:45 +00:00
Pitchaya Boonsarngsuk
b601af68b4 แก้ชื่อตัวแปล 2018-03-22 15:49:22 +00:00
Pitchaya Boonsarngsuk
f316d2755a แก้วงเล็บ 2018-03-22 15:48:30 +00:00
Pitchaya Boonsarngsuk
d31951fa85 Eslint allow console 2018-03-22 15:41:44 +00:00
Pitchaya Boonsarngsuk
7c1886dcc6 Eslint ใช้ ES6 2018-03-22 15:33:16 +00:00
Pitchaya Boonsarngsuk
66da3eb15b Add eslint run script 2018-03-22 15:32:09 +00:00
Pitchaya Boonsarngsuk
6a46342afd เพิ่ม eslint 2018-03-22 15:24:45 +00:00
32 changed files with 1504 additions and 1502 deletions

19
.eslintrc Normal file
View File

@@ -0,0 +1,19 @@
parserOptions:
sourceType: module
extends:
"standard"
rules:
no-cond-assign: 0
no-console: 0
semi:
- error
- always
no-return-assign: 0
one-var: 0
env:
es6: true
globals:
console: false
performance: false

View File

@@ -101,8 +101,8 @@
<br/>
<label title="Number of iterations before the simulation is stopped">
Iterations
<output id="iterationsSliderOutput">300</output>
<input type="range" min="5" max="5000" value="300" step="5" oninput="d3.select('#iterationsSliderOutput').text(value); ITERATIONS=value;">
<output id="iterationsSliderOutput">100</output>
<input type="range" min="5" max="1000" value="100" step="5" oninput="d3.select('#iterationsSliderOutput').text(value); ITERATIONS=value;">
</label>
<br/>
<label title="Attribute used for coloring nodes">

View File

@@ -1,10 +1,10 @@
/**
* Initialize the hybrid layout algorithm and start simulation.
*/
function startHybridSimulation() {
console.log("startHybridSimulation");
function startHybridSimulation () {
console.log('startHybridSimulation');
springForce = false;
d3.selectAll(".nodes").remove();
d3.selectAll('.nodes').remove();
manualStop = false;
simulation.stop();
p1 = performance.now();
@@ -17,33 +17,33 @@ function startHybridSimulation() {
.neighbourSize(NEIGHBOUR_SIZE)
.sampleSize(SAMPLE_SIZE)
.stableVelocity(0) // Change here
.distance(distance)
.distance(distance);
let forceFull = d3.forceNeighbourSampling()
.neighbourSize(FULL_NEIGHBOUR_SIZE)
.sampleSize(FULL_SAMPLE_SIZE)
.stableVelocity(0) // Change here
.distance(distance)
.distance(distance);
let hybridSimulation = d3.hybridSimulation(simulation, forceSample, forceFull)
.sampleIterations(ITERATIONS)
.fullIterations(FULL_ITERATIONS)
.numPivots(PIVOTS ? NUM_PIVOTS:-1)
.numPivots(PIVOTS ? NUM_PIVOTS : -1)
.interpFindTuneIts(INTERP_ENDING_ITS)
.interpDistanceFn(distance)
.on("sampleTick", ticked)
.on("fullTick", ticked)
.on("startInterp", startedFull)
.on("end", ended);
.on('sampleTick', ticked)
.on('fullTick', ticked)
.on('startInterp', startedFull)
.on('end', ended);
let sample = hybridSimulation.subSet();
addNodesToDOM(sample);
hybridSimulation.restart();
function startedFull() {
console.log("startedFull");
d3.selectAll(".nodes").remove();
function startedFull () {
console.log('startedFull');
d3.selectAll('.nodes').remove();
addNodesToDOM(nodes);
}
}

View File

@@ -1,8 +1,8 @@
/**
* Initialize the link force algorithm and start simulation.
*/
function startLinkSimulation() {
console.log("startLinkSimulation")
function startLinkSimulation () {
console.log('startLinkSimulation');
springForce = false;
alreadyRanIterations = 0;
manualStop = true;
@@ -12,26 +12,25 @@ function startLinkSimulation() {
if (tweakedVerOfLink) {
force = d3.forceLinkCompleteGraph()
.distance(function (n, m) {
return distanceFunction(n, m, props, norm);
})
.stableVelocity(0) // Change here
.onStableVelo(ended);
}
else {
for (i = nodes.length-1; i >= 1; i--) {
for (j = i-1; j >= 0; j--) {
.distance(function (n, m) {
return distanceFunction(n, m, props, norm);
})
.stableVelocity(0) // Change here
.onStableVelo(ended);
} else {
for (i = nodes.length - 1; i >= 1; i--) {
for (j = i - 1; j >= 0; j--) {
links.push({
source: nodes[i],
target: nodes[j],
target: nodes[j]
});
}
}
force = d3.forceLink()
.distance(function (n) {
return distanceFunction(n.source, n.target, props, norm);
})
.links(links);
.distance(function (n) {
return distanceFunction(n.source, n.target, props, norm);
})
.links(links);
}
/* Add force
@@ -51,9 +50,9 @@ function startLinkSimulation() {
simulation
.alphaDecay(0)
.alpha(1)
.on("tick", ticked)
.on("end", ended)
//.velocityDecay(0.8)
.force(forceName,force)
.on('tick', ticked)
.on('end', ended)
// .velocityDecay(0.8)
.force(forceName, force)
.restart();
}

View File

@@ -1,28 +1,28 @@
/**
* Initialize the Chalmers' 1996 algorithm and start simulation.
*/
function startNeighbourSamplingSimulation() {
console.log("startNeighbourSamplingSimulation");
//springForce = true;
function startNeighbourSamplingSimulation () {
console.log('startNeighbourSamplingSimulation');
// springForce = true;
alreadyRanIterations = 0;
manualStop = true;
simulation.stop();
p1 = performance.now();
let force = d3.forceNeighbourSampling()
.neighbourSize(NEIGHBOUR_SIZE)
.sampleSize(SAMPLE_SIZE)
.distance(function (s, t) {
return distanceFunction(s, t, props, norm);
})
.stableVelocity(0) // Change here
.onStableVelo(ended);
.neighbourSize(NEIGHBOUR_SIZE)
.sampleSize(SAMPLE_SIZE)
.distance(function (s, t) {
return distanceFunction(s, t, props, norm);
})
.stableVelocity(0) // Change here
.onStableVelo(ended);
simulation
.alphaDecay(0)
.alpha(1)
.on("tick", ticked)
.on("end", ended)
.on('tick', ticked)
.on('end', ended)
.force(forceName, force);
// Restart the simulation.
simulation.restart();

View File

@@ -1,7 +1,7 @@
/**
* Initialize the t-SNE algorithm and start simulation.
*/
function starttSNE() {
function starttSNE () {
springForce = false;
simulation.stop();
p1 = performance.now();
@@ -25,20 +25,20 @@ function starttSNE() {
/**
* Initialize the Barnes-Hut algorithm and start simulation.
*/
function startBarnesHutSimulation() {
console.log("startBarnesHutSimulation")
function startBarnesHutSimulation () {
console.log('startBarnesHutSimulation');
alreadyRanIterations = 0;
manualStop = false;
springForce = false;
p1 = performance.now();
simulation.alphaDecay(1 - Math.pow(0.001, 1 / ITERATIONS))
.on("tick", ticked)
.on("end", ended)
.force(forceName, d3.forceBarnesHut()
// The distance function that will be used to calculate distances
// between nodes.
.distance(function(s, t) { return distanceFunction(s, t, props, norm); }));
.on('tick', ticked)
.on('end', ended)
.force(forceName, d3.forceBarnesHut()
// The distance function that will be used to calculate distances
// between nodes.
.distance(function (s, t) { return distanceFunction(s, t, props, norm); }));
// Restart the simulation.
simulation.alpha(1).restart();
}

View File

@@ -5,14 +5,14 @@
* @param {array} properties - the properties of the nodes.
* @return {number} the distance between source and target nodes.
*/
function calculateCosineSimilarity(source, target, properties, normArgs) {
function calculateCosineSimilarity (source, target, properties, normArgs) {
var numerator = 0.0;
// console.log(properties);
// Iterate through every column of data
for (var i = 0; i < properties.length; i++) {
property = properties[i];
if (property.toLowerCase() !== "class" && property.toLowerCase() !== "app" && property.toLowerCase() !== "user" && property.toLowerCase() !== "weekday") {
if (property.toLowerCase() !== 'class' && property.toLowerCase() !== 'app' && property.toLowerCase() !== 'user' && property.toLowerCase() !== 'weekday') {
var s = source[property],
t = target[property];
@@ -26,7 +26,7 @@ function calculateCosineSimilarity(source, target, properties, normArgs) {
return Math.abs(numerator / denominator);
}
function squareRooted(node, properties, normArgs) {
function squareRooted (node, properties, normArgs) {
var sum = 0.0;
for (var i = 0, s; i < properties.length; i++) {

View File

@@ -5,14 +5,14 @@
* @param {array} properties - the properties of the nodes.
* @return {number} the distance between source and target nodes.
*/
function calculateDiceDissimilarity(source, target, properties, normArgs) {
function calculateDiceDissimilarity (source, target, properties, normArgs) {
var notShared = 0.0;
// console.log(properties);
// Iterate through every column of data
for (var i = 0; i < properties.length; i++) {
property = properties[i];
if (property.toLowerCase() !== "class" && property.toLowerCase() !== "app" && property.toLowerCase() !== "user" && property.toLowerCase() !== "weekday") {
if (property.toLowerCase() !== 'class' && property.toLowerCase() !== 'app' && property.toLowerCase() !== 'user' && property.toLowerCase() !== 'weekday') {
var s = source[property],
t = target[property];

View File

@@ -6,23 +6,23 @@
* @param {object} normArgs - the normalization arguments.
* @return {number} the distance between source and target nodes.
*/
function calculateDistance(source, target, properties, normArgs) {
function calculateDistance (source, target, properties, normArgs) {
var val1 = 0.0, val2 = 0.0,
sumDiff = 0.0,
ordDiff = 1.0,
ORD_FACTOR = 0.75,
cols = 0,
average = normArgs.avg,
sigma = normArgs.sig,
st_dev = normArgs.st_d;
sumDiff = 0.0,
ordDiff = 1.0,
ORD_FACTOR = 0.75,
cols = 0,
average = normArgs.avg,
sigma = normArgs.sig,
st_dev = normArgs.st_d;
// Iterate through every column of data
for (var i = 0; i < properties.length; i++) {
property = properties[i];
if (source.hasOwnProperty(property) && target.hasOwnProperty(property)
&& property.toLowerCase() !== "index" && property.toLowerCase() !== "type") {
if (source.hasOwnProperty(property) && target.hasOwnProperty(property) &&
property.toLowerCase() !== 'index' && property.toLowerCase() !== 'type') {
var s = source[property],
t = target[property];
t = target[property];
// Comparing Floats and Integers
if ((isNumeric(s) && isNumeric(t))) {
@@ -32,7 +32,7 @@ function calculateDistance(source, target, properties, normArgs) {
val1 = (val1 - average[i]) / (st_dev[i] * sigma[i]);
val2 = (val2 - average[i]) / (st_dev[i] * sigma[i]);
}
sumDiff += (val1-val2) * (val1-val2);
sumDiff += (val1 - val2) * (val1 - val2);
cols++;
// Comparing strings
} else if (/[a-zA-Z]/.test(s) && /[a-zA-Z]/.test(t) && s === t) {
@@ -42,9 +42,8 @@ function calculateDistance(source, target, properties, normArgs) {
// Comparing Dates
var parsedDateS = Date.parse(s);
var parsedDateT = Date.parse(t);
if (isNaN(s) && !isNaN(parsedDateS)
&& isNaN(t) && !isNaN(parsedDateT)) {
if (isNaN(s) && !isNaN(parsedDateS) &&
isNaN(t) && !isNaN(parsedDateT)) {
val1 = parsedDateS.valueOf(),
val2 = parsedDateT.valueOf();
@@ -52,7 +51,7 @@ function calculateDistance(source, target, properties, normArgs) {
val1 = (val1 - average[i]) / (st_dev[i] * sigma[i]);
val2 = (val2 - average[i]) / (st_dev[i] * sigma[i]);
}
sumDiff += (val1-val2) * (val1-val2);
sumDiff += (val1 - val2) * (val1 - val2);
cols++;
}
}
@@ -62,9 +61,9 @@ function calculateDistance(source, target, properties, normArgs) {
sumDiff *= ordDiff;
if (cols > 0) {
sumDiff *= properties.length/cols;
sumDiff *= properties.length / cols;
}
//console.log(sumDiff);
// console.log(sumDiff);
return sumDiff;
}

View File

@@ -6,33 +6,33 @@
* @param {node} target
* @return {number} the distance between source and target nodes.
*/
function calculateDistancePoker(source, target) {
function calculateDistancePoker (source, target) {
var sumDiff = 0.0,
ordDiff = 1.0,
ORD_FACTOR = 1.5,
cards = ["C1", "C2", "C3", "C4", "C5"],
cols = 0;
ordDiff = 1.0,
ORD_FACTOR = 1.5,
cards = ['C1', 'C2', 'C3', 'C4', 'C5'],
cols = 0;
// Iterate through cards
for (var i = 0; i < cards.length; i++) {
card = cards[i];
if (source.hasOwnProperty(card) && target.hasOwnProperty(card)) {
var s = parseInt(source[card]),
t = parseInt(target[card]);
t = parseInt(target[card]);
// Calculate the squared difference.
sumDiff += (s-t) * (s-t);
sumDiff += (s - t) * (s - t);
}
}
// Class of poker hands describes the similarities the best
// so give it more priority than checking the differences between cards.
if (source.hasOwnProperty("CLASS") && target.hasOwnProperty("CLASS")) {
var s = parseInt(source["CLASS"]),
t = parseInt(target["CLASS"]);
if (source.hasOwnProperty('CLASS') && target.hasOwnProperty('CLASS')) {
var s = parseInt(source['CLASS']),
t = parseInt(target['CLASS']);
// If classes differ, then scale them by a factor.
if (s !== t) {
ordDiff *= (ORD_FACTOR * (Math.abs(s-t)))
ordDiff *= (ORD_FACTOR * (Math.abs(s - t)));
}
}
@@ -40,4 +40,4 @@ function calculateDistancePoker(source, target) {
sumDiff *= ordDiff;
return sumDiff;
}
}

View File

@@ -5,14 +5,14 @@
* @param {array} properties - the properties of the nodes.
* @return {number} the distance between source and target nodes.
*/
function calculateEuclideanDistance(source, target, properties, normArgs) {
function calculateEuclideanDistance (source, target, properties, normArgs) {
var sumDiff = 0.0;
// console.log(normArgs);
// Iterate through every column of data
for (var i = 0; i < properties.length; i++) {
property = properties[i];
if (property.toLowerCase() !== "class" && property.toLowerCase() !== "app" && property.toLowerCase() !== "user" && property.toLowerCase() !== "weekday" && property.toLowerCase() !== "type") {
if (property.toLowerCase() !== 'class' && property.toLowerCase() !== 'app' && property.toLowerCase() !== 'user' && property.toLowerCase() !== 'weekday' && property.toLowerCase() !== 'type') {
var s = source[property],
t = target[property];

View File

@@ -5,7 +5,7 @@
* @param {array} properties - the properties of the nodes.
* @return {number} the distance between source and target nodes.
*/
function calculateEuclideanDistanceTSNE(source, target, properties, normArgs) {
function calculateEuclideanDistanceTSNE (source, target, properties, normArgs) {
var dotProduct = 0.0,
sumX = 0.0,
sumY = 0.0;
@@ -15,7 +15,7 @@ function calculateEuclideanDistanceTSNE(source, target, properties, normArgs) {
for (var i = 0; i < properties.length; i++) {
property = properties[i];
if (source.hasOwnProperty(property) && target.hasOwnProperty(property) &&
property.toLowerCase() !== "class") {
property.toLowerCase() !== 'class') {
var s = source[property],
t = target[property];

View File

@@ -5,14 +5,14 @@
* @param {array} properties - the properties of the nodes.
* @return {number} the distance between source and target nodes.
*/
function calculateJaccardDissimilarity(source, target, properties, normArgs) {
function calculateJaccardDissimilarity (source, target, properties, normArgs) {
var notShared = 0.0;
// console.log(properties);
// Iterate through every column of data
for (var i = 0; i < properties.length; i++) {
property = properties[i];
if (property.toLowerCase() !== "class" && property.toLowerCase() !== "app" && property.toLowerCase() !== "user" && property.toLowerCase() !== "weekday") {
if (property.toLowerCase() !== 'class' && property.toLowerCase() !== 'app' && property.toLowerCase() !== 'user' && property.toLowerCase() !== 'weekday') {
var s = source[property],
t = target[property];

View File

@@ -5,7 +5,7 @@
* @param {array} properties - the properties of the nodes.
* @return {number} the distance between source and target nodes.
*/
function calculateManhattanDistance(source, target, properties, normArgs) {
function calculateManhattanDistance (source, target, properties, normArgs) {
var sum = 0.0,
cols = 0;
@@ -13,7 +13,7 @@ function calculateManhattanDistance(source, target, properties, normArgs) {
// Iterate through every column of data
for (var i = 0; i < properties.length; i++) {
property = properties[i];
if (property.toLowerCase() !== "class" && property.toLowerCase() !== "app" && property.toLowerCase() !== "user" && property.toLowerCase() !== "weekday") {
if (property.toLowerCase() !== 'class' && property.toLowerCase() !== 'app' && property.toLowerCase() !== 'user' && property.toLowerCase() !== 'weekday') {
var s = source[property],
t = target[property];

View File

@@ -3,7 +3,7 @@
* @param {array} nodes
* @return {object} that contains the normalization parameters.
*/
function calculateNormalization(nodes) {
function calculateNormalization (nodes) {
var STANDARD_DEV = 2.0,
properties = Object.keys(nodes[0]),
sums = calculateSums(nodes, properties),
@@ -23,10 +23,8 @@ function calculateNormalization(nodes) {
};
}
function standardDevation(nodes, properties, avg) {
var stDev = new Array(properties.length).fill(0)
function standardDevation (nodes, properties, avg) {
var stDev = new Array(properties.length).fill(0);
for (var i = 0; i < properties.length; i++) {
var sum = 0;
@@ -48,11 +46,10 @@ function standardDevation(nodes, properties, avg) {
sum += Math.pow(val - propAvg, 2);
});
stDev[i] = Math.sqrt(sum/nodes.length);
stDev[i] = Math.sqrt(sum / nodes.length);
}
return stDev;
return stDev;
}
// Calculate the sum of values and the squared sum
@@ -63,7 +60,7 @@ function standardDevation(nodes, properties, avg) {
* @return {object} that contains arrays with sum of values
* and the squared sums.
*/
function calculateSums(nodes, properties) {
function calculateSums (nodes, properties) {
var sumOfValues = new Array(properties.length).fill(0),
sumOfSquares = new Array(properties.length).fill(0);
@@ -90,4 +87,4 @@ function calculateSums(nodes, properties) {
sumOfVal: sumOfValues,
sumOfSq: sumOfSquares
};
}
}

View File

@@ -3,6 +3,6 @@
* @param {object} n - object to check.
* @return {Boolean} true, if it is a number, false otherwise.
*/
function isNumeric(n) {
function isNumeric (n) {
return !isNaN(parseFloat(n)) && isFinite(n);
}
}

View File

@@ -1,409 +1,400 @@
// Get the width and heigh of the SVG element.
var width = +document.getElementById('svg').clientWidth,
height = +document.getElementById('svg').clientHeight;
var svg = d3.select("svg")
.call(d3.zoom().scaleExtent([0.0001, 1000000]).on("zoom", function () {
svg.attr("transform", d3.event.transform);
}))
.append("g");
var div = d3.select("body").append("div")
.attr("class", "tooltip")
.style("opacity", 0);
var brush = d3.brush()
.extent([[-9999999, -9999999], [9999999, 9999999]])
.on("end", brushEnded);
svg.append("g")
.attr("class", "brush")
.call(brush);
//var intercom = Intercom.getInstance();
//intercom.on("select", unSelectNodes);
var nodes, // as in Data points
node, // as in SVG object that have all small circles on screen
props,
norm,
p1 = 0,
p2 = 0,
size,
distanceFunction,
simulation,
velocities = [],
rendering = true, // Rendering during the execution.
forceName = "forces",
springForce = false,
tooltipWidth = 0,
fileName = "",
selectedData,
clickedIndex = -1,
paused = false,
alreadyRanIterations,
tweakedVerOfLink,
manualStop = false;
// Default parameters
var MULTIPLIER = 50,
PERPLEXITY = 30,
LEARNING_RATE = 10,
NEIGHBOUR_SIZE = 10,
SAMPLE_SIZE = 10,
PIVOTS = false,
NUM_PIVOTS = 3,
ITERATIONS = 300,
FULL_ITERATIONS = 20,
NODE_SIZE = 10,
COLOR_ATTRIBUTE = "",
FULL_NEIGHBOUR_SIZE = 10,
FULL_SAMPLE_SIZE = 10,
INTERP_ENDING_ITS = 20;
// Create a color scheme for a range of numbers.
var color = d3.scaleOrdinal(d3.schemeCategory10);
$(document).ready(function() {
distanceFunction = calculateDistance;
d3.select('#startSimulation').on('click', startHybridSimulation);
$("#HLParameters").show();
});
/**
* Parse the data from the provided csv file using Papa Parse library
* @param {file} evt - csv file.
*/
function parseFile(evt) {
// Clear the previous nodes
d3.selectAll(".nodes").remove();
springForce = false;
fileName = evt.target.files[0].name;
Papa.parse(evt.target.files[0], {
header: true,
dynamicTyping: true,
skipEmptyLines: true,
complete: function (results) {
processData(results.data, results.error);
}
});
}
/**
* Process the data and pass it into D3 force simulation.
* @param {array} data
* @param {object} error
*/
function processData(data, error) {
if (error) throw error.message;
nodes = data;
size = nodes.length;
simulation = d3.forceSimulation();
// Calculate normalization parameters for distance fns
norm = calculateNormalization(nodes);
props = Object.keys(nodes[0]); // Properties to consider by distance fn
COLOR_ATTRIBUTE = props[props.length-1];
var opts = document.getElementById('color_attr').options;
props.forEach(function (d) {
opts.add(new Option(d, d, (d === COLOR_ATTRIBUTE) ? true : false));
});
opts.selectedIndex = props.length-1;
//props.pop(); //Hide Iris index / last column from the distance function
//Put the nodes at (0,0)
nodes.forEach(function (d) {
d.x = 0;
d.y = 0;
});
addNodesToDOM(nodes);
// Pass the nodes to the D3 force simulation.
simulation
.nodes(nodes)
.stop();
ticked();
};
function addNodesToDOM(data) {
node = svg.append("g")
.attr("class", "nodes")
.selectAll("circle")
.data(data)
.enter().append("circle")
.attr("r", NODE_SIZE)
.attr("transform", "translate(" + width / 2 + "," + height / 2 + ")")
// Color code the data points by a property (for Poker Hands,
// it is a CLASS property).
.attr("fill", function (d) {
return color(d[COLOR_ATTRIBUTE]);
})
.on("mouseover", function (d) {
div.transition()
.duration(200)
.style("opacity", .9);
div.html(formatTooltip(d))
.style("left", (d3.event.pageX) + "px")
.style("top", (d3.event.pageY - (15 * props.length)) + "px")
.style("width", (6 * tooltipWidth) + "px")
.style("height", (14 * props.length) + "px");
highlightOnHover(d[COLOR_ATTRIBUTE]);
})
.on("mouseout", function (d) {
div.transition()
.duration(500)
.style("opacity", 0);
node.attr("opacity", 1);
})
.on("click", function (d) {
console.log("click", clickedIndex);
if (clickedIndex !== d.index) {
if (springForce) {
highlightNeighbours(Array.from(simulation.force(forceName).nodeNeighbours(d.index).keys()));
clickedIndex = d.index;
}
} else {
node.attr("r", NODE_SIZE).attr("stroke-width", 0);
clickedIndex = -1;
}
});
if (selectedData)
unSelectNodes(selectedData);
}
function ticked() {
alreadyRanIterations++;
// If rendering is selected, then draw at every iteration.
if (rendering === true) {
node // Each sub-circle in the SVG, update cx and cy
.attr("cx", function (d) {
return d.x*MULTIPLIER;
})
.attr("cy", function (d) {
return d.y*MULTIPLIER;
});
}
// Legacy: Emit the distribution data to allow the drawing of the bar graph
//if (springForce) {
// intercom.emit("passedData", simulation.force(forceName).distributionData());
//}
if(manualStop && alreadyRanIterations == ITERATIONS) {
ended();
}
}
function ended() {
simulation.stop();
simulation.force(forceName, null);
if (rendering !== true) { // Never drawn anything before? Now it's time.
node
.attr("cx", function (d) {
return d.x*MULTIPLIER;
})
.attr("cy", function (d) {
return d.y*MULTIPLIER;
});
}
if (p1 !== 0) {
// Performance time measurement
p2 = performance.now();
console.log("Execution time: " + (p2 - p1));
p1 = 0;
p2 = 0;
}
}
function brushEnded() {
var s = d3.event.selection,
results = [];
if (s) {
var x0 = s[0][0] - width / 2,
y0 = s[0][1] - height / 2,
x1 = s[1][0] - width / 2,
y1 = s[1][1] - height / 2;
if (nodes) {
var sel = node.filter(function (d) {
if (d.x > x0 && d.x < x1 && d.y > y0 && d.y < y1) {
return true;
}
return false;
}).data();
results = sel.map(function (a) { return a.index; });
}
//intercom.emit("select", { name: fileName, indices: results });
d3.select(".brush").call(brush.move, null);
}
}
/**
* Format the tooltip for the data
* @param {*} node
*/
function formatTooltip(node) {
var textString = "",
temp = "";
tooltipWidth = 0;
props.forEach(function (element) {
temp = element + ": " + node[element] + "<br/>";
textString += temp;
if (temp.length > tooltipWidth) {
tooltipWidth = temp.length;
}
});
return textString;
}
/**
* Halt the execution.
*/
function stopSimulation() {
simulation.stop();
if (typeof hybridSimulation !== 'undefined') {
hybridSimulation.stop();
}
}
/**
* Calculate the average values of the array.
* @param {array} array
* @return {number} the mean of the array.
*/
function getAverage(array) {
console.log("getAverage", array);
var total = 0;
for (var i = 0; i < array.length; i++) {
total += array[i];
}
return total / array.length;
}
/**
* Deselect the nodes to match the selection from other window.
* @param {*} data
*/
function unSelectNodes(data) {
selectedData = data;
if (fileName === data.name && nodes) {
node
.classed("notSelected", function (d) {
if (data.indices.indexOf(d.index) < 0) {
return true;
}
return false;
});
}
}
/**
* Highlight the neighbours for neighbour and sampling algorithm
* @param {*} indices
*/
function highlightNeighbours(indices) {
node
.attr("r", function (d) {
if (indices.indexOf(d.index) >= 0) {
return NODE_SIZE * 2;
}
return NODE_SIZE;
})
.attr("stroke-width", function (d) {
if (indices.indexOf(d.index) >= 0) {
return NODE_SIZE * 0.2 + "px";
}
return "0px";
})
.attr("stroke", "white");
}
/**
* Highlight all the nodes with the same class on hover
* @param {*} highlighValue
*/
function highlightOnHover(highlighValue) {
node.attr("opacity", function (d) {
return (highlighValue === d[COLOR_ATTRIBUTE]) ? 1 : 0.3;
});
}
/**
* Color the nodes according to given attribute.
*/
function colorToAttribute() {
node.attr("fill", function (d) {
return color(d[COLOR_ATTRIBUTE])
});
}
/**
* Update the distance range.
function updateDistanceRange() {
if (springForce) {
simulation.force(forceName).distanceRange(SELECTED_DISTANCE);
}
}
/**
* Implemented pause/resume functionality
*/
function pauseUnPause() {
if (simulation) {
if (paused) {
simulation.force(forceName);
simulation.restart();
d3.select("#pauseButton").text("Pause");
paused = false;
} else {
simulation.stop();
d3.select("#pauseButton").text("Resume");
paused = true;
}
}
}
/**
* Average distances for each node.
* @param {*} dataNodes
* @param {*} properties
* @param {*} normalization
function calculateAverageDistance(dataNodes, properties, normalization) {
var sum = 0,
n = nodes.length;
for (var i = 0; i < n; i++) {
var sumNode = 0;
for (var j = 0; j < n; j++) {
if (i !== j) {
sumNode += distanceFunction(nodes[i], nodes[j], properties, normalization);
// console.log(sumNode);
}
}
sum += sumNode / (n - 1);
}
return sum / n;
}*/
// Get the width and height of the SVG element.
var width = +document.getElementById('svg').clientWidth,
  height = +document.getElementById('svg').clientHeight;
// Root SVG selection with pan/zoom attached; everything is drawn inside the
// appended <g> so the zoom transform applies to all nodes at once.
var svg = d3.select('svg')
  .call(d3.zoom().scaleExtent([0.0001, 1000000]).on('zoom', function () {
    svg.attr('transform', d3.event.transform);
  }))
  .append('g');
// Tooltip <div>, kept invisible until a node is hovered (see addNodesToDOM).
var div = d3.select('body').append('div')
  .attr('class', 'tooltip')
  .style('opacity', 0);
// Rectangular brush for selecting nodes; extent is effectively unbounded.
var brush = d3.brush()
  .extent([[-9999999, -9999999], [9999999, 9999999]])
  .on('end', brushEnded);
svg.append('g')
  .attr('class', 'brush')
  .call(brush);
// var intercom = Intercom.getInstance();
// intercom.on("select", unSelectNodes);
// Shared mutable state used across the simulation start/tick/end handlers.
var nodes, // as in Data points
  node, // as in SVG object that have all small circles on screen
  props, // column names of the loaded CSV (inputs to the distance functions)
  norm, // normalization parameters from calculateNormalization
  p1 = 0, // performance.now() timestamps for execution-time measurement
  p2 = 0,
  size,
  distanceFunction,
  simulation,
  velocities = [],
  rendering = true, // Rendering during the execution.
  forceName = 'forces',
  springForce = false,
  tooltipWidth = 0, // widest tooltip line, maintained by formatTooltip
  fileName = '',
  selectedData,
  clickedIndex = -1, // index of the currently clicked node, -1 when none
  paused = false,
  alreadyRanIterations,
  tweakedVerOfLink,
  manualStop = false;
// Default parameters
var MULTIPLIER = 50,
  PERPLEXITY = 30,
  LEARNING_RATE = 10,
  NEIGHBOUR_SIZE = 10,
  SAMPLE_SIZE = 10,
  PIVOTS = false,
  NUM_PIVOTS = 3,
  ITERATIONS = 100,
  FULL_ITERATIONS = 20,
  NODE_SIZE = 10,
  COLOR_ATTRIBUTE = '',
  FULL_NEIGHBOUR_SIZE = 10,
  FULL_SAMPLE_SIZE = 10,
  INTERP_ENDING_ITS = 20;
// Create a color scheme for a range of numbers.
var color = d3.scaleOrdinal(d3.schemeCategory10);
// Wire up the UI once the DOM is ready (jQuery).
$(document).ready(function () {
  distanceFunction = calculateDistance;
  d3.select('#startSimulation').on('click', startHybridSimulation);
  $('#HLParameters').show();
});
/**
 * Load and parse a user-chosen CSV file with the Papa Parse library.
 * Clears any previously rendered nodes first, then hands the parsed rows
 * to processData.
 * @param {Event} evt - change event of the file input; evt.target.files[0]
 *   is the CSV file to parse.
 */
function parseFile (evt) {
  var file = evt.target.files[0];
  // Drop the nodes of any previous run before loading new data.
  d3.selectAll('.nodes').remove();
  springForce = false;
  fileName = file.name;
  Papa.parse(file, {
    header: true,
    dynamicTyping: true,
    skipEmptyLines: true,
    complete: function (results) {
      processData(results.data, results.error);
    }
  });
}
/**
 * Process the parsed CSV rows and hand them to the D3 force simulation.
 * Computes normalization parameters, fills the color-attribute dropdown,
 * places every node at the origin and renders the initial state.
 * @param {array} data - parsed rows; each row becomes one node object.
 * @param {object} error - Papa Parse error, rethrown as an Error if present.
 * @throws {Error} when the parser reported an error.
 */
function processData (data, error) {
  // Fix: throw a real Error (not a bare string) so callers get a stack trace.
  if (error) throw new Error(error.message);
  nodes = data;
  size = nodes.length;
  simulation = d3.forceSimulation();
  // Calculate normalization parameters for distance fns
  norm = calculateNormalization(nodes);
  props = Object.keys(nodes[0]); // Properties to consider by distance fn
  // Default the color coding to the last column (typically the class label).
  COLOR_ATTRIBUTE = props[props.length - 1];
  var opts = document.getElementById('color_attr').options;
  props.forEach(function (d) {
    opts.add(new Option(d, d, (d === COLOR_ATTRIBUTE)));
  });
  opts.selectedIndex = props.length - 1;
  // props.pop(); //Hide Iris index / last column from the distance function
  // Put the nodes at (0,0)
  nodes.forEach(function (d) {
    d.x = 0;
    d.y = 0;
  });
  addNodesToDOM(nodes);
  // Pass the nodes to the D3 force simulation (stopped; ticked() draws once).
  simulation
    .nodes(nodes)
    .stop();
  ticked();
}
/**
 * Create one SVG circle per data point and attach the tooltip,
 * hover-highlight and click-to-highlight-neighbours interactions.
 * Stores the resulting selection in the module-level `node` variable.
 * @param {array} data - data points to render.
 */
function addNodesToDOM (data) {
  node = svg.append('g')
    .attr('class', 'nodes')
    .selectAll('circle')
    .data(data)
    .enter().append('circle')
    .attr('r', NODE_SIZE)
    // Center the circles within the SVG viewport.
    .attr('transform', 'translate(' + width / 2 + ',' + height / 2 + ')')
    // Color code the data points by a property (for Poker Hands,
    // it is a CLASS property).
    .attr('fill', function (d) {
      return color(d[COLOR_ATTRIBUTE]);
    })
    .on('mouseover', function (d) {
      // Fade the tooltip in next to the cursor; width/height are derived
      // from the longest tooltip line (tooltipWidth is set by formatTooltip).
      div.transition()
        .duration(200)
        .style('opacity', 0.9);
      div.html(formatTooltip(d))
        .style('left', (d3.event.pageX) + 'px')
        .style('top', (d3.event.pageY - (15 * props.length)) + 'px')
        .style('width', (6 * tooltipWidth) + 'px')
        .style('height', (14 * props.length) + 'px');
      // Dim every node that does not share this node's color attribute.
      highlightOnHover(d[COLOR_ATTRIBUTE]);
    })
    .on('mouseout', function (d) {
      div.transition()
        .duration(500)
        .style('opacity', 0);
      node.attr('opacity', 1);
    })
    .on('click', function (d) {
      console.log('click', clickedIndex);
      if (clickedIndex !== d.index) {
        // NOTE(review): neighbour highlighting requires springForce, i.e. a
        // force exposing nodeNeighbours() — confirm which algorithms set it.
        if (springForce) {
          highlightNeighbours(Array.from(simulation.force(forceName).nodeNeighbours(d.index).keys()));
          clickedIndex = d.index;
        }
      } else {
        // Second click on the same node: reset size/stroke and deselect.
        node.attr('r', NODE_SIZE).attr('stroke-width', 0);
        clickedIndex = -1;
      }
    });
  // Re-apply any selection previously received from another window.
  if (selectedData) { unSelectNodes(selectedData); }
}
/**
 * Per-iteration tick handler: redraws node positions (when live rendering
 * is enabled) and, for manually capped algorithms, stops the simulation
 * once the configured iteration budget has been used.
 */
function ticked () {
  alreadyRanIterations++;
  // If rendering is selected, then draw at every iteration.
  if (rendering === true) {
    node // Each sub-circle in the SVG, update cx and cy
      .attr('cx', function (d) {
        return d.x * MULTIPLIER;
      })
      .attr('cy', function (d) {
        return d.y * MULTIPLIER;
      });
  }
  // Legacy: Emit the distribution data to allow the drawing of the bar graph
  // if (springForce) {
  //   intercom.emit("passedData", simulation.force(forceName).distributionData());
  // }
  // Fix: ITERATIONS is a *string* when it was set from the range slider's
  // oninput handler (ITERATIONS=value), so the strict `===` comparison
  // introduced by the lint pass never matched and the simulation never
  // stopped. Coerce with Number(), and use >= so the stop cannot be missed.
  if (manualStop && alreadyRanIterations >= Number(ITERATIONS)) {
    ended();
  }
}
/**
 * Finalise a simulation run: halt the simulation, detach the force and,
 * if nothing was drawn while running, render the final node positions.
 * Reports the measured execution time when a timer was started.
 */
function ended () {
simulation.stop();
simulation.force(forceName, null);
// Never drawn anything before? Now it's time.
if (rendering !== true) {
var scaled = function (d, axis) { return d[axis] * MULTIPLIER; };
node
.attr('cx', function (d) { return scaled(d, 'x'); })
.attr('cy', function (d) { return scaled(d, 'y'); });
}
// Performance time measurement (p1 is non-zero only while a run is timed).
if (p1 !== 0) {
p2 = performance.now();
console.log('Execution time: ' + (p2 - p1));
p1 = 0;
p2 = 0;
}
}
/**
 * Handler for the end of a d3 brush gesture: collects the indices of the
 * nodes inside the brushed rectangle, then clears the brush overlay.
 */
function brushEnded () {
var selection = d3.event.selection;
var results = [];
if (!selection) { return; }
// Brush coordinates are in screen space; shift them so they are
// comparable with the node coordinates, which are centred on (0,0).
var left = selection[0][0] - width / 2;
var top = selection[0][1] - height / 2;
var right = selection[1][0] - width / 2;
var bottom = selection[1][1] - height / 2;
if (nodes) {
var inside = node.filter(function (d) {
return d.x > left && d.x < right && d.y > top && d.y < bottom;
}).data();
results = inside.map(function (a) { return a.index; });
}
// intercom.emit("select", { name: fileName, indices: results });
d3.select('.brush').call(brush.move, null);
}
/**
 * Build the HTML body of the tooltip for a data point: one
 * "property: value" line per property. As a side effect, records the
 * longest line length in the file-level tooltipWidth variable, which is
 * used to size the tooltip box.
 * @param {*} node - the data point under the cursor
 * @return {string} HTML string for the tooltip
 */
function formatTooltip (node) {
var lines = props.map(function (prop) {
return prop + ': ' + node[prop] + '<br/>';
});
tooltipWidth = lines.reduce(function (widest, line) {
return Math.max(widest, line.length);
}, 0);
return lines.join('');
}
/**
 * Halt the execution of the main simulation and, when a hybrid
 * simulation has been created, halt that one as well.
 */
function stopSimulation () {
simulation.stop();
// hybridSimulation only exists when the hybrid algorithm was chosen;
// the typeof guard tolerates it being undeclared.
if (typeof hybridSimulation !== 'undefined') { hybridSimulation.stop(); }
}
/**
 * Calculate the average value of the array.
 * @param {array} array - numeric values
 * @return {number} the mean of the array (NaN for an empty array).
 */
function getAverage (array) {
// Leftover debug console.log removed; reduce replaces the manual loop.
var total = array.reduce(function (sum, value) { return sum + value; }, 0);
return total / array.length;
}
/**
 * Mirror a selection made in another window: every node whose index is
 * not in the selection gets the 'notSelected' class. The selection is
 * remembered in selectedData so it can be re-applied after a redraw.
 * @param {*} data - { name: source file name, indices: selected indices }
 */
function unSelectNodes (data) {
selectedData = data;
// Only react to selections that originate from the same data file.
if (fileName !== data.name || !nodes) { return; }
node.classed('notSelected', function (d) {
return data.indices.indexOf(d.index) < 0;
});
}
/**
 * Highlight the neighbours for neighbour and sampling algorithm:
 * neighbour nodes are drawn twice as large with a white outline,
 * all other nodes are reset to their normal size.
 * @param {*} indices - indices of the neighbour nodes
 */
function highlightNeighbours (indices) {
// A Set gives O(1) membership tests instead of the O(n) indexOf scan
// that previously ran once per rendered node per attribute.
var neighbourSet = new Set(indices);
node
.attr('r', function (d) {
return neighbourSet.has(d.index) ? NODE_SIZE * 2 : NODE_SIZE;
})
.attr('stroke-width', function (d) {
return neighbourSet.has(d.index) ? (NODE_SIZE * 0.2 + 'px') : '0px';
})
.attr('stroke', 'white');
}
/**
 * Highlight all the nodes with the same class on hover: nodes of the
 * hovered class keep full opacity, every other node is dimmed.
 * @param {*} highlightValue - class value of the hovered node
 */
function highlightOnHover (highlightValue) {
node.attr('opacity', function (d) {
if (d[COLOR_ATTRIBUTE] === highlightValue) {
return 1;
}
return 0.3;
});
}
/**
 * Color the nodes according to the configured attribute, using the
 * shared color scale.
 */
function colorToAttribute () {
var fill = function (d) {
return color(d[COLOR_ATTRIBUTE]);
};
node.attr('fill', fill);
}
/**
 * Update the distance range of the spring force to the selected range.
 */
function updateDistanceRange () {
if (springForce) {
simulation.force(forceName).distanceRange(SELECTED_DISTANCE);
}
}
/**
 * Implemented pause/resume functionality.
 * Toggles the running state of the simulation and updates the
 * #pauseButton label to match.
 */
function pauseUnPause () {
if (simulation) {
if (paused) {
// NOTE(review): this force() call is a getter and has no effect —
// presumably it was meant to re-attach the force before resuming
// (a second argument looks missing); verify against the caller.
simulation.force(forceName);
simulation.restart();
d3.select('#pauseButton').text('Pause');
paused = false;
} else {
simulation.stop();
d3.select('#pauseButton').text('Resume');
paused = true;
}
}
}
/**
* Average distances for each node.
* @param {*} dataNodes
* @param {*} properties
* @param {*} normalization
function calculateAverageDistance(dataNodes, properties, normalization) {
var sum = 0,
n = nodes.length;
for (var i = 0; i < n; i++) {
var sumNode = 0;
for (var j = 0; j < n; j++) {
if (i !== j) {
sumNode += distanceFunction(nodes[i], nodes[j], properties, normalization);
// console.log(sumNode);
}
}
sum += sumNode / (n - 1);
}
return sum / n;
} */

View File

@@ -1,17 +1,17 @@
export {default as forceNeighbourSampling}
from "./src/neighbourSampling";
from './src/neighbourSampling';
export { default as forceBarnesHut}
from "./src/barnesHut";
export {default as forceBarnesHut}
from './src/barnesHut';
export { default as tSNE}
from "./src/t-sne";
export {default as tSNE}
from './src/t-sne';
export { default as forceLinkCompleteGraph}
from "./src/link";
export {default as forceLinkCompleteGraph}
from './src/link';
export { default as hybridSimulation}
from "./src/hybridSimulation";
export {default as hybridSimulation}
from './src/hybridSimulation';
export { getStress as calculateStress }
from "./src/stress";
export {getStress as calculateStress}
from './src/stress';

View File

@@ -12,11 +12,19 @@
"main": "build/d3-spring-model.js",
"jsnext:main": "index",
"scripts": {
"lintcheck": "eslint index.js src",
"lintfix": "eslint index.js src --fix",
"build": "rm -rf build && mkdir build && rollup -g d3-force:d3,d3-dispatch:d3,d3-quadtree:d3,d3-collection:d3 -f umd -n d3 -o build/d3-spring-model.js -- index.js",
"minify": "node_modules/uglify-es/bin/uglifyjs build/d3-spring-model.js -c -m -o build/d3-spring-model.min.js",
"zip": "zip -j build/d3-spring-model.zip -- LICENSE README.md build/d3-spring-model.js build/d3-spring-model.min.js"
},
"devDependencies": {
"eslint": "4",
"eslint-config-standard": "^11.0.0",
"eslint-plugin-import": "^2.9.0",
"eslint-plugin-node": "^6.0.1",
"eslint-plugin-promise": "^3.7.0",
"eslint-plugin-standard": "^3.0.1",
"rollup": "0.36",
"uglify-js": "git+https://github.com/mishoo/UglifyJS2.git#harmony"
},

View File

@@ -1,158 +1,153 @@
import constant from "./constant";
import jiggle from "./jiggle";
import {x, y} from "./xy";
import {quadtree} from "d3-quadtree";
/**
* The refinement of the existing Barnes-Hut implementation in D3
* to fit the use case of the project. Previously the algorithm stored
* strength as internal node, now the random child is stored as internal
* node and the force calculations are done between the node and that internal
* object if they are sufficiently far away.
* The check to see if the nodes are far away was also changed to the one described in original Barnes-Hut paper.
* @return {force} calculated forces.
*/
export default function() {
var nodes,
node,
alpha,
distance = constant(300),
theta = 0.5;
/**
* Constructs a quadtree at every iteration and apply the forces by visiting
* each node in a tree.
* @param {number} _ - controls the stopping of the
* particle simulations.
*/
function force(_) {
var i, n = nodes.length, tree = quadtree(nodes, x, y).visitAfter(accumulate);
for (alpha = _, i = 0; i < n; ++i) {
node = nodes[i], tree.visit(apply);
}
}
/**
* Function used during the tree construction to fill out the nodes with
* correct data. Internal nodes acquire the random child while the leaf
* nodes accumulate forces from coincident quadrants.
* @param {quadrant} quad - node representing the quadrant in quadtree.
*/
function accumulate(quad) {
var q, d, children = [];
// For internal nodes, accumulate forces from child quadrants.
if (quad.length) {
for (var i = 0; i < 4; ++i) {
if ((q = quad[i]) && (d = q.data)) {
children.push(d);
}
}
// Choose a random child.
quad.data = children[Math.floor(Math.random() * children.length)];
quad.x = quad.data.x;
quad.y = quad.data.y;
}
// For leaf nodes, accumulate forces from coincident quadrants.
else {
q = quad;
q.x = q.data.x;
q.y = q.data.y;
}
}
/**
* Function that applies the forces for each node. If the objects are
* far away, the approximation is made. Otherwise, forces are calculated
* directly between the nodes.
* @param {quadrant} quad - node representing the quadrant in quadtree.
* @param {number} x1 - lower x bound of the node.
* @param {number} _ - lower y bound of the node.
* @param {number} x2 - upper x bound of the node.
* @return {boolean} - true if the approximation was applied.
*/
function apply(quad, x1, _, x2) {
var x = quad.data.x + quad.data.vx - node.x - node.vx,
y = quad.data.y + quad.data.vy - node.y - node.vy,
w = x2 - x1,
l = Math.sqrt(x * x + y * y);
// Apply the Barnes-Hut approximation if possible.
// Limit forces for very close nodes; randomize direction if coincident.
if (w / l < theta) {
if (x === 0) x = jiggle(), l += x * x;
if (y === 0) y = jiggle(), l += y * y;
if (quad.data) {
l = (l - +distance(node, quad.data)) / l * alpha;
x *= l, y *= l;
quad.data.vx -= x;
quad.data.vy -= y;
node.vx += x;
node.vy += y;
}
return true;
}
// Otherwise, process points directly.
else if (quad.length) return;
// Limit forces for very close nodes; randomize direction if coincident.
if (quad.data !== node || quad.next) {
if (x === 0) x = jiggle(), l += x * x;
if (y === 0) y = jiggle(), l += y * y;
}
do if (quad.data !== node) {
l = (l - +distance(node, quad.data)) / l * alpha;
x *= l, y *= l;
quad.data.vx -= x;
quad.data.vy -= y;
node.vx += x;
node.vy += y;
} while (quad = quad.next);
}
/**
* Calculates the stress. Basically, it computes the difference between
* high dimensional distance and real distance. The lower the stress is,
* the better layout.
* @return {number} - stress of the layout.
*/
function getStress() {
var totalDiffSq = 0, totalHighDistSq = 0;
for (var i = 0, source, target, realDist, highDist; i < nodes.length; i++) {
for (var j = 0; j < nodes.length; j++) {
if (i !== j) {
source = nodes[i], target = nodes[j];
realDist = Math.hypot(target.x-source.x, target.y-source.y);
highDist = +distance(nodes[i], nodes[j]);
totalDiffSq += Math.pow(realDist-highDist, 2);
totalHighDistSq += highDist * highDist;
}
}
}
return Math.sqrt(totalDiffSq/totalHighDistSq);
}
// API for initializing the algorithm, setting parameters and querying
// metrics.
force.initialize = function(_) {
nodes = _;
};
force.distance = function(_) {
return arguments.length ? (distance = typeof _ === "function" ? _ : constant(+_), force) : distance;
};
force.theta = function(_) {
return arguments.length ? (theta = _, force) : theta;
};
force.stress = function() {
return getStress();
};
return force;
}
import constant from './constant';
import jiggle from './jiggle';
import {x, y} from './xy';
import {quadtree} from 'd3-quadtree';
/**
* The refinement of the existing Barnes-Hut implementation in D3
* to fit the use case of the project. Previously the algorithm stored
* strength as internal node, now the random child is stored as internal
* node and the force calculations are done between the node and that internal
* object if they are sufficiently far away.
* The check to see if the nodes are far away was also changed to the one described in original Barnes-Hut paper.
* @return {force} calculated forces.
*/
export default function () {
var nodes,
node,
alpha,
distance = constant(300),
theta = 0.5;
/**
* Constructs a quadtree at every iteration and apply the forces by visiting
* each node in a tree.
* @param {number} _ - controls the stopping of the
* particle simulations.
*/
function force (_) {
var i, n = nodes.length, tree = quadtree(nodes, x, y).visitAfter(accumulate);
for (alpha = _, i = 0; i < n; ++i) {
node = nodes[i]; tree.visit(apply);
}
}
/**
* Function used during the tree construction to fill out the nodes with
* correct data. Internal nodes acquire the random child while the leaf
* nodes accumulate forces from coincident quadrants.
* @param {quadrant} quad - node representing the quadrant in quadtree.
*/
function accumulate (quad) {
var q, d, children = [];
// For internal nodes, accumulate forces from child quadrants.
if (quad.length) {
for (var i = 0; i < 4; ++i) {
if ((q = quad[i]) && (d = q.data)) {
children.push(d);
}
}
// Choose a random child.
quad.data = children[Math.floor(Math.random() * children.length)];
quad.x = quad.data.x;
quad.y = quad.data.y;
} else { // For leaf nodes, accumulate forces from coincident quadrants.
q = quad;
q.x = q.data.x;
q.y = q.data.y;
}
}
/**
* Function that applies the forces for each node. If the objects are
* far away, the approximation is made. Otherwise, forces are calculated
* directly between the nodes.
* @param {quadrant} quad - node representing the quadrant in quadtree.
* @param {number} x1 - lower x bound of the node.
* @param {number} _ - lower y bound of the node.
* @param {number} x2 - upper x bound of the node.
* @return {boolean} - true if the approximation was applied.
*/
function apply (quad, x1, _, x2) {
var x = quad.data.x + quad.data.vx - node.x - node.vx,
y = quad.data.y + quad.data.vy - node.y - node.vy,
w = x2 - x1,
l = Math.sqrt(x * x + y * y);
// Apply the Barnes-Hut approximation if possible.
// Limit forces for very close nodes; randomize direction if coincident.
if (w / l < theta) {
if (x === 0) { x = jiggle(); l += x * x; }
if (y === 0) { y = jiggle(); l += y * y; }
if (quad.data) {
l = (l - +distance(node, quad.data)) / l * alpha;
x *= l; y *= l;
quad.data.vx -= x;
quad.data.vy -= y;
node.vx += x;
node.vy += y;
}
return true;
} else if (quad.length) return; // Otherwise, process points directly.
// Limit forces for very close nodes; randomize direction if coincident.
if (quad.data !== node || quad.next) {
if (x === 0) { x = jiggle(); l += x * x; }
if (y === 0) { y = jiggle(); l += y * y; }
}
do {
if (quad.data !== node) {
l = (l - +distance(node, quad.data)) / l * alpha;
x *= l; y *= l;
quad.data.vx -= x;
quad.data.vy -= y;
node.vx += x;
node.vy += y;
}
} while (quad = quad.next);
}
/**
* Calculates the stress. Basically, it computes the difference between
* high dimensional distance and real distance. The lower the stress is,
* the better layout.
* @return {number} - stress of the layout.
*/
function getStress () {
var totalDiffSq = 0, totalHighDistSq = 0;
for (var i = 0, source, target, realDist, highDist; i < nodes.length; i++) {
for (var j = 0; j < nodes.length; j++) {
if (i !== j) {
source = nodes[i]; target = nodes[j];
realDist = Math.hypot(target.x - source.x, target.y - source.y);
highDist = +distance(nodes[i], nodes[j]);
totalDiffSq += Math.pow(realDist - highDist, 2);
totalHighDistSq += highDist * highDist;
}
}
}
return Math.sqrt(totalDiffSq / totalHighDistSq);
}
// API for initializing the algorithm, setting parameters and querying
// metrics.
force.initialize = function (_) {
nodes = _;
};
force.distance = function (_) {
return arguments.length ? (distance = typeof _ === 'function' ? _ : constant(+_), force) : distance;
};
force.theta = function (_) {
return arguments.length ? (theta = _, force) : theta;
};
force.stress = function () {
return getStress();
};
return force;
}

View File

@@ -1,8 +1,8 @@
/**
* @return a constant defined by x.
*/
export default function(x) {
return function() {
export default function (x) {
return function () {
return x;
};
}

View File

@@ -1,203 +1,204 @@
import {dispatch} from "d3-dispatch";
import constant from "./constant";
import interpBruteForce from "./interpolation/interpBruteForce";
import interpolationPivots from "./interpolation/interpolationPivots";
import {takeSampleFrom} from "./interpolation/helpers";
/**
* An implementation of Chalmers, Morrison, and Ross' 2002 hybrid layout
* algorithm with an option to use the 2003 pivot-based near neighbour searching
* method.
* It performs the 1996 neighbour sampling spring simulation model, on a
* "sample set", sqrt(n) samples of the data.
* Other data points are then interpolated into the model.
* Finally, another spring simulation may be performed on the entire dataset to
* clean up the model.
* @param {object} sim - D3 Simulation object
* @param {object} forceS - Pre-configured D3 force object for the sample set.
The ending handler will be re-configured.
Neighbour sampling force is expected, but other D3
forces may also work.
* @param {object} forceF - Pre-configured D3 force object for the simultion ran
on the entire dataset at the end.
Neighbour sampling force is expected, but other D3
forces may also work.
The force should not have any ending condition.
*/
export default function (sim, forceS, forceF) {
var
SAMPLE_ITERATIONS = 300,
FULL_ITERATIONS = 20,
interpDistanceFn,
NUM_PIVOTS = 0,
INTERP_FINE_ITS = 20,
sample = [],
remainder = [],
simulation = sim,
forceSample = forceS,
forceFull = forceF,
event = d3.dispatch("sampleTick", "fullTick", "startInterp", "end"),
initAlready = false,
nodes,
alreadyRanIterations,
hybrid;
if(simulation != undefined) initSimulation();
if(forceS != undefined || forceF != undefined) initForces();
// Performed on first run
function initialize() {
initAlready = true;
alreadyRanIterations = 0;
simulation
.on("tick", sampleTick)
.on("end", sampleEnded)
.nodes(sample)
.force("Sample force", forceSample);
console.log("Initialized Simulation for Hybrid");
}
function initForces(){
if (forceSample.onStableVelo) {
forceSample.onStableVelo(sampleEnded);
}
if (forceFull.onStableVelo) {
forceFull.onStableVelo(fullEnded);
}
// Set default value for interpDistanceFn if not been specified yet
if(interpDistanceFn === undefined) {
if(forceFull.distance == 'function')
interpDistanceFn = forceFull.distance();
else
interpDistanceFn = constant(300);
}
}
function initSimulation(){
nodes = simulation.nodes();
simulation
.stop()
.alphaDecay(0)
.alpha(1)
let sets = takeSampleFrom(nodes, Math.sqrt(nodes.length));
sample = sets.sample;
remainder = sets.remainder;
}
// Sample simulation ticked 1 frame, keep track of number of iterations here.
function sampleTick() {
event.call("sampleTick");
if(alreadyRanIterations++ >= SAMPLE_ITERATIONS){
sampleEnded();
}
}
// Full simulation ticked 1 frame, keep track of number of iterations here.
function fullTick() {
event.call("fullTick");
if(alreadyRanIterations++ >= FULL_ITERATIONS){
fullEnded();
}
}
function fullEnded() {
simulation.stop();
initAlready = false;
simulation.force("Full force", null);
event.call("end");
}
function sampleEnded() {
simulation.stop();
simulation.force("Sample force", null);
// Reset velocity of all nodes
for (let i=sample.length-1; i>=0; i--){
sample[i].vx=0;
sample[i].vy=0;
}
event.call("startInterp");
if (NUM_PIVOTS>=1) {
interpolationPivots(sample, remainder, NUM_PIVOTS, interpDistanceFn, INTERP_FINE_ITS);
} else {
interpBruteForce(sample, remainder, interpDistanceFn, INTERP_FINE_ITS);
}
event.call("fullTick");
alreadyRanIterations = 0;
simulation
.on("tick", null)
.on("end", null) // The ending condition should be iterations count
.nodes(nodes);
if (FULL_ITERATIONS<1 || forceF === undefined || forceF === null) {
event.call("end");
return;
}
simulation
.on("tick", fullTick)
.force("Full force", forceFull)
.restart();
}
return hybrid = {
restart: function () {
if(!initAlready) initialize();
simulation.restart();
return hybrid;
},
stop: function () {
simulation.stop();
return hybrid;
},
numPivots: function (_) {
return arguments.length ? (NUM_PIVOTS = +_, hybrid) : NUM_PIVOTS;
},
sampleIterations: function (_) {
return arguments.length ? (SAMPLE_ITERATIONS = +_, hybrid) : SAMPLE_ITERATIONS;
},
fullIterations: function (_) {
return arguments.length ? (FULL_ITERATIONS = +_, hybrid) : FULL_ITERATIONS;
},
interpFindTuneIts: function (_) {
return arguments.length ? (INTERP_FINE_ITS = +_, hybrid) : INTERP_FINE_ITS;
},
on: function (name, _) {
return arguments.length > 1 ? (event.on(name, _), hybrid) : event.on(name);
},
subSet: function (_) {
return arguments.length ? (sample = _, hybrid) : sample;
},
nonSubSet: function (_) {
return arguments.length ? (remainder = _, hybrid) : remainder;
},
interpDistanceFn: function (_) {
return arguments.length ? (interpDistanceFn = typeof _ === "function" ? _ : constant(+_), hybrid) : interpDistanceFn;
},
simulation: function (_) {
return arguments.length ? (toInit = true, simulation = _, hybrid) : simulation;
},
forceSample: function (_) {
return arguments.length ? (forceSample = _, initForces(), hybrid) : forceSample;
},
forceFull: function (_) {
return arguments.length ? (forceFull = _, initForces(), hybrid) : forceFull;
},
};
}
import {dispatch} from 'd3-dispatch';
import constant from './constant';
import interpBruteForce from './interpolation/interpBruteForce';
import interpolationPivots from './interpolation/interpolationPivots';
import {takeSampleFrom} from './interpolation/helpers';
/**
* An implementation of Chalmers, Morrison, and Ross' 2002 hybrid layout
* algorithm with an option to use the 2003 pivot-based near neighbour searching
* method.
* It performs the 1996 neighbour sampling spring simulation model, on a
* "sample set", sqrt(n) samples of the data.
* Other data points are then interpolated into the model.
* Finally, another spring simulation may be performed on the entire dataset to
* clean up the model.
* @param {object} sim - D3 Simulation object
* @param {object} forceS - Pre-configured D3 force object for the sample set.
The ending handler will be re-configured.
Neighbour sampling force is expected, but other D3
forces may also work.
* @param {object} forceF - Pre-configured D3 force object for the simultion ran
on the entire dataset at the end.
Neighbour sampling force is expected, but other D3
forces may also work.
The force should not have any ending condition.
*/
export default function (sim, forceS, forceF) {
var
SAMPLE_ITERATIONS = 300,
FULL_ITERATIONS = 20,
interpDistanceFn,
NUM_PIVOTS = 0,
INTERP_FINE_ITS = 20,
sample = [],
remainder = [],
simulation = sim,
forceSample = forceS,
forceFull = forceF,
event = dispatch('sampleTick', 'fullTick', 'startInterp', 'end'),
initAlready = false,
nodes,
alreadyRanIterations,
hybrid;
if (simulation !== undefined) initSimulation();
if (forceS !== undefined || forceF !== undefined) initForces();
// Performed on first run
function initialize () {
initAlready = true;
alreadyRanIterations = 0;
simulation
.on('tick', sampleTick)
.on('end', sampleEnded)
.nodes(sample)
.force('Sample force', forceSample);
console.log('Initialized Simulation for Hybrid');
}
function initForces () {
if (forceSample.onStableVelo) {
forceSample.onStableVelo(sampleEnded);
}
if (forceFull.onStableVelo) {
forceFull.onStableVelo(fullEnded);
}
// Set default value for interpDistanceFn if not been specified yet
if (interpDistanceFn === undefined) {
if (forceFull.distance === 'function') {
interpDistanceFn = forceFull.distance();
} else {
interpDistanceFn = constant(300);
}
}
}
function initSimulation () {
nodes = simulation.nodes();
simulation
.stop()
.alphaDecay(0)
.alpha(1);
let sets = takeSampleFrom(nodes, Math.sqrt(nodes.length));
sample = sets.sample;
remainder = sets.remainder;
}
// Sample simulation ticked 1 frame, keep track of number of iterations here.
function sampleTick () {
event.call('sampleTick');
if (alreadyRanIterations++ >= SAMPLE_ITERATIONS) {
sampleEnded();
}
}
// Full simulation ticked 1 frame, keep track of number of iterations here.
function fullTick () {
event.call('fullTick');
if (alreadyRanIterations++ >= FULL_ITERATIONS) {
fullEnded();
}
}
function fullEnded () {
simulation.stop();
initAlready = false;
simulation.force('Full force', null);
event.call('end');
}
function sampleEnded () {
simulation.stop();
simulation.force('Sample force', null);
// Reset velocity of all nodes
for (let i = sample.length - 1; i >= 0; i--) {
sample[i].vx = 0;
sample[i].vy = 0;
}
event.call('startInterp');
if (NUM_PIVOTS >= 1) {
interpolationPivots(sample, remainder, NUM_PIVOTS, interpDistanceFn, INTERP_FINE_ITS);
} else {
interpBruteForce(sample, remainder, interpDistanceFn, INTERP_FINE_ITS);
}
event.call('fullTick');
alreadyRanIterations = 0;
simulation
.on('tick', null)
.on('end', null) // The ending condition should be iterations count
.nodes(nodes);
if (FULL_ITERATIONS < 1 || forceF === undefined || forceF === null) {
event.call('end');
return;
}
simulation
.on('tick', fullTick)
.force('Full force', forceFull)
.restart();
}
return hybrid = {
restart: function () {
if (!initAlready) initialize();
simulation.restart();
return hybrid;
},
stop: function () {
simulation.stop();
return hybrid;
},
numPivots: function (_) {
return arguments.length ? (NUM_PIVOTS = +_, hybrid) : NUM_PIVOTS;
},
sampleIterations: function (_) {
return arguments.length ? (SAMPLE_ITERATIONS = +_, hybrid) : SAMPLE_ITERATIONS;
},
fullIterations: function (_) {
return arguments.length ? (FULL_ITERATIONS = +_, hybrid) : FULL_ITERATIONS;
},
interpFindTuneIts: function (_) {
return arguments.length ? (INTERP_FINE_ITS = +_, hybrid) : INTERP_FINE_ITS;
},
on: function (name, _) {
return arguments.length > 1 ? (event.on(name, _), hybrid) : event.on(name);
},
subSet: function (_) {
return arguments.length ? (sample = _, hybrid) : sample;
},
nonSubSet: function (_) {
return arguments.length ? (remainder = _, hybrid) : remainder;
},
interpDistanceFn: function (_) {
return arguments.length ? (interpDistanceFn = typeof _ === 'function' ? _ : constant(+_), hybrid) : interpDistanceFn;
},
simulation: function (_) {
return arguments.length ? (initAlready = false, simulation = _, hybrid) : simulation;
},
forceSample: function (_) {
return arguments.length ? (forceSample = _, initForces(), hybrid) : forceSample;
},
forceFull: function (_) {
return arguments.length ? (forceFull = _, initForces(), hybrid) : forceFull;
}
};
}

View File

@@ -8,26 +8,26 @@
sample is the list of selected objects while
remainder is the list of those unselected.
*/
export function takeSampleFrom(sourceList, amount) {
export function takeSampleFrom (sourceList, amount) {
let randElements = [],
max = sourceList.length,
swap = false;
max = sourceList.length,
swap = false;
if (amount >= max) {
return {sample: sourceList, remainder: {}};
}
// If picking more than half of the entire set, random to pick the remainder instead
if (amount > Math.ceil(max/2)){
if (amount > Math.ceil(max / 2)) {
amount = max - amount;
swap = true;
}
for (let i = 0; i < amount; ++i) {
let rand = sourceList[Math.floor((Math.random() * max))];
let rand = sourceList[Math.floor(Math.random() * max)];
// Re-random until suitable value is found.
while (randElements.includes(rand)) {
rand = sourceList[Math.floor((Math.random() * max))];
rand = sourceList[Math.floor(Math.random() * max)];
}
randElements.push(rand);
}
@@ -35,13 +35,12 @@ export function takeSampleFrom(sourceList, amount) {
return !randElements.includes(obj);
});
if(swap) {
if (swap) {
return {
sample: remainder,
remainder: randElements
};
}
else {
} else {
return {
sample: randElements,
remainder: remainder
@@ -58,14 +57,14 @@ export function takeSampleFrom(sourceList, amount) {
* @param {number} r
* @return {object} - coordinate {x: number, y: number} of the point
*/
export function pointOnCircle(h, k, angle, r) {
export function pointOnCircle (h, k, angle, r) {
return {
x: h + r*Math.cos(toRadians(angle)),
y: k + r*Math.sin(toRadians(angle))
x: h + r * Math.cos(toRadians(angle)),
y: k + r * Math.sin(toRadians(angle))
};
}
function toRadians(degrees) {
function toRadians (degrees) {
return degrees * (Math.PI / 180);
}
@@ -80,7 +79,7 @@ function toRadians(degrees) {
that of samples.
* @return {number} - Sum of distances differences
*/
export function sumDistError(node, samples, realDistances) {
export function sumDistError (node, samples, realDistances) {
let total = 0.0;
for (let i = 0; i < samples.length; i++) {
let sample = samples[i];

View File

@@ -1,5 +1,5 @@
import {takeSampleFrom} from "./helpers";
import {placeNearToNearestNeighbour} from "./interpCommon";
import {takeSampleFrom} from './helpers';
import {placeNearToNearestNeighbour} from './interpCommon';
/**
* Perform interpolation where the "parent" node is found by brute-force.
@@ -18,19 +18,19 @@ import {placeNearToNearestNeighbour} from "./interpCommon";
* @param {number} endingIts - for phase 3, how many iterations to refine the
* placement of each interpolated point
*/
export default function(sampleSet, remainderSet, distanceFn, endingIts) {
export default function (sampleSet, remainderSet, distanceFn, endingIts) {
let
sampleSubset = takeSampleFrom(sampleSet, Math.sqrt(sampleSet.length)).sample,
sampleSubsetDistanceCache = [];
// For each datapoint "node" to be interpolated
for (let i = remainderSet.length-1; i>=0; i--) {
// For each datapoint "node" to be interpolated
for (let i = remainderSet.length - 1; i >= 0; i--) {
let
node = remainderSet[i],
nearestSample, minDist, sample, dist, index;
// For each datapoint "sample" in the sample set
for (let j = sampleSet.length-1; j>=0; j--) {
for (let j = sampleSet.length - 1; j >= 0; j--) {
sample = sampleSet[j];
dist = distanceFn(node, sample);
if (nearestSample === undefined || dist < minDist) {
@@ -39,8 +39,7 @@ export default function(sampleSet, remainderSet, distanceFn, endingIts) {
}
index = sampleSubset.indexOf(sample);
if (index !== -1)
sampleSubsetDistanceCache[index] = dist;
if (index !== -1) { sampleSubsetDistanceCache[index] = dist; }
}
placeNearToNearestNeighbour(node, nearestSample, minDist, sampleSubset, sampleSubsetDistanceCache, endingIts);

View File

@@ -1,5 +1,5 @@
import {pointOnCircle, sumDistError} from "./helpers";
import jiggle from "../jiggle";
import {pointOnCircle, sumDistError} from './helpers';
import jiggle from '../jiggle';
/**
* Phase 2 and 3 of each node to be interpolated.
@@ -24,9 +24,9 @@ import jiggle from "../jiggle";
index must correspond to sampleSubset
* @param {Integer} endingIts - Number of iterations for phase 3
*/
export function placeNearToNearestNeighbour(node, nearNeighbour, radius, sampleSubset, realDistances, endingIts) {
export function placeNearToNearestNeighbour (node, nearNeighbour, radius, sampleSubset, realDistances, endingIts) {
let
sumDistErrorByAngle = function(angle){
sumDistErrorByAngle = function (angle) {
return sumDistError(pointOnCircle(nearNeighbour.x, nearNeighbour.y, angle, radius), sampleSubset, realDistances);
},
dist0 = sumDistErrorByAngle(0),
@@ -36,17 +36,11 @@ export function placeNearToNearestNeighbour(node, nearNeighbour, radius, sampleS
lowBound = 0.0,
highBound = 0.0;
// Determine the closest quadrant
if (dist0 == dist180) {
if (dist90 > dist270)
lowBound = highBound = 270;
else
lowBound = highBound = 90;
} else if (dist90 == dist270) {
if (dist0 > dist180)
lowBound = highBound = 180;
else
lowBound = highBound = 0;
// Determine the closest quadrant
if (dist0 === dist180) {
if (dist90 > dist270) { lowBound = highBound = 270; } else { lowBound = highBound = 90; }
} else if (dist90 === dist270) {
if (dist0 > dist180) { lowBound = highBound = 180; } else { lowBound = highBound = 0; }
} else if (dist0 > dist180) {
if (dist90 > dist270) {
lowBound = 180;
@@ -55,14 +49,12 @@ export function placeNearToNearestNeighbour(node, nearNeighbour, radius, sampleS
lowBound = 90;
highBound = 180;
}
} else if (dist90 > dist270) {
lowBound = 270;
highBound = 360;
} else {
if (dist90 > dist270) {
lowBound = 270;
highBound = 360;
} else {
lowBound = 0;
highBound = 90;
}
lowBound = 0;
highBound = 90;
}
// Determine the angle
@@ -73,21 +65,21 @@ export function placeNearToNearestNeighbour(node, nearNeighbour, radius, sampleS
// Phase 3
let
multiplier = 1/sampleSubset.length,
multiplier = 1 / sampleSubset.length,
sumForces;
for (let i = 0; i < endingIts; i++) {
sumForces = sumForcesToSample(node, sampleSubset, realDistances);
node.x += sumForces.x*multiplier;
node.y += sumForces.y*multiplier;
node.x += sumForces.x * multiplier;
node.y += sumForces.y * multiplier;
}
}
function sumForcesToSample(node, samples, sampleCache) {
function sumForcesToSample (node, samples, sampleCache) {
let nodeVx = 0,
nodeVy = 0,
x, y, l, i, sample;
nodeVy = 0,
x, y, l, i, sample;
for (i = samples.length-1; i >=0 ; i--) {
for (i = samples.length - 1; i >= 0; i--) {
sample = samples[i];
// jiggle so l won't be zero and divide by zero error after this
@@ -95,7 +87,7 @@ function sumForcesToSample(node, samples, sampleCache) {
y = node.y - sample.y || jiggle();
l = Math.sqrt(x * x + y * y);
l = (l - sampleCache[i]) / l;
x *= l, y *= l;
x *= l; y *= l;
nodeVx -= x;
nodeVy -= y;
}
@@ -110,27 +102,27 @@ function sumForcesToSample(node, samples, sampleCache) {
* @param {function(x)} fn - function that takes in a number x and returns a number
* @return {integer} - an integer x where f(x) is minimum
*/
function binarySearchMin(lb, hb, fn) {
function binarySearchMin (lb, hb, fn) {
while (lb <= hb) {
if(lb === hb) return lb;
if (lb === hb) return lb;
if(hb-lb == 1) {
if (hb - lb === 1) {
if (fn(lb) >= fn(hb)) return hb;
else return lb;
}
let
range = hb-lb,
valLowerHalf = fn(lb + range/4),
valHigherHalf = fn(lb + range*3/4);
range = hb - lb,
valLowerHalf = fn(lb + range / 4),
valHigherHalf = fn(lb + range * 3 / 4);
if (valLowerHalf > valHigherHalf)
if (valLowerHalf > valHigherHalf) {
lb = Math.floor((lb + hb) / 2);
else if (valLowerHalf < valHigherHalf)
} else if (valLowerHalf < valHigherHalf) {
hb = Math.ceil((lb + hb) / 2);
else {
lb += Math.floor(range/4);
hb -= Math.ceil(range/4);
} else {
lb += Math.floor(range / 4);
hb -= Math.ceil(range / 4);
}
}
return -1;

View File

@@ -1,5 +1,5 @@
import {takeSampleFrom} from "./helpers";
import {placeNearToNearestNeighbour} from "./interpCommon";
import {takeSampleFrom} from './helpers';
import {placeNearToNearestNeighbour} from './interpCommon';
/**
* Perform interpolation where the "parent" node is is estimated by pivot-based searching.
@@ -23,7 +23,7 @@ import {placeNearToNearestNeighbour} from "./interpCommon";
* @param {number} endingIts - for phase 3, how many iterations to refine the
* placement of each interpolated point
*/
export default function(sampleSet, remainderSet, numPivots, distanceFn, endingIts) {
export default function (sampleSet, remainderSet, numPivots, distanceFn, endingIts) {
// Pivot based parent finding
let numBuckets = Math.floor(Math.sqrt(sampleSet.length));
let numNonPivots = sampleSet.length - numPivots;
@@ -44,8 +44,9 @@ export default function(sampleSet, remainderSet, numPivots, distanceFn, endingIt
let distCache = []; // [ For each non-pivot sample:[For each Pivot: distance] ]
let bucketWidths = []; // [ For each Pivot: width of each bucket ]
for (let i = 0; i < nonPivotSamples.length; i++)
for (let i = 0; i < nonPivotSamples.length; i++) {
distCache[i] = [];
}
for (let j = 0; j < numPivots; j++) {
let pivot = pivots[j];
@@ -54,8 +55,9 @@ export default function(sampleSet, remainderSet, numPivots, distanceFn, endingIt
for (let i = 0; i < numNonPivots; i++) {
let sample = nonPivotSamples[i];
distCache[i][j] = distanceFn(pivot, sample);
if (distCache[i][j] > maxDist)
if (distCache[i][j] > maxDist) {
maxDist = distCache[i][j];
}
}
bucketWidths.push(maxDist / numBuckets);
@@ -69,7 +71,7 @@ export default function(sampleSet, remainderSet, numPivots, distanceFn, endingIt
let bucketNumber = Math.floor(distCache[i][j] / bucketWidth);
if (bucketNumber >= numBuckets) {
bucketNumber = numBuckets - 1;
} else if (bucketNumber < 0) { // Should never be negative anyway
} else if (bucketNumber < 0) { // Should never be negative anyway
bucketNumber = 0;
}
pivotsBuckets[j][bucketNumber].push(sample);
@@ -77,10 +79,9 @@ export default function(sampleSet, remainderSet, numPivots, distanceFn, endingIt
}
// ---------------------------------------------------------------------
let sampleSubset = takeSampleFrom(sampleSet, Math.sqrt(sampleSet.length)).sample;
//Plot each of the remainder nodes
for (let i = remainderSet.length-1; i>=0; i--) {
// Plot each of the remainder nodes
for (let i = remainderSet.length - 1; i >= 0; i--) {
let node = remainderSet[i];
let sampleSubsetDistanceCache = [],
minDist, nearSample;
@@ -95,7 +96,7 @@ export default function(sampleSet, remainderSet, numPivots, distanceFn, endingIt
if (index !== -1) {
sampleSubsetDistanceCache[index] = dist;
}
if (minDist === undefined || dist < minDist){
if (minDist === undefined || dist < minDist) {
minDist = dist;
nearSample = pivot;
}
@@ -103,22 +104,21 @@ export default function(sampleSet, remainderSet, numPivots, distanceFn, endingIt
let bucketNumber = Math.floor(dist / bucketWidth);
if (bucketNumber >= numBuckets) {
bucketNumber = numBuckets - 1;
} else if (bucketNumber < 0) { // Should never be negative anyway
} else if (bucketNumber < 0) { // Should never be negative anyway
bucketNumber = 0;
}
for (let j = pivotsBuckets[p][bucketNumber].length-1; j>=0; j--) {
for (let j = pivotsBuckets[p][bucketNumber].length - 1; j >= 0; j--) {
let candidateNode = pivotsBuckets[p][bucketNumber][j];
let index = sampleSubset.indexOf(candidateNode);
if (index !== -1 && sampleSubsetDistanceCache[index] !== undefined)
dist = sampleSubsetDistanceCache[index]
else {
if (index !== -1 && sampleSubsetDistanceCache[index] !== undefined) {
dist = sampleSubsetDistanceCache[index];
} else {
dist = distanceFn(candidateNode, node);
if (index !== -1)
sampleSubsetDistanceCache[index] = dist;
if (index !== -1) { sampleSubsetDistanceCache[index] = dist; }
}
if (dist < minDist){
if (dist < minDist) {
minDist = dist;
nearSample = candidateNode;
}
@@ -127,8 +127,9 @@ export default function(sampleSet, remainderSet, numPivots, distanceFn, endingIt
// Fill in holes in cache
for (let k = 0; k < sampleSubset.length; k++) {
if (sampleSubsetDistanceCache[k] === undefined)
if (sampleSubsetDistanceCache[k] === undefined) {
sampleSubsetDistanceCache[k] = distanceFn(node, sampleSubset[k]);
}
}
placeNearToNearestNeighbour(node, nearSample, minDist, sampleSubset, sampleSubsetDistanceCache, endingIts);
}

View File

@@ -1,7 +1,7 @@
/**
* @return {number} a very small non-zero random number.
*/
export default function() {
export default function () {
let rand;
do {
rand = (Math.random() - 0.5) * 1e-6;

View File

@@ -1,108 +1,110 @@
import constant from "./constant";
import jiggle from "./jiggle";
/**
* Modified link force algorithm
* - simplify calculations for parameters locked for spring model
* - replace the use of links {} with loop. greatly reduce memory usage
* - removed other unused functions
* Alpha should be constant 1 for accurate simulation
*/
export default function () {
  // Spring force applied over every unordered node pair; the pairwise rest
  // lengths are kept in a flat triangular array instead of d3's link objects.
  let dataSizeFactor;
  let distance = constant(30);
  const distances = [];
  let nodes;
  let stableVelocity = 0;
  let stableVeloHandler = null;
  let latestVelocityDiff = 0;
  let iterations = 1;

  /**
   * Apply one simulation tick of spring forces.
   * Alpha should be constant 1 for accurate simulation.
   * @param {number} alpha - multiplier for the amount of force applied.
   */
  function force (alpha) {
    const nodeCount = nodes.length;
    const trackStability = stableVeloHandler !== null && stableVelocity >= 0;
    // Cache the pre-tick velocities so the net change can be measured below.
    if (trackStability) {
      for (let i = nodeCount - 1; i >= 0; i--) {
        const node = nodes[i];
        node.oldvx = node.vx;
        node.oldvy = node.vy;
      }
    }
    // Run the configured number of spring passes for this tick.
    for (let pass = 0; pass < iterations; ++pass) {
      // Visit every unordered pair (i, j) with j < i.
      for (let i = 1; i < nodeCount; i++) {
        for (let j = 0; j < i; j++) {
          const source = nodes[i];
          const target = nodes[j];
          // jiggle so the length won't be zero (divide-by-zero guard).
          let dx = target.x + target.vx - source.x - source.vx || jiggle();
          let dy = target.y + target.vy - source.y - source.vy || jiggle();
          let len = Math.sqrt(dx * dx + dy * dy);
          len = (len - distances[i * (i - 1) / 2 + j]) / len * dataSizeFactor * alpha;
          dx *= len;
          dy *= len;
          target.vx -= dx;
          target.vy -= dy;
          source.vx += dx;
          source.vy += dy;
        }
      }
    }
    // Measure the velocity change (i.e. the force actually applied).
    if (trackStability) {
      let velocityDiff = 0;
      for (let i = nodeCount - 1; i >= 0; i--) {
        const node = nodes[i];
        velocityDiff += Math.abs(Math.hypot(node.vx - node.oldvx, node.vy - node.oldvy));
      }
      velocityDiff /= nodeCount;
      latestVelocityDiff = velocityDiff;
      if (velocityDiff < stableVelocity) stableVeloHandler();
    }
  }

  // Recompute the per-pair force share; called when the node list changes.
  function initialize () {
    if (!nodes) return;
    // 0.5 to divide the force into two parts, for source and target node.
    dataSizeFactor = 0.5 / (nodes.length - 1);
    initializeDistance();
  }

  // Precompute the rest length for every unordered node pair.
  function initializeDistance () {
    if (!nodes) return;
    for (let i = 1, n = nodes.length; i < n; i++) {
      for (let j = 0; j < i; j++) {
        distances.push(distance(nodes[i], nodes[j]));
      }
    }
  }

  force.initialize = function (_) {
    nodes = _;
    initialize();
  };
  force.iterations = function (_) {
    if (!arguments.length) return iterations;
    iterations = +_;
    return force;
  };
  force.distance = function (_) {
    if (!arguments.length) return distance;
    distance = typeof _ === 'function' ? _ : constant(+_);
    initializeDistance();
    return force;
  };
  force.latestAccel = function () {
    return latestVelocityDiff;
  };
  force.onStableVelo = function (_) {
    if (!arguments.length) return stableVeloHandler;
    stableVeloHandler = _;
    return force;
  };
  force.stableVelocity = function (_) {
    if (!arguments.length) return stableVelocity;
    stableVelocity = _;
    return force;
  };
  return force;
}
import constant from './constant';
import jiggle from './jiggle';
/**
* Modified link force algorithm
* - simplify calculations for parameters locked for spring model
* - replace the use of links {} with loop. greatly reduce memory usage
* - removed other unused functions
* Alpha should be constant 1 for accurate simulation
*/
export default function () {
  // Closure state: pairwise rest lengths live in a flat triangular array
  // (index i * (i - 1) / 2 + j for the pair i > j) instead of link objects.
  var dataSizeFactor, // per-pair force share, set from the node count
    distance = constant(30), // distance accessor for a node pair
    distances = [], // flattened triangular matrix of rest lengths
    nodes,
    stableVelocity = 0, // threshold below which the layout counts as stable
    stableVeloHandler = null, // callback fired once mean velocity change drops below threshold
    latestVelocityDiff = 0, // mean per-node velocity change of the last tick
    iterations = 1; // spring passes per tick

  /**
   * Apply spring forces for one simulation tick.
   * @param {number} alpha - multiplier for the amount of force applied.
   */
  function force (alpha) {
    let n = nodes.length;
    // Cache old velocity for comparison later
    if (stableVeloHandler !== null && stableVelocity >= 0) {
      for (let i = n - 1, node; i >= 0; i--) {
        node = nodes[i];
        node.oldvx = node.vx;
        node.oldvy = node.vy;
      }
    }
    // Each iteration in a tick
    for (var k = 0, source, target, i, j, x, y, l; k < iterations; ++k) {
      // For each link (every unordered pair j < i)
      for (i = 1; i < n; i++) {
        for (j = 0; j < i; j++) {
          // jiggle so l won't be zero and divide by zero error after this
          source = nodes[i];
          target = nodes[j];
          x = target.x + target.vx - source.x - source.vx || jiggle();
          y = target.y + target.vy - source.y - source.vy || jiggle();
          l = Math.sqrt(x * x + y * y);
          // Scale displacement by how far the pair is from its rest length.
          l = (l - distances[i * (i - 1) / 2 + j]) / l * dataSizeFactor * alpha;
          x *= l; y *= l;
          target.vx -= x;
          target.vy -= y;
          source.vx += x;
          source.vy += y;
        }
      }
    }
    // Calculate velocity changes, aka force applied.
    if (stableVeloHandler !== null && stableVelocity >= 0) {
      let velocityDiff = 0;
      for (let i = n - 1, node; i >= 0; i--) {
        node = nodes[i];
        velocityDiff += Math.abs(Math.hypot(node.vx - node.oldvx, node.vy - node.oldvy));
      }
      velocityDiff /= n;
      latestVelocityDiff = velocityDiff;
      // Notify the caller once the layout has settled.
      if (velocityDiff < stableVelocity) {
        stableVeloHandler();
      }
    }
  }

  // Recompute the per-pair force share; called when nodes are assigned.
  function initialize () {
    if (!nodes) return;
    // 0.5 to divide the force to two part for source and target node
    dataSizeFactor = 0.5 / (nodes.length - 1);
    initializeDistance();
  }

  // Precompute the rest length for every unordered node pair.
  function initializeDistance () {
    if (!nodes) return;
    for (let i = 1, n = nodes.length; i < n; i++) {
      for (let j = 0; j < i; j++) {
        distances.push(distance(nodes[i], nodes[j]));
      }
    }
  }

  force.initialize = function (_) {
    nodes = _;
    initialize();
  };
  // Get or set the number of spring passes per tick.
  force.iterations = function (_) {
    return arguments.length ? (iterations = +_, force) : iterations;
  };
  // Get or set the distance accessor (number or function).
  force.distance = function (_) {
    return arguments.length ? (distance = typeof _ === 'function' ? _ : constant(+_), initializeDistance(), force) : distance;
  };
  // Mean per-node velocity change measured on the last tick.
  force.latestAccel = function () {
    return latestVelocityDiff;
  };
  // Get or set the stability callback.
  force.onStableVelo = function (_) {
    return arguments.length ? (stableVeloHandler = _, force) : stableVeloHandler;
  };
  // Get or set the stability threshold.
  force.stableVelocity = function (_) {
    return arguments.length ? (stableVelocity = _, force) : stableVelocity;
  };
  return force;
}

View File

@@ -1,12 +1,12 @@
import constant from "./constant";
import jiggle from "./jiggle";
import constant from './constant';
import jiggle from './jiggle';
/**
* An implementation of Chalmers' 1996 Neighbour and Sampling algorithm.
* It uses random sampling to find the most suited neighbours from the
* data set.
*/
function sortDistances(a, b) {
/**
 * Comparator for [id, distance] pairs: larger distances sort first.
 * @param {array} a - first pair.
 * @param {array} b - second pair.
 * @return {number} positive when b's distance exceeds a's.
 */
function sortDistances (a, b) {
  const delta = b[1] - a[1];
  return delta;
}
@@ -21,22 +21,22 @@ export default function () {
dataSizeFactor,
latestVelocityDiff = 0;
/**
/**
* Apply spring forces at each simulation iteration.
* @param {number} alpha - multiplier for amount of force applied
*/
function force(alpha) {
function force (alpha) {
let n = nodes.length;
// Cache old velocity for comparison later
if (stableVeloHandler!==null && stableVelocity>=0) {
for (let i = n-1, node; i>=0; i--) {
if (stableVeloHandler !== null && stableVelocity >= 0) {
for (let i = n - 1, node; i >= 0; i--) {
node = nodes[i];
node.oldvx = node.vx;
node.oldvy = node.vy;
}
}
for (let i = n-1, node, samples; i>=0; i--) {
for (let i = n - 1, node, samples; i >= 0; i--) {
node = nodes[i];
samples = createRandomSamples(i);
@@ -52,16 +52,16 @@ export default function () {
}
// Calculate velocity changes, aka force applied.
if (stableVeloHandler!==null && stableVelocity>=0) {
if (stableVeloHandler !== null && stableVelocity >= 0) {
let velocityDiff = 0;
for (let i = n-1, node; i>=0; i--) {
for (let i = n - 1, node; i >= 0; i--) {
node = nodes[i];
velocityDiff += Math.abs(Math.hypot(node.vx-node.oldvx, node.vy-node.oldvy));
velocityDiff += Math.abs(Math.hypot(node.vx - node.oldvx, node.vy - node.oldvy));
}
velocityDiff /= n;
latestVelocityDiff = velocityDiff;
if(velocityDiff<stableVelocity){
if (velocityDiff < stableVelocity) {
stableVeloHandler();
}
}
@@ -74,14 +74,14 @@ export default function () {
* @param {number} dist - high dimensional distance between the two nodes
* @param {number} alpha - multiplier for the amount of force applied
*/
function setVelocity(source, target, dist, alpha) {
function setVelocity (source, target, dist, alpha) {
let x, y, l;
// jiggle so l won't be zero and divide by zero error after this
x = target.x + target.vx - source.x - source.vx || jiggle();
y = target.y + target.vy - source.y - source.vy || jiggle();
l = Math.sqrt(x * x + y * y);
l = (l - dist) / l * dataSizeFactor * alpha;
x *= l, y *= l;
x *= l; y *= l;
// Set the calculated velocites for both nodes.
target.vx -= x;
target.vy -= y;
@@ -90,11 +90,11 @@ export default function () {
}
// Called on nodes change and added to a simulation
function initialize() {
function initialize () {
if (!nodes) return;
// Initialize for each node some random neighbours.
for (let i = nodes.length-1; i>=0; i--) {
for (let i = nodes.length - 1; i >= 0; i--) {
let neighbs = pickRandomNodesFor(i, [i], neighbourSize);
// Sort the neighbour set by the distances.
neighbours[i] = new Map(neighbs.sort(sortDistances));
@@ -103,8 +103,8 @@ export default function () {
initDataSizeFactor();
}
function initDataSizeFactor(){
dataSizeFactor = 0.5/(neighbourSize+sampleSize);
function initDataSizeFactor () {
dataSizeFactor = 0.5 / (neighbourSize + sampleSize);
}
/**
@@ -116,7 +116,7 @@ export default function () {
* @param {number} size - max number of elements in the map to return.
* @return {array}
*/
function pickRandomNodesFor(index, exclude, size) {
function pickRandomNodesFor (index, exclude, size) {
let randElements = [];
let max = nodes.length;
@@ -126,14 +126,14 @@ export default function () {
break;
}
let rand = Math.floor((Math.random() * max));
let rand = Math.floor(Math.random() * max);
// Re-random until suitable value is found.
while (randElements.includes(rand) || exclude.includes(rand)) {
rand = Math.floor((Math.random() * max));
rand = Math.floor(Math.random() * max);
}
randElements.push(rand);
}
for(let i=randElements.length-1, rand; i>=0; i--){
for (let i = randElements.length - 1, rand; i >= 0; i--) {
rand = randElements[i];
randElements[i] = [rand, distance(nodes[index], nodes[rand])];
}
@@ -146,7 +146,7 @@ export default function () {
* @param {number} index - index of the node to generate sample for
* @return {map}
*/
function createRandomSamples(index) {
function createRandomSamples (index) {
// Ignore the current neighbours of the node and itself.
let exclude = [index];
exclude = exclude.concat(Array.from(neighbours[index].keys()));
@@ -160,13 +160,12 @@ export default function () {
* @param {map} samples - map of samples
* @return {map} - new map of neighbours
*/
function findNewNeighbours(neighbours, samples) {
function findNewNeighbours (neighbours, samples) {
let combined = [...neighbours.entries()].concat([...samples.entries()]);
combined = combined.sort(sortDistances);
return new Map(combined.slice(0, neighbourSize));
}
// API for initializing the algorithm and setting parameters
force.initialize = function (_) {
nodes = _;
@@ -186,7 +185,7 @@ export default function () {
};
force.distance = function (_) {
return arguments.length ? (distance = typeof _ === "function" ? _ : constant(+_), force) : distance;
return arguments.length ? (distance = typeof _ === 'function' ? _ : constant(+_), force) : distance;
};
force.latestAccel = function () {

View File

@@ -4,12 +4,13 @@
* to the better layout.
* @return {number} - stress of the layout.
*/
export function getStress(nodes, distance) {
let sumDiffSq = 0
export function getStress (nodes, distance) {
let sumDiffSq = 0;
let sumLowDDistSq = 0;
for (let j = nodes.length-1; j >= 1; j--) {
for (let j = nodes.length - 1; j >= 1; j--) {
for (let i = 0; i < j; i++) {
let source = nodes[i], target = nodes[j];
let source = nodes[i];
let target = nodes[j];
let lowDDist = Math.hypot(target.x - source.x, target.y - source.y);
let highDDist = distance(source, target);
sumDiffSq += Math.pow(highDDist - lowDDist, 2);

View File

@@ -1,374 +1,374 @@
import constant from "./constant";
/**
* Set the node id accessor to the specified i.
* @param {node} d - node.
* @param {accessor} i - id accessor.
* @return {accessor} - node id accessor.
*/
function index(d, i) {
return i;
}
/**
* t-SNE implementation in D3 by using the code existing in tsnejs
* (https://github.com/karpathy/tsnejs) to compute the solution.
*/
export default function() {
var id = index,
distance = constant(300),
nodes,
perplexity = 30,
learningRate = 10,
iteration = 0,
dim = 2,
N, // length of the nodes.
P, // probability matrix.
Y, // solution.
gains,
ystep;
/**
* Make a step in t-SNE algorithm and set the velocities for the nodes
* to accumulate the values from solution.
*/
function force() {
// Make a step at each iteration.
step();
var solution = getSolution();
// Set the velocity for each node using the solution.
for (var i = 0; i < nodes.length; i++) {
nodes[i].vx += solution[i][0];
nodes[i].vy += solution[i][1];
}
}
/**
* Calculates the random number from Gaussian distribution.
* @return {number} random number.
*/
function gaussRandom() {
let u = 2 * Math.random() - 1;
let v = 2 * Math.random() - 1;
let r = u * u + v * v;
if (r == 0 || r > 1) return gaussRandom();
return u * Math.sqrt(-2 * Math.log(r) / r);
}
/**
* Return the normalized number.
* @return {number} normalized random number from Gaussian distribution.
*/
function randomN() {
return gaussRandom() * 1e-4;
}
function sign(x) {
return x > 0 ? 1 : x < 0 ? -1 : 0;
}
/**
* Create an array of length n filled with zeros.
* @param {number} n - length of array.
* @return {Float64Array} - array of zeros with length n.
*/
function zeros(n) {
if (typeof(n) === 'undefined' || isNaN(n)) {
return [];
}
return new Float64Array(n); // typed arrays are faster
}
// Returns a 2d array of random numbers
/**
* Creates a 2d array filled with random numbers.
* @param {number} n - rows.
* @param {number} d - columns.
* @return {array} - 2d array
*/
function random2d(n, d) {
var x = [];
for (var i = 0; i < n; i++) {
var y = [];
for (var j = 0; j < d; j++) {
y.push(randomN());
}
x.push(y);
}
return x;
}
/**
* Compute the probability matrix using the provided data.
* @param {array} data - nodes.
* @param {number} perplexity - used to calculate entropy of distribution.
* @param {number} tol - limit for entropy difference.
* @return {2d array} - 2d matrix containing probabilities.
*/
function d2p(data, perplexity, tol) {
N = Math.floor(data.length);
var Htarget = Math.log(perplexity); // target entropy of distribution.
var P1 = zeros(N * N); // temporary probability matrix.
var prow = zeros(N); // a temporary storage compartment.
for (var i = 0; i < N; i++) {
var betamin = -Infinity;
var betamax = Infinity;
var beta = 1; // initial value of precision.
var done = false;
var maxtries = 50;
// Perform binary search to find a suitable precision beta
// so that the entropy of the distribution is appropriate.
var num = 0;
while (!done) {
// Compute entropy and kernel row with beta precision.
var psum = 0.0;
for (var j = 0; j < N; j++) {
var pj = Math.exp(-distance(data[i], data[j]) * beta);
// Ignore the diagonals
if (i === j) {
pj = 0;
}
prow[j] = pj;
psum += pj;
}
// Normalize p and compute entropy.
var Hhere = 0.0;
for (j = 0; j < N; j++) {
if (psum == 0) {
pj = 0;
} else {
pj = prow[j] / psum;
}
prow[j] = pj;
if (pj > 1e-7) {
Hhere -= pj * Math.log(pj);
}
}
// Adjust beta based on result.
if (Hhere > Htarget) {
// Entropy was too high (distribution too diffuse)
// so we need to increase the precision for more peaky distribution.
betamin = beta; // move up the bounds.
if (betamax === Infinity) {
beta = beta * 2;
} else {
beta = (beta + betamax) / 2;
}
} else {
// Converse case. Make distrubtion less peaky.
betamax = beta;
if (betamin === -Infinity) {
beta = beta / 2;
} else {
beta = (beta + betamin) / 2;
}
}
// Stopping conditions: too many tries or got a good precision.
num++;
if (Math.abs(Hhere - Htarget) < tol || num >= maxtries) {
done = true;
}
}
// Copy over the final prow to P1 at row i
for (j = 0; j < N; j++) {
P1[i * N + j] = prow[j];
}
}
// Symmetrize P and normalize it to sum to 1 over all ij
var Pout = zeros(N * N);
var N2 = N * 2;
for (i = 0; i < N; i++) {
for (j = 0; j < N; j++) {
Pout[i * N + j] = Math.max((P1[i * N + j] + P1[j * N + i]) / N2, 1e-100);
}
}
return Pout;
}
/**
* Initialize a starting (random) solution.
*/
function initSolution() {
Y = random2d(N, dim);
// Step gains to accelerate progress in unchanging directions.
gains = random2d(N, dim, 1.0);
// Momentum accumulator.
ystep = random2d(N, dim, 0.0);
iteration = 0;
}
/**
* @return {2d array} the solution.
*/
function getSolution() {
return Y;
}
/**
* Do a single step (iteration) for the layout.
* @return {number} the current cost.
*/
function step() {
iteration += 1;
var cg = costGrad(Y); // Evaluate gradient.
var cost = cg.cost;
var grad = cg.grad;
// Perform gradient step.
var ymean = zeros(dim);
for (var i = 0; i < N; i++) {
for (var d = 0; d < dim; d++) {
var gid = grad[i][d];
var sid = ystep[i][d];
var gainid = gains[i][d];
// Compute gain update.
var newgain = sign(gid) === sign(sid) ? gainid * 0.8 : gainid + 0.2;
if (newgain < 0.01) {
newgain = 0.01;
}
gains[i][d] = newgain;
// Compute momentum step direction.
var momval = iteration < 250 ? 0.5 : 0.8;
var newsid = momval * sid - learningRate * newgain * grad[i][d];
ystep[i][d] = newsid;
// Do the step.
Y[i][d] += newsid;
// Accumulate mean so that we can center later.
ymean[d] += Y[i][d];
}
}
// Reproject Y to have the zero mean.
for (i = 0; i < N; i++) {
for (d = 0; d < dim; d++) {
Y[i][d] -= ymean[d] / N;
}
}
return cost;
}
/**
* Calculate the cost and the gradient.
* @param {2d array} Y - the current solution to evaluate.
* @return {object} that contains a cost and a gradient.
*/
function costGrad(Y) {
var pmul = iteration < 100 ? 4 : 1;
// Compute current Q distribution, unnormalized first.
var Qu = zeros(N * N);
var qsum = 0.0;
for (var i = 0; i < N; i++) {
for (var j = i + 1; j < N; j++) {
var dsum = 0.0;
for (var d = 0; d < dim; d++) {
var dhere = Y[i][d] - Y[j][d];
dsum += dhere * dhere;
}
var qu = 1.0 / (1.0 + dsum); // Student t-distribution.
Qu[i * N + j] = qu;
Qu[j * N + i] = qu;
qsum += 2 * qu;
}
}
// Normalize Q distribution to sum to 1.
var NN = N * N;
var Q = zeros(NN);
for (var q = 0; q < NN; q++) {
Q[q] = Math.max(Qu[q] / qsum, 1e-100);
}
var cost = 0.0;
var grad = [];
for (i = 0; i < N; i++) {
var gsum = new Array(dim); // Initialize gradiet for point i.
for (d = 0; d < dim; d++) {
gsum[d] = 0.0;
}
for (j = 0; j < N; j++) {
// Accumulate the cost.
cost += -P[i * N + j] * Math.log(Q[i * N + j]);
var premult = 4 * (pmul * P[i * N + j] - Q[i * N + j]) * Qu[i * N + j];
for (d = 0; d < dim; d++) {
gsum[d] += premult * (Y[i][d] - Y[j][d]);
}
}
grad.push(gsum);
}
return {
cost: cost,
grad: grad
};
}
/**
* Calculates the stress. Basically, it computes the difference between
* high dimensional distance and real distance. The lower the stress is,
* the better layout.
* @return {number} - stress of the layout.
*/
function getStress() {
var totalDiffSq = 0,
totalHighDistSq = 0;
for (var i = 0, source, target, realDist, highDist; i < nodes.length; i++) {
for (var j = 0; j < nodes.length; j++) {
if (i !== j) {
source = nodes[i], target = nodes[j];
realDist = Math.hypot(target.x - source.x, target.y - source.y);
highDist = +distance(nodes[i], nodes[j]);
totalDiffSq += Math.pow(realDist - highDist, 2);
totalHighDistSq += highDist * highDist;
}
}
}
return Math.sqrt(totalDiffSq / totalHighDistSq);
}
// API for initializing the algorithm, setting parameters and querying
// metrics.
force.initialize = function(_) {
nodes = _;
N = nodes.length;
// Initialize the probability matrix.
P = d2p(nodes, perplexity, 1e-4);
initSolution();
};
force.id = function(_) {
return arguments.length ? (id = _, force) : id;
};
force.distance = function(_) {
return arguments.length ? (distance = typeof _ === "function" ? _ : constant(+_), force) : distance;
};
force.stress = function() {
return getStress();
};
force.learningRate = function(_) {
return arguments.length ? (learningRate = +_, force) : learningRate;
};
force.perplexity = function(_) {
return arguments.length ? (perplexity = +_, force) : perplexity;
};
return force;
}
/* eslint-disable block-scoped-var */
import constant from './constant';
/**
* Set the node id accessor to the specified i.
* @param {node} d - node.
* @param {accessor} i - id accessor.
* @return {accessor} - node id accessor.
*/
function index (d, i) {
  // Default id accessor: a node's id is simply its array position.
  const nodeIndex = i;
  return nodeIndex;
}
/**
* t-SNE implementation in D3 by using the code existing in tsnejs
* (https://github.com/karpathy/tsnejs) to compute the solution.
*/
export default function () {
var id = index,
distance = constant(300),
nodes,
perplexity = 30,
learningRate = 10,
iteration = 0,
dim = 2,
N, // length of the nodes.
P, // probability matrix.
Y, // solution.
gains,
ystep;
/**
* Make a step in t-SNE algorithm and set the velocities for the nodes
* to accumulate the values from solution.
*/
function force () {
// Make a step at each iteration.
step();
var solution = getSolution();
// Set the velocity for each node using the solution.
for (var i = 0; i < nodes.length; i++) {
nodes[i].vx += solution[i][0];
nodes[i].vy += solution[i][1];
}
}
/**
* Calculates the random number from Gaussian distribution.
* @return {number} random number.
*/
function gaussRandom () {
let u = 2 * Math.random() - 1;
let v = 2 * Math.random() - 1;
let r = u * u + v * v;
if (r === 0 || r > 1) {
return gaussRandom();
}
return u * Math.sqrt(-2 * Math.log(r) / r);
}
/**
* Return the normalized number.
* @return {number} normalized random number from Gaussian distribution.
*/
function randomN () {
return gaussRandom() * 1e-4;
}
function sign (x) {
return x > 0 ? 1 : x < 0 ? -1 : 0;
}
/**
* Create an array of length n filled with zeros.
* @param {number} n - length of array.
* @return {Float64Array} - array of zeros with length n.
*/
function zeros (n) {
if (typeof n === 'undefined' || isNaN(n)) {
return [];
}
return new Float64Array(n); // typed arrays are faster
}
// Returns a 2d array of random numbers
/**
* Creates a 2d array filled with random numbers.
* @param {number} n - rows.
* @param {number} d - columns.
* @return {array} - 2d array
*/
function random2d (n, d) {
var x = [];
for (var i = 0; i < n; i++) {
var y = [];
for (var j = 0; j < d; j++) {
y.push(randomN());
}
x.push(y);
}
return x;
}
/**
* Compute the probability matrix using the provided data.
* @param {array} data - nodes.
* @param {number} perplexity - used to calculate entropy of distribution.
* @param {number} tol - limit for entropy difference.
* @return {2d array} - 2d matrix containing probabilities.
*/
function d2p (data, perplexity, tol) {
N = Math.floor(data.length);
var Htarget = Math.log(perplexity); // target entropy of distribution.
var P1 = zeros(N * N); // temporary probability matrix.
var prow = zeros(N); // a temporary storage compartment.
for (var i = 0; i < N; i++) {
var betamin = -Infinity;
var betamax = Infinity;
var beta = 1; // initial value of precision.
var done = false;
var maxtries = 50;
// Perform binary search to find a suitable precision beta
// so that the entropy of the distribution is appropriate.
var num = 0;
while (!done) {
// Compute entropy and kernel row with beta precision.
var psum = 0.0;
for (var j = 0; j < N; j++) {
var pj = Math.exp(-distance(data[i], data[j]) * beta);
// Ignore the diagonals
if (i === j) {
pj = 0;
}
prow[j] = pj;
psum += pj;
}
// Normalize p and compute entropy.
var Hhere = 0.0;
for (j = 0; j < N; j++) {
if (psum === 0) {
pj = 0;
} else {
pj = prow[j] / psum;
}
prow[j] = pj;
if (pj > 1e-7) {
Hhere -= pj * Math.log(pj);
}
}
// Adjust beta based on result.
if (Hhere > Htarget) {
// Entropy was too high (distribution too diffuse)
// so we need to increase the precision for more peaky distribution.
betamin = beta; // move up the bounds.
if (betamax === Infinity) {
beta = beta * 2;
} else {
beta = (beta + betamax) / 2;
}
} else {
// Converse case. Make distrubtion less peaky.
betamax = beta;
if (betamin === -Infinity) {
beta = beta / 2;
} else {
beta = (beta + betamin) / 2;
}
}
// Stopping conditions: too many tries or got a good precision.
num++;
if (Math.abs(Hhere - Htarget) < tol || num >= maxtries) {
done = true;
}
}
// Copy over the final prow to P1 at row i
for (j = 0; j < N; j++) {
P1[i * N + j] = prow[j];
}
}
// Symmetrize P and normalize it to sum to 1 over all ij
var Pout = zeros(N * N);
var N2 = N * 2;
for (i = 0; i < N; i++) {
for (j = 0; j < N; j++) {
Pout[i * N + j] = Math.max((P1[i * N + j] + P1[j * N + i]) / N2, 1e-100);
}
}
return Pout;
}
/**
* Initialize a starting (random) solution.
*/
function initSolution () {
Y = random2d(N, dim);
// Step gains to accelerate progress in unchanging directions.
gains = random2d(N, dim, 1.0);
// Momentum accumulator.
ystep = random2d(N, dim, 0.0);
iteration = 0;
}
/**
* @return {2d array} the solution.
*/
function getSolution () {
return Y;
}
/**
* Do a single step (iteration) for the layout.
* @return {number} the current cost.
*/
function step () {
iteration += 1;
var cg = costGrad(Y); // Evaluate gradient.
var cost = cg.cost;
var grad = cg.grad;
// Perform gradient step.
var ymean = zeros(dim);
for (var i = 0; i < N; i++) {
for (var d = 0; d < dim; d++) {
var gid = grad[i][d];
var sid = ystep[i][d];
var gainid = gains[i][d];
// Compute gain update.
var newgain = sign(gid) === sign(sid) ? gainid * 0.8 : gainid + 0.2;
if (newgain < 0.01) {
newgain = 0.01;
}
gains[i][d] = newgain;
// Compute momentum step direction.
var momval = iteration < 250 ? 0.5 : 0.8;
var newsid = momval * sid - learningRate * newgain * grad[i][d];
ystep[i][d] = newsid;
// Do the step.
Y[i][d] += newsid;
// Accumulate mean so that we can center later.
ymean[d] += Y[i][d];
}
}
// Reproject Y to have the zero mean.
for (i = 0; i < N; i++) {
for (d = 0; d < dim; d++) {
Y[i][d] -= ymean[d] / N;
}
}
return cost;
}
/**
* Calculate the cost and the gradient.
* @param {2d array} Y - the current solution to evaluate.
* @return {object} that contains a cost and a gradient.
*/
function costGrad (Y) {
var pmul = iteration < 100 ? 4 : 1;
// Compute current Q distribution, unnormalized first.
var Qu = zeros(N * N);
var qsum = 0.0;
for (var i = 0; i < N; i++) {
for (var j = i + 1; j < N; j++) {
var dsum = 0.0;
for (var d = 0; d < dim; d++) {
var dhere = Y[i][d] - Y[j][d];
dsum += dhere * dhere;
}
var qu = 1.0 / (1.0 + dsum); // Student t-distribution.
Qu[i * N + j] = qu;
Qu[j * N + i] = qu;
qsum += 2 * qu;
}
}
// Normalize Q distribution to sum to 1.
var NN = N * N;
var Q = zeros(NN);
for (var q = 0; q < NN; q++) {
Q[q] = Math.max(Qu[q] / qsum, 1e-100);
}
var cost = 0.0;
var grad = [];
for (i = 0; i < N; i++) {
var gsum = new Array(dim); // Initialize gradiet for point i.
for (d = 0; d < dim; d++) {
gsum[d] = 0.0;
}
for (j = 0; j < N; j++) {
// Accumulate the cost.
cost += -P[i * N + j] * Math.log(Q[i * N + j]);
var premult = 4 * (pmul * P[i * N + j] - Q[i * N + j]) * Qu[i * N + j];
for (d = 0; d < dim; d++) {
gsum[d] += premult * (Y[i][d] - Y[j][d]);
}
}
grad.push(gsum);
}
return {
cost: cost,
grad: grad
};
}
/**
* Calculates the stress. Basically, it computes the difference between
* high dimensional distance and real distance. The lower the stress is,
* the better layout.
* @return {number} - stress of the layout.
*/
function getStress () {
var totalDiffSq = 0,
totalHighDistSq = 0;
for (var i = 0, source, target, realDist, highDist; i < nodes.length; i++) {
for (var j = 0; j < nodes.length; j++) {
if (i !== j) {
source = nodes[i]; target = nodes[j];
realDist = Math.hypot(target.x - source.x, target.y - source.y);
highDist = +distance(nodes[i], nodes[j]);
totalDiffSq += Math.pow(realDist - highDist, 2);
totalHighDistSq += highDist * highDist;
}
}
}
return Math.sqrt(totalDiffSq / totalHighDistSq);
}
// API for initializing the algorithm, setting parameters and querying
// metrics.
force.initialize = function (_) {
nodes = _;
N = nodes.length;
// Initialize the probability matrix.
P = d2p(nodes, perplexity, 1e-4);
initSolution();
};
force.id = function (_) {
return arguments.length ? (id = _, force) : id;
};
force.distance = function (_) {
return arguments.length ? (distance = typeof _ === 'function' ? _ : constant(+_), force) : distance;
};
force.stress = function () {
return getStress();
};
force.learningRate = function (_) {
return arguments.length ? (learningRate = +_, force) : learningRate;
};
force.perplexity = function (_) {
return arguments.length ? (perplexity = +_, force) : perplexity;
};
return force;
}

View File

@@ -1,13 +1,13 @@
/**
* @return x value of a node
*/
export function x(d) {
  return d['x'];
}
/**
* @return y value of a node
*/
export function y(d) {
  return d['y'];
}
/**
* @return x value of a node
*/
export function x (d) {
  const { x: horizontal } = d;
  return horizontal;
}
/**
* @return y value of a node
*/
export function y (d) {
  const { y: vertical } = d;
  return vertical;
}