Example #1
// Assumes numeric.js is bound to num, e.g. var num = require('numeric');
function bivariate_normal(X, Y, sigma, mux, muy)
{
	// Isotropic bivariate normal density with standard deviation sigma on both
	// axes and zero correlation:
	//   p(x, y) = exp(-((x - mux)^2 + (y - muy)^2) / (2 sigma^2)) / (2 pi sigma^2)
	var lensize = num.dim(X)[0];
	var rsize = [lensize, lensize];   // (currently unused)
	var Xmu = num.sub(X, mux);
	var Ymu = num.sub(Y, muy);
	var le = num.div(num.pow(Xmu, 2), sigma * sigma);
	var re = num.div(num.pow(Ymu, 2), sigma * sigma);
	var z = num.add(le, re);
	var denom = 2 * Math.PI * sigma * sigma;
	var res = num.div(num.exp(num.neg(num.div(z, 2))), denom);
	return res;
}
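numeric.js broadcasts these pointwise operations over plain JavaScript arrays, so the density can be evaluated on whole coordinate vectors at once. A minimal usage sketch, assuming numeric.js is bound to num (the sample values below are made up and not part of the original snippet):

var num = require('numeric');

// Evaluate the density along the line y = x for x in [-2, 2].
var X = num.linspace(-2, 2, 5);   // [-2, -1, 0, 1, 2]
var Y = num.linspace(-2, 2, 5);
var density = bivariate_normal(X, Y, 1.0, 0, 0);
console.log(density);             // peaks at 1 / (2 * pi) ~= 0.159 at (0, 0)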
Example #2
ElasticNets.prototype.weightenNeurons = function() {
    var _ = require('underscore'),
        numeric = require('numeric'),
        TSPCommon = require('./TSPCommon');

    // Calculates distances between each of the items and each of the neurons.
    var cp_coordinates_neurons = TSPCommon._cartesian_product_of(this.norm_coordinates, this.neurons);
    var diff = _.map(cp_coordinates_neurons, function (value) {
        return numeric.sub(value[0], value[1]);
    });

    // Squared Euclidean distance for each (item, neuron) pair.
    var dist = _.map(diff, function (value) {
        return Math.pow(value[0], 2) + Math.pow(value[1], 2);
    });

    // Group the squared distances per item: dist_grouped[i] holds the squared
    // distances between item i and every neuron.
    var dist_grouped = TSPCommon._grouper(dist, this.num_neurons);
    // worst_dist is the largest item-to-nearest-neuron distance (the square root
    // of the maximum over items of the per-item minimum squared distance).
    var worst_dist = Math.sqrt(_.reduce(dist_grouped, function (memo, value) {
        var min = _.min(value);
        return min > memo ? min : memo;
    }, 0));

    // Gaussian weighting: exp(-d^2 / 2k^2); dist already holds d^2.
    var weights = numeric.exp(numeric.div(numeric.neg(dist), (2 * Math.pow(this.k, 2))));
    var grouped_neurons = TSPCommon._grouper(weights, this.num_neurons);

    // Normalize the weights within each item's group so that they sum to 1.
    weights = _.map(grouped_neurons, function (value) {
        return numeric.div(value, numeric.sum(value));
    });

    return {
        weights: weights,
        worst_dist: worst_dist,
        diff: diff
    };
};
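The heart of the method is the Gaussian weighting and normalization step. A standalone sketch of just that step, using made-up squared distances and a toy k (these values are illustrative assumptions, not taken from the project):

var numeric = require('numeric');

// Hypothetical squared distances from one item to three neurons.
var dist = [0.25, 1.0, 4.0];
var k = 0.5;

// w_j is proportional to exp(-d_j^2 / (2 k^2)), then normalized to sum to 1.
var weights = numeric.exp(numeric.div(numeric.neg(dist), 2 * Math.pow(k, 2)));
weights = numeric.div(weights, numeric.sum(weights));
console.log(weights);   // the closest neuron receives almost all of the weight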
Example #3
File: helpers.js  Project: tholum/AIGame
// Assumes underscore and numeric.js are in scope, e.g.:
//   var _ = require('underscore'), numeric = require('numeric');
// Mean-normalizes an input vector: (input - mu), scaled element-wise by 1 / sigma,
// with any zero sigma entry treated as 1 to avoid division by zero.
var meanNormalizeInput = function(input, mu, sigma){
  var sigmaInv = _.map(sigma, function(value){ return value === 0 ? 1 : 1 / value; });
  return numeric.mul(numeric.add(input, numeric.neg(mu)), sigmaInv);
};
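A hypothetical call with made-up statistics (the input, mu and sigma values below are illustrative, not from the project); note that the zero sigma entry is passed through unscaled:

var _ = require('underscore'),
    numeric = require('numeric');

var input = [5, 10, 3];
var mu    = [4,  8, 3];
var sigma = [2,  4, 0];

console.log(meanNormalizeInput(input, mu, sigma));   // [0.5, 0.5, 0]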
Example #4
//Higher dimensional network flow:
//
//  Given a simplicial complex, let d be the differential, let e and f be chains, and let alpha and the w_i be cochains
//
//Then the n-dimensional flow problem is the following linear program:
//
//  Minimize alpha(f)
//  s.t.
//      d(f) = e
//      0 <= w_i(f) <= c_i
//
function ndflow(cells, capacities, e_cells, e_weights, alpha) {
  // Assumes the surrounding module provides: numeric (numeric.js), the EPSILON
  // tolerance, and the helpers differential(), cooriented() and top.findCell()
  // (e.g. top = require('simplicial-complex')).

  //We want to put the problem into the standard form:
  //
  //  Minimize c . x
  //  s.t.
  //        Ax <= b
  //
  var c = alpha
  var A = []
  var b = []
  
  //Assemble conservation constraint:
  //
  //    d(f) = e
  //
  var D = differential(cells)
  var Dd = D.toDense()
  var ep = numeric.rep([D.boundaryCells.length], 0.0)
  for(var i=0; i<e_cells.length; ++i) {
    var idx = top.findCell(D.boundaryCells, e_cells[i])
    if(idx < 0) {
      return {
        cells: [],
        weights: []
      };
    }
    var orientation = cooriented(D.boundaryCells[idx], e_cells[i])
    var v = -e_weights[i] * orientation
    //Keep the right-hand side nonnegative, negating the corresponding row of Dd
    //to compensate when necessary
    if(v < 0) {
      ep[idx] = -v
    } else {
      Dd[idx] = numeric.neg(Dd[idx])
      ep[idx] = v
    }
  }
  //Encode the equality d(f) = e as a pair of inequalities:
  //    Dd f <= ep    and    -Dd f <= EPSILON - ep
  A = A.concat(Dd).concat(numeric.neg(Dd))
  b = b.concat(ep).concat(numeric.add(EPSILON, numeric.neg(ep)))
  
  //Assemble capacity constraints:
  //
  //    0 <= w_i(f) <= c_i
  //
  for(var i=0; i<cells.length; ++i) {
    //Add the 0 <= w_i(f) lower bound, written as -w_i(f) <= 0
    var w_i = numeric.rep([cells.length], 0.0)
    w_i[i] = -1.0
    A.push(w_i)
    b.push(0)

    //Add the w_i(f) <= c_i capacity bound
    w_i = numeric.rep([cells.length], 0.0)
    w_i[i] = 1.0
    A.push(w_i)
    b.push(capacities[i])
  }

  //Solve!
  var result = numeric.solveLP(c, A, b)
  
  //Unpack into a chain
  var r_cells = []
  var r_weights = []
  if(result.solution) {
    for(var i=0; i<cells.length; ++i) {
      if(Math.abs(result.solution[i]) > EPSILON) {
        r_cells.push(cells[i])
        r_weights.push(result.solution[i])
      }
    }
  }
  return {
    cells: r_cells,
    weights: r_weights
  }
}
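Everything above ultimately reduces to a single call to numeric.solveLP(c, A, b) in the form "Minimize c . x subject to Ax <= b". A tiny standalone LP (toy numbers, unrelated to any simplicial complex) shows the expected input and output shape:

var numeric = require('numeric');

// Minimize -x - y subject to x <= 3, y <= 2, -x <= 0, -y <= 0.
var c = [-1, -1];
var A = [[ 1,  0],
         [ 0,  1],
         [-1,  0],
         [ 0, -1]];
var b = [3, 2, 0, 0];

var result = numeric.solveLP(c, A, b);
console.log(result.solution);   // approximately [3, 2]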