// Test helper that wraps a Cassandra client: promisifies the driver's methods
// and adds keyspace, table and bulk-insert helpers for tests.
var TestClient = Base.extend({
    initialize: function(options) {
        this.client = new cassandra.Client(options);
        Promise.promisifyAll(this.client);
        this.keyspaces = [];
    },

    connect: function (options) {
        return this.client.connectAsync(options);
    },

    execute: function() {
        return this.client.executeAsync.apply(this.client, arguments);
    },

    eachRow: function() {
        return this.client.eachRowAsync.apply(this.client, arguments);
    },

    prepare: function() {
        return this.client.prepareAsync.apply(this.client, arguments);
    },

    new_batch: function(style) {
        return this.client.new_batch(style);
    },

    metrics: function (reset) {
        return this.client.metrics(reset);
    },

    createKeyspace: function(name, replication) {
        replication = replication || 1;
        this.keyspaces.push(name);
        var cql = util.format("CREATE KEYSPACE %s WITH replication = " +
        "{'class': 'SimpleStrategy', 'replication_factor' : %d};",
        name, replication);

        return this.client.executeAsync(cql);
    },

    cleanKeyspace: function(name) {
        return this.client.executeAsync('DROP KEYSPACE ' + name)
        .catch(function(err) {
            if (/non existing keyspace/.test(err.message)) {
                return;
            }
            throw err;
        });
    },

    cleanup: function() {
        var self = this;
        return Promise.map(this.keyspaces, function(ks) {
            return self.client.executeAsync('DROP KEYSPACE ' + ks);
        }).then(function() {
            self.client = null;
        });
    },

    // Create a table with the given name in the current keyspace. Fields is an
    // object mapping the field name to the type.
    createTable: function(name, fields, key, opts) {
        opts = opts || "";
        var columns = _.map(fields, function(type, column) {
            return column + " " + type;
        });
        return this.execute(util.format("CREATE TABLE %s (%s, PRIMARY KEY(%s)) %s;",
            name, columns.join(', '), key, opts));
    },
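
    // For example, a call like the following (keyspace, table and column names
    // are illustrative) builds and runs the corresponding CREATE TABLE statement:
    //
    //   client.createTable('test_ks.events',
    //       {id: 'uuid', time: 'timestamp', value: 'double'},
    //       'id, time');
    //
    // which produces roughly:
    //   CREATE TABLE test_ks.events (id uuid, time timestamp, value double,
    //       PRIMARY KEY(id, time));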

    _getInsertQuery: function(table, data, options) {
        var keys = _.keys(data[0]);
        var cols = keys.join(',');

        var vars = _.map(keys, function(k) {
            return '?';
        }).join(',');

        var cql = util.format('INSERT INTO %s (%s) VALUES (%s)', table, cols, vars);

        var param_types = options.param_types || {};
        if (_.isObject(param_types) && !_.isArray(param_types)) {
            var param_typesArray = _.map(keys, function(k) {
                return param_types[k] || types.CASS_VALUE_TYPE_UNKNOWN;
            });
            param_types = param_typesArray;
        }

        if (options.timestamp && options.ttl) {
            cql = cql + ' USING TIMESTAMP ? AND TTL ?';
            param_types.push(types.CASS_VALUE_TYPE_TIMESTAMP);
            param_types.push(types.CASS_VALUE_TYPE_INT);

        } else if (options.timestamp || options.ttl) {
            throw new Error('setting only one of timestamp/ttl is not implemented');
        }

        return {cql: cql, param_types: param_types};
    },

    // Insert the given rows into the table. Each element of `data` is an object
    // mapping column names to values; concurrency, timestamp and ttl are read
    // from options.
    insertRows: function(table, data, options) {
        var self = this;
        options = options || {};
        var keys = _.keys(data[0]);
        var query = this._getInsertQuery(table, data, options);

        function insert(d) {
            var vals = _.map(keys, function(k) { return d[k]; });
            if (options.timestamp && options.ttl) {
                vals.push(options.timestamp);
                vals.push(options.ttl);
            }

            return self.execute(query.cql, vals, {param_types: query.param_types});
        }
        return Promise.map(data, insert, {concurrency: options.concurrency});
    },
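
    // For example (row values illustrative), given
    //   var rows = [{id: 1, value: 'a'}, {id: 2, value: 'b'}];
    // a call such as
    //   client.insertRows('test_ks.events', rows, {concurrency: 10});
    // issues one INSERT per row with at most 10 in flight; passing both
    // options.timestamp and options.ttl appends USING TIMESTAMP ? AND TTL ?
    // to the statement and binds the two extra values.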

    insertRowsPrepared: function(table, data, options) {
        var self = this;
        var prepared;
        options = options || {};
        var keys = _.keys(data[0]);

        var query = this._getInsertQuery(table, data, options);

        function prepare() {
            return self.client.prepareAsync(query.cql)
            .then(function(p) {
                prepared = p;
            });
        }

        function _insert(d, i, n, cb) {
            var vals = _.map(keys, function(k) { return d[k]; });
            if (options.timestamp && options.ttl) {
                vals.push(options.timestamp);
                vals.push(options.ttl);
            }

            var q = prepared.query();
            q.bind(vals, {param_types: query.param_types});
            q.execute({}, cb);
        }

        var insert = Promise.promisify(_insert);
        return prepare()
        .then(function() {
            return Promise.map(data, insert, {concurrency: options.concurrency});
        });
    },

    insertRowsPreparedBatch: function(table, data, options) {
        var self = this;
        var prepared;
        options = options || {};

        var count = data.length;
        var keys = _.keys(data[0]);
        var batch_size = options.batch_size || 1;

        var query = this._getInsertQuery(table, data, options);
        function prepare() {
            return self.client.prepareAsync(query.cql)
            .then(function(p) {
                prepared = p;
            });
        }

        function _insert_batch(x, batch_i, n, cb) {
            var batch = self.client.new_batch("unlogged");

            _.times(batch_size, function(i) {
                var d = data[(batch_i * batch_size) + i];

                // The last batch might not be full
                if (d === undefined) {
                    return;
                }

                var vals = _.map(keys, function(k) { return d[k]; });
                if (options.timestamp && options.ttl) {
                    vals.push(options.timestamp);
                    vals.push(options.ttl);
                }
                batch.add_prepared(prepared, vals, {param_types: query.param_types});
            });
            batch.execute({}, cb);
        }

        // Create an array with a dummy entry for each batch just to be able to
        // use Promise.map
        var batches = _.times(Math.ceil(count / batch_size), _.noop);
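        // e.g. 10 rows with batch_size 4 -> Math.ceil(10 / 4) === 3 batches,
        // the last of which holds only 2 rows (handled by the undefined check above).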

        var insert_batch = Promise.promisify(_insert_batch);
        return prepare()
        .then(function() {
            return Promise.map(batches, insert_batch, {concurrency: options.concurrency});
        });
    }
});
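
// A minimal usage sketch for the helper above. The connection options, keyspace
// and table names, and row values are made up; the underlying cassandra client
// decides what the options object must actually contain.
var client = new TestClient({contactPoints: ['127.0.0.1']});

client.connect()
.then(function() {
    return client.createKeyspace('test_ks');
})
.then(function() {
    return client.createTable('test_ks.events', {id: 'int', value: 'text'}, 'id');
})
.then(function() {
    return client.insertRows('test_ks.events',
        [{id: 1, value: 'a'}, {id: 2, value: 'b'}],
        {concurrency: 4});
})
.finally(function() {
    return client.cleanup();
});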
Example No. 2
var Base = require('extendable-base');

/*
 * Base class for recursively walking the parse tree and optionally generating
 * output.
 *
 * A derived class should implement methods called visit_<ASTNodeType> for each
 * node type in the grammar.
 */
var ASTVisitor = Base.extend({
    visit: function(node) {
        var type = node.type;
        var visitor = this['visit_' + type];
        return visitor.apply(this, [node]);
    }
});

module.exports = ASTVisitor;
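
// A minimal sketch of a derived visitor for the class above. The node type
// names ('BinaryExpression', 'NumberLiteral') and the child fields (left,
// right, value) are illustrative; the real names depend on the grammar.
var SumVisitor = ASTVisitor.extend({
    visit_BinaryExpression: function(node) {
        // Recurse into both operands via the base class dispatcher.
        return this.visit(node.left) + this.visit(node.right);
    },
    visit_NumberLiteral: function(node) {
        return node.value;
    }
});

// new SumVisitor().visit(parsedTree) would then evaluate a tree of additions.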
Example No. 3
// Buffered stream of points pulled lazily from a fetcher: fetch() appends the
// next page of points to the buffer and records whether more data remains.
var stream = Base.extend({
    initialize: function(fetcher, fetch_size) {
        this.fetcher = fetcher;
        this.fetch_size = fetch_size;
        this.buffer = [];
        this._more_to_fetch = true;
    },

    peek_time: function() {
        if (this.buffer.length === 0) {
            throw new Error('peek_time on empty buffer!');
        }
        return this.buffer[0].time;
    },

    pop: function() {
        return this.buffer.shift();
    },

    empty: function() {
        return this.buffer.length === 0;
    },

    more_to_fetch: function() {
        return this._more_to_fetch;
    },

    fetch: function fetch() {
        var self = this;
        return this.fetcher.fetch(this.fetch_size)
        .then(function(result) {
            var points = result.points.map(function(pt) {
                return _.extend({time: pt[0] / 1000, value: pt[1]}, self.fetcher.tags);
            });
            self.buffer = self.buffer.concat(points);
            self._more_to_fetch = !result.eof;
        });
    }
});
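
// A minimal consumption sketch for the stream above, assuming `fetcher.fetch(n)`
// resolves to {points: [[ms_time, value], ...], eof: bool} as used in fetch(),
// and that Bluebird is available as Promise. peek_time() is there presumably so
// that several such streams can be merged by comparing head timestamps first.
function drain(s) {
    while (!s.empty()) {
        var pt = s.pop();
        // ... consume pt ({time, value, ...tag fields}) here ...
    }
    if (!s.more_to_fetch()) {
        return Promise.resolve();
    }
    return s.fetch().then(function() { return drain(s); });
}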
Example No. 4
var Electra = Base.extend({
    initialize: function(config) {
        this.logger = Logger.get('electra');

        this.es_url = url.format({
            protocol: 'http',
            hostname: config.elasticsearch.host,
            port: config.elasticsearch.port
        });
    },

    _connect_to_es: function() {
        // check that es is running by hitting the root endpoint (its response includes the version)
        var self = this;
        return request.getAsync({
            url: this.es_url,
            json: true
        }).spread(function(response, body) {
            if (response.statusCode !== 200) {
                throw new Error('cannot read version, got status ' +
                response.statusCode);
            } else {
                self.logger.info('elasticsearch validation response:', body);
            }
        })
        .catch(function(err) {
            self.logger.info('waiting for elasticsearch to start...', {message: err.message});
            throw err;
        });
    },

    startup: function() {
        var self = this;

        this.logger.info('startup, elasticsearch at ', self.es_url);

        return retry(function() {
            return self._connect_to_es();
        }, { max_tries: Infinity })
        .catch(function(err) {
            self.logger.error('error validating elasticsearch:', err.message);
            throw err;
        });
    },
},
{
    init: function(config) {
        query.init(config);
    }
});
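
// A minimal startup sketch, assuming the config shape read above
// ({elasticsearch: {host, port}}), that the second object passed to extend
// defines class-level methods (so init is called on Electra itself), and that
// query.init accepts the same config object:
var config = {elasticsearch: {host: 'localhost', port: 9200}};
Electra.init(config);
new Electra(config).startup()
.then(function() {
    // elasticsearch responded; safe to start serving queries
});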