Browse Source

Merge branch 'master' of bitbucket.org:ikflowdat/installer

Luciano Andrade 8 years ago
parent
commit
49d75fe0c0

+ 42 - 0
docker-compose.yml

@@ -149,6 +149,8 @@ services:
     image: grafana/grafana
     links:
      - jsonendpoint:endpoint
+     - mysql_jsonendpoint:endpoint
+     - mongodb_jsonendpoint:endpoint
      - mysql:mysql
     ports:
      - 3000:3000
@@ -167,6 +169,18 @@ services:
 
 
   jsonendpoint:
+    restart: always
+    image: fd3_simple_json_endpoint
+    ports:
+      - 9003:8000
+    build: 
+      context: ./statsd/endpoint/json
+    volumes:
+      - ./statsd/endpoint/json:/opt/datasource
+    environment:
+      TIMEOUT: 60000
+      
+  mysql_jsonendpoint:
     restart: always
     image: fd3_simple_json_endpoint_mysql
     environment:
@@ -180,6 +194,32 @@ services:
     volumes:
       - ./statsd/endpoint/mysql:/opt/datasource
 
+
+  mongodb_jsonendpoint:
+    restart: always
+    image: fd3_simple_json_endpoint_mongo
+    links:
+      - mongodb:mongodb
+    ports:
+      - 9002:8000
+    build: 
+      context: ./statsd/endpoint/mongodb
+    volumes:
+      - ./statsd/endpoint/mongodb:/opt/datasource
+
+
+  mongo_express:
+    image: mongo-express
+    ports:
+      - 8081:8081
+    links:
+      - mongodb:mongo
+
+  mongodb:
+    volumes: 
+      - ./mongodb:/data/db 
+    image: mongo:3.4
+
   statsd:
     restart: always
     image: fd3-statsd-mysql-backend
@@ -188,9 +228,11 @@ services:
     ports:
       - "8125:8125/udp"
     links:
+      - mongodb:mongodb
       - mysql:mysql
     volumes:
       - ./statsd/statsd/:/opt/config
+      - ./statsd/backend/mongodb/mongodb.js:/opt/statsd/backends/mongodb.js
  
   supervisord:
     restart: always

+ 198 - 0
statsd/backend/mongodb/mongodb.js

@@ -0,0 +1,198 @@
+'use strict';
+
+// StatsD backend that persists flushed metrics into MongoDB capped
+// collections (db/collection chosen by dbPrefix/colPrefix below).
+// NOTE(review): `async`, `util` and `dbs` are required/declared but never
+// used anywhere in this file — candidates for removal.
+var mongo = require('mongodb'),
+	async = require('async'),
+	util = require('util'),
+	dbs = {},
+	// Defaults; most fields are overridden from statsd config in exports.init().
+	options = {
+		debug: false,
+		prefix: true,
+		// Capped-collection sizing: roughly size*max bytes, max documents.
+		size: 100,
+		max: 2610,
+		name: 'statsd',
+		host: '127.0.0.1',
+		port: 27017
+	};
+
+/**
+ *	Prefix the db correctly
+ */
+var dbPrefix = function(metric) {
+	var rtr = options.prefix ? metric.split('.')[0] : options.name;
+	return rtr;
+};
+
+/**
+ *	Prefix a collection name	
+ */
+var colPrefix = function(metric_type, metric) {
+	var ary = metric.split('.');
+	if (options.prefix) ary.shift();
+	ary.unshift(metric_type);
+	return ary.join('_')+'_'+options.rate;
+};
+
+/**
+ *	Aggregate the metrics
+ */
+var aggregate = {
+	/**
+	 *	Aggregate some metrics bro
+	 *	@param {Number} time
+	 *	@param {Stirng} key
+	 *	@param {String} val
+	 */
+	gauges: function(time, key, val) {
+		return {
+			db: dbPrefix(key),
+			col: key,
+			data: {
+				time: time,
+				gauge: val,
+                                type: 'gauges'
+			},
+		};
+	},
+	/**
+	 *	Aggregate some timer_data bro
+	 *	@param {Number} time
+	 *	@param {Stirng} key
+	 *	@param {String} vals
+	 */
+	timer_data: function(time, key, val) {
+		val.time = time;
+		return {
+			db: dbPrefix(key),
+			col: colPrefix('timers', key),
+			data: {
+                            time: time,
+                            durations: val,
+                            type: 'timers'
+                        }
+		};
+	},
+	/**
+	 *	Aggregate some timers bro
+	 *	@param {Number} time
+	 *	@param {Stirng} key
+	 *	@param {String} vals
+	 */
+	timers: function(time, key, val) {
+		return {
+			db: dbPrefix(key),
+			col: key,
+			data: {
+				time: time,
+				durations: val,
+                                type: 'timers'
+			},
+		};
+	},
+	/**
+	 *	Aggregate some counters bro
+	 *	@param {Number} time
+	 *	@param {Stirng} key
+	 *	@param {String} val
+	 */
+	counters: function(time, key, val) {
+		return {
+			db: dbPrefix(key),
+			col: key,
+			data: {
+				time: time,
+				count: val,
+                                type: 'counters'
+			},
+		};
+	},
+	/**
+	 *	Aggregate some sets bro
+	 *	@param {Number} time
+	 *	@param {Stirng} key
+	 *	@param {String} val
+	 */
+	sets: function(time, key, val) {
+		return {
+			db: dbPrefix(key),
+			col: key,
+			data: {
+				time: time,
+				set: val,
+                                type: 'sets'
+			},
+		};
+	}
+};
+
+
+/**
+ *	Insert the data to the database
+ *	@method insert
+ *	@param {String} database
+ *	@param {String} collection
+ *	@param {Object} metric
+ *	@param {Function} callback
+ */
+function insert(dbName, collection, metric){
+	mongo.connect("mongodb://" + options.host + "/" + options.name, function(err, db) {
+		if(options.debug) console.log("Connected successfully to server");
+		  var colInfo = {capped:true, size:options.size*options.max, max:options.max};
+		  db.createCollection(collection, colInfo,
+				  function(err, coll) {
+				      if (options.debug) console.log("Collection created " + collection + ".");
+				      var col = db.collection(collection);
+				      col.insert(metric, function(err, result) {
+					      if (err) {
+						      console.log("Error occurred in inserting a document", err); 
+					      } else {
+						      if(options.debug) console.log("Inserted a document in the collection. "); 
+					      }
+					      db.close();
+			              });
+				  }
+			  );
+	});
+}
+/**
+ *	our `flush` event handler
+ */
+var onFlush = function(time, metrics) {
+	var metricTypes = ['gauges', 'timer_data', 'timers', 'counters', 'sets'];
+	metricTypes.forEach(function(type, i){
+		var obj;
+
+		for (var key in metrics[type]) {
+			obj = aggregate[type](time, key, metrics[type][key]);
+			insert(obj.db, obj.col, obj.data);
+		};
+	});
+	return ;
+};
+
+/**
+ *	Expose our init function to StatsD
+ *	@param {Number} startup_time
+ *	@param {Object} config
+ *	@param {Object} events
+ */
+exports.init = function(startup_time, config, events) {
+	if (!startup_time || !config || !events) return false;
+
+	options.debug = config.debug;
+
+	if (typeof config.mongoPrefix == 'boolean' && typeof config.mongoName !== 'string') {
+		console.log('config.mongoPrefix is false, config.mongoName must be set.');
+		return false;
+	};
+
+	options.rate = parseInt(config.flushInterval/1000, 10);
+	options.max = config.mongoMax ? parseInt(config.mongoMax, 10) : 2160;
+	options.host = config.mongoHost || '127.0.0.1';
+	options.prefix = typeof config.mongoPrefix == 'boolean' ? config.mongoPrefix : true;
+	options.name = config.mongoName;
+	options.port = config.mongoPort || options.port;
+
+	events.on('flush', onFlush);
+
+	return true;
+};

+ 17 - 0
statsd/endpoint/json/Dockerfile

@@ -0,0 +1,17 @@
+# Image for the aggregating simple-json endpoint (statsd/endpoint/json).
+FROM debian:8
+# Distro node/npm are only a bootstrap; `n stable` below replaces them.
+RUN apt-get update && apt-get install -yq git wget tmux vim nodejs npm
+
+RUN npm install -g n
+
+# Upgrade to the current stable Node.js.
+RUN n stable
+
+WORKDIR /opt/datasource
+COPY index.js /opt/datasource/index.js
+COPY package.json /opt/datasource/package.json
+
+EXPOSE 8000
+
+# nodemon restarts the server when the bind-mounted source changes.
+RUN npm install -g nodemon
+
+# npm install runs at container start so the docker-compose bind mount
+# over /opt/datasource can supply an updated package.json.
+CMD npm install && nodemon index.js
+

+ 104 - 0
statsd/endpoint/json/index.js

@@ -0,0 +1,104 @@
+'use strict';
+
+// Aggregating Grafana simple-json datasource: forwards /search and /query
+// to the mysql and mongodb endpoint containers and merges their answers.
+var express = require('express');
+var morgan = require('morgan');
+var bodyParser = require('body-parser');
+var _ = require('lodash');
+var app = express();
+
+var request = require('request');
+
+// Upstream timeout for request.post, in ms (TIMEOUT env var set in
+// docker-compose). NOTE(review): env vars are strings — confirm `request`
+// accepts a numeric string here, or coerce with Number(...).
+var timeout = process.env.TIMEOUT;
+
+app.use(morgan('combined'));
+app.use(bodyParser.json());
+
+function setCORSHeaders(res) {
+  res.setHeader("Access-Control-Allow-Origin", "*");
+  res.setHeader("Access-Control-Allow-Methods", "POST");
+  res.setHeader("Access-Control-Allow-Headers", "accept, content-type");  
+}
+
+app.all('/', function(req, res) {
+  setCORSHeaders(res);
+  res.send('https://grafana.com/plugins/grafana-simple-json-datasource\n');
+  res.end();
+});
+
+function search(url, body, res)
+{
+    request.post({
+            url: url,
+            body: JSON.stringify(body),
+            timeout: timeout
+        },
+        function (error, response, body) {
+            if (!error && response.statusCode == 200) {
+                if (search_result.length == 0) {
+                    search_result = JSON.parse(body);
+                } else {
+                    var values = JSON.parse(body);
+                    for (var i = 0; i < values.length - 1; i++) {
+                        if (search_result.indexOf(values[i]) === -1) {
+                            search_result.push(values[i]);
+                        }
+                    }
+                    search_result.sort();
+                }
+                
+                if (res !== undefined) {
+                    setCORSHeaders(res);
+                    res.json(search_result);
+                    res.end();
+                }
+            } else {
+                console.log(error);
+            }
+        }
+    );
+}
+
+function query(url, query_result, req, res)
+{       
+    request.post({
+            url: url,
+            headers: req.headers,
+            body: JSON.stringify(req.body),
+            timeout: timeout
+        },
+        function (error, response, body) {
+            if (!error && response.statusCode == 200) {
+                if (query_result.length == 0) {
+                    query_result = JSON.parse(body);
+                } else {
+                    query_result = query_result.concat(JSON.parse(body));
+                }
+                
+                if (res !== undefined) {
+                    setCORSHeaders(res);
+                    res.json(query_result);
+                    res.end();
+                }
+            } else {
+                console.log(error);
+            }
+        }
+    );
+}
+
+// Merged /search result, reset per request. NOTE(review): module-level
+// state shared across concurrent requests — two overlapping /search calls
+// can interleave their merges; confirm single-user Grafana usage.
+var search_result = [];
+app.all('/search', function (req, res) {
+    search_result = [];
+    search('http://mysql_jsonendpoint:8000/search', req.body);
+    search('http://mongodb_jsonendpoint:8000/search', req.body, res);
+});
+
+// Fan /query out to both backends; only the mongodb call carries `res`,
+// so it is the one that answers the client.
+app.all('/query', function (req, res) {
+    var query_result = [];
+    query('http://mysql_jsonendpoint:8000/query', query_result, req);
+    query('http://mongodb_jsonendpoint:8000/query', query_result, req, res);
+});
+
+// Container-internal port 8000 (mapped to host 9003 in docker-compose).
+app.listen(8000);
+
+console.log("Server is listening to port 8000");

+ 22 - 0
statsd/endpoint/json/package.json

@@ -0,0 +1,22 @@
+{
+  "name": "statsd_json_grafana_datasource",
+  "version": "1.0.0",
+  "description": "",
+  "main": "index.js",
+  "dependencies": {
+    "body-parser": "^1.15.1",
+    "express": "^4.15.3",
+    "lodash": "^4.13.1",
+    "mongodb": "^2.2.29",
+    "morgan": "^1.8.1",
+    "mysql": "^2.13.0",
+    "nodemon": "^1.11.0",
+    "request": "2.81.0"
+  },
+  "devDependencies": {},
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1"
+  },
+  "author": "",
+  "license": "ISC"
+}

+ 17 - 0
statsd/endpoint/mongodb/Dockerfile

@@ -0,0 +1,17 @@
+# Image for the MongoDB-backed simple-json endpoint (statsd/endpoint/mongodb).
+FROM debian:8
+# Distro node/npm are only a bootstrap; `n stable` below replaces them.
+RUN apt-get update && apt-get install -yq git wget tmux vim nodejs npm
+
+RUN npm install -g n
+
+# Upgrade to the current stable Node.js.
+RUN n stable
+
+WORKDIR /opt/datasource
+COPY index.js /opt/datasource/index.js
+COPY package.json /opt/datasource/package.json
+
+EXPOSE 8000
+
+# nodemon restarts the server when the bind-mounted source changes.
+RUN npm install -g nodemon
+
+# npm install runs at container start so the docker-compose bind mount
+# over /opt/datasource can supply an updated package.json.
+CMD npm install && nodemon index.js
+

+ 121 - 0
statsd/endpoint/mongodb/index.js

@@ -0,0 +1,121 @@
+'use strict';
+
+// Grafana simple-json endpoint backed by the statsd MongoDB database.
+var express = require('express');
+var morgan = require('morgan');
+var bodyParser = require('body-parser');
+var _ = require('lodash');
+var app = express();
+
+// NOTE(review): MongoClient, async, util and dbs are set up here but
+// never used in this file — candidates for removal.
+var mongo = require('mongodb'),
+    MongoClient = mongo.MongoClient,
+    async = require('async'),
+    util = require('util'),
+    dbs = {},
+    // Connection settings; `mongodb` resolves via the docker-compose link.
+    options = {
+            debug: false,
+            prefix: true,
+            size: 100,
+            max: 2610,
+            name: 'statsd',
+            host: 'mongodb',
+            port: 27017
+    };
+
+app.use(morgan('combined'));
+app.use(bodyParser.json());
+
+function setCORSHeaders(res) {
+  res.setHeader("Access-Control-Allow-Origin", "*");
+  res.setHeader("Access-Control-Allow-Methods", "POST");
+  res.setHeader("Access-Control-Allow-Headers", "accept, content-type");  
+}
+
+app.all('/', function(req, res) {
+  setCORSHeaders(res);
+  res.send('https://grafana.com/plugins/grafana-simple-json-datasource\n');
+  res.end();
+});
+
+app.all('/search', function (req, res) {
+    mongo.connect("mongodb://" + options.host + "/" + options.name, function (err, db) {
+        if (err) {
+            console.log(err);
+        }
+        if (options.debug) {
+            console.log("Connected successfully to server");
+        }
+        db.listCollections().toArray(function(err, collInfos) {
+            var mongo_search_result = [];
+            _.each(collInfos, function(collInfo) {
+                if (mongo_search_result.indexOf(collInfo.name) === -1) {
+                    mongo_search_result.push(collInfo.name);
+                }
+            });
+            
+            setCORSHeaders(res);
+            res.json(mongo_search_result);
+            res.end();
+        });
+    });
+});
+
+// Grafana /query: for each requested target (collection), fetch points in
+// [from, to] and return [{target, datapoints: [[value, ms], ...]}].
+app.all('/query', function (req, res) {
+    var mongo_query_result = [];
+    var from = new Date(req.body.range.from);
+    var to = new Date(req.body.range.to);
+    // Stored `time` is in whole seconds; convert the range to match.
+    var from_str = Math.floor(from.getTime() / 1000);
+    var to_str = Math.floor(to.getTime() / 1000);
+    var names = _.map(req.body.targets, function (t) {
+        return t.target;
+    });
+    // NOTE(review): `interval` is computed but never used (no downsampling).
+    var interval = req.body.intervalMs / 1000;
+    var maxDataPoints = req.body.maxDataPoints;
+
+    mongo.connect("mongodb://" + options.host + "/" + options.name, function (err, db) {
+        if (err) {
+            // NOTE(review): execution continues with `db` undefined after a
+            // connect error — the next call would throw; consider returning.
+            console.log(err);
+        }
+//        https://docs.mongodb.com/manual/reference/method/db.collection.find/#db.collection.find
+        _.each(names, function(name, index) {
+            // NOTE(review): `$slice` in a projection limits array fields, not
+            // the number of documents — confirm this caps results as intended
+            // (cursor.limit(maxDataPoints) is the usual mechanism).
+            db.collection(""+name).find({ time: { $gte: from_str, $lte: to_str } }, { type: 1, time: 1, count:1, durations: 1, gauge: 1, set: 1, $slice: maxDataPoints }).sort({ time: 1 }).toArray(function (err, docs) {
+                if (err) {
+                    console.log(err);
+                }
+                var result = {};
+                result[name] = new Array();
+                _.each(docs, function (doc) {
+                    // Pick the value field matching the stored metric type
+                    // (set by the statsd mongodb backend).
+                    var value = 0;
+                    if (doc.type == 'counters') {
+                        value = doc.count;
+                    } else if (doc.type == 'timers') {
+                        value = doc.durations;
+                    } else if (doc.type == 'gauges') {
+                        value = doc.gauge;
+                    } else if (doc.type == 'sets') {
+                        value = doc.set;
+                    }
+                    // Grafana expects [value, timestamp-in-ms].
+                    (result[name]).push([value, 1000 * doc.time]);
+                });
+
+                _.each(_.keys(result), function (key) {
+                    var data = {
+                        target: key,
+                        datapoints: result[key]
+                    };
+                    mongo_query_result.push(data);
+                });
+
+                // NOTE(review): responds when the last-ISSUED query calls
+                // back; earlier queries may still be in flight — confirm
+                // ordering or count completions instead.
+                if (index === names.length -1) {
+                    setCORSHeaders(res);
+                    res.json(mongo_query_result);
+                    res.end();
+                }
+            });
+        });
+
+    });
+});
+
+// Container-internal port 8000 (mapped to host 9002 in docker-compose).
+app.listen(8000);
+
+console.log("Server is listening to port 8000");

+ 21 - 0
statsd/endpoint/mongodb/package.json

@@ -0,0 +1,21 @@
+{
+  "name": "statsd_mongo_grafana_datasource",
+  "version": "1.0.0",
+  "description": "",
+  "main": "index.js",
+  "dependencies": {
+    "body-parser": "^1.15.1",
+    "express": "^4.15.3",
+    "lodash": "^4.13.1",
+    "mongodb": "^2.2.29",
+    "morgan": "^1.8.1",
+    "mysql": "^2.13.0",
+    "nodemon": "^1.11.0"
+  },
+  "devDependencies": {},
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1"
+  },
+  "author": "",
+  "license": "ISC"
+}

+ 1 - 1
statsd/endpoint/mysql/Dockerfile

@@ -13,5 +13,5 @@ EXPOSE 8000
 
 RUN npm install -g nodemon
 
-CMD npm install && nodejs index.js
+CMD npm install && nodemon index.js
 

+ 63 - 127
statsd/endpoint/mysql/index.js

@@ -4,69 +4,18 @@ var bodyParser = require('body-parser');
 var _ = require('lodash');
 var app = express();
 
-
 var mysql      = require('mysql');
 var connection =  mysql.createConnection({
   host     : 'mysql',
   user     : 'root',
   password : '235r2342gtfsw',
-  database : 'statsd_db'
+  database : 'statsd_db',
+  multipleStatements: true
 });
 
 app.use(morgan('combined'))
 
 app.use(bodyParser.json());
-
-var timeserie = [
-	{'target': 's1', 'datapoints':  [ [0,0], [1,0], [2,0], [3,0], [4,0] , [5,0], [6,0] , [7,0], [8,0], [9,0], [10,0], [11,0], [12,0], [13,0], [14,0], [15,0]]},
-	{'target': 's2', 'datapoints':  [ [15,0], [14,0], [13,0], [12,0], [11,0] , [10,0], [9,0] , [8,0], [7,0], [6,0], [5,0], [4,0], [3,0], [2,0], [1,0], [0,0]]},
-	{'target': 's3', 'datapoints':  [ [0,0], [1,0], [2,0], [3,0], [4,0] , [5,0], [6,0] , [7,0], [8,0], [9,0], [10,0], [11,0], [12,0], [13,0], [14,0], [15,0]]},
-	{'target': 's4', 'datapoints':  [ [15,0], [14,0], [13,0], [12,0], [11,0] , [10,0], [9,0] , [8,0], [7,0], [6,0], [5,0], [4,0], [3,0], [2,0], [1,0], [0,0]]},
-	{'target': 's5', 'datapoints':  [ [0,0], [1,0], [2,0], [3,0], [4,0] , [5,0], [6,0] , [7,0], [8,0], [9,0], [10,0], [11,0], [12,0], [13,0], [14,0], [15,0]]},
-	{'target': 's6', 'datapoints':  [ [15,0], [14,0], [13,0], [12,0], [11,0] , [10,0], [9,0] , [8,0], [7,0], [6,0], [5,0], [4,0], [3,0], [2,0], [1,0], [0,0]]}
-	]; //require('./series');
-
-var now = Date.now();
-
-for (var i = timeserie.length -1; i >= 0; i--) {
-  var series = timeserie[i];
-  var decreaser = 0;
-  for (var y = series.datapoints.length -1; y >= 0; y--) {
-    series.datapoints[y][1] = Math.round((now - decreaser) /1000) * 1000;
-    decreaser += 50000;
-  }
-}
-
-var annotation = {
-  name : "annotation name",
-  enabled: true,
-  datasource: "generic datasource",
-  showLine: true,
-}
-
-var annotations = [
-  { annotation: annotation, "title": "Donlad trump is kinda funny", "time": 1450754160000, text: "teeext", tags: "taaags" },
-  { annotation: annotation, "title": "Wow he really won", "time": 1450754160000, text: "teeext", tags: "taaags" },
-  { annotation: annotation, "title": "When is the next ", "time": 1450754160000, text: "teeext", tags: "taaags" }
-];
-
-var now = Date.now();
-var decreaser = 0;
-for (var i = 0;i < annotations.length; i++) {
-  var anon = annotations[i];
-  anon.time = (now - decreaser);
-  decreaser += 1000000
-}
-
-var table =
-  {
-    columns: [{text: 'Time', type: 'time'}, {text: 'Country', type: 'string'}, {text: 'Number', type: 'number'}],
-    values: [
-      [ 1234567, 'SE', 123 ],
-      [ 1234567, 'DE', 231 ],
-      [ 1234567, 'US', 321 ],
-    ]
-  };
   
 function setCORSHeaders(res) {
   res.setHeader("Access-Control-Allow-Origin", "*");
@@ -74,118 +23,105 @@ function setCORSHeaders(res) {
   res.setHeader("Access-Control-Allow-Headers", "accept, content-type");  
 }
 
-
-var now = Date.now();
-var decreaser = 0;
-for (var i = 0;i < table.values.length; i++) {
-  var anon = table.values[i];
-
-  anon[0] = (now - decreaser);
-  decreaser += 1000000
-}
-
 app.all('/', function(req, res) {
   setCORSHeaders(res);
   res.send('https://grafana.com/plugins/grafana-simple-json-datasource\n');
   res.end();
 });
 
-var result = [];
-function search(table)
+function search(table, res)
 {
-    connection.query('SELECT `name` FROM `' + table + '` GROUP BY `name` ORDER BY `name`', function (err, rows, fields) {
-        if (err)
+    connection.query('SELECT DISTINCT(`name`) FROM `' + table + '` WHERE `name` NOT IN ("' + mysql_search_result.join('", "') + '") ORDER BY `name`', function (err, rows, fields) {
+        if (err) {
+            console.log(err);
             throw err;
-        _.each(rows, function (ts) {
-            if (result.indexOf(ts.name) === -1) { // Se agregan solo los name que no esten en result
-                result.push(ts.name);
+        }
+        for (var i = 0; i <= rows.length - 1; i++) {
+            if (mysql_search_result.indexOf(rows[i].name) === -1) { // Se agregan solo los name que no esten en result
+                mysql_search_result.push(rows[i].name);
             }
-        });
+        }
     });
-
 }
+
+var mysql_search_result = [];
+
+search('gauges_statistics');
+search('counters_statistics');
+search('timers_statistics');
+search('sets_statistics');
+
 app.all('/search', function (req, res) {
     setCORSHeaders(res);
+    res.json(mysql_search_result);
+    res.end();
     
     search('gauges_statistics');
     search('counters_statistics');
     search('timers_statistics');
     search('sets_statistics');
-        
-    res.json(result);
-    res.end();
 });
 
-/**
- * Este metodo no esta implementado.
- */
-//app.all('/annotations', function(req, res) {
-//  setCORSHeaders(res);
-//  console.log(req.url);
-//  console.log(req.body);
-//
-//  res.json(annotations);
-//  res.end();
-//})
-
-/**
- * @todo :  intervalMs: 500,
- *          targets: [ { 
- * 		type: 'timeserie' 
- * 	    }]
- * 	    maxDataPoints: 1920,
- * 
- */
-var tsResult = [];
-function query(table, req)
+function query(table, req, res)
 {
     var from = new Date(req.body.range.from);
     var to = new Date(req.body.range.to);
     var from_str = Math.floor(from.getTime() / 1000);
     var to_str = Math.floor(to.getTime() / 1000);
     var names = _.map(req.body.targets, function (t) {
-        return t.target
-        console.log(t);
+        return t.target;
     });
-    var sql = 'SELECT timestamp, name, AVG(value) as value FROM `' + table + '` WHERE (timestamp BETWEEN ' + from_str + ' AND ' + to_str + ') AND (`name` IN ("' + names.join('", "') + '")) ORDER BY timestamp ASC';
-
+    
+    //var sql = mode + 'SELECT `timestamp`, `name`, AVG(`value`) AS `value` FROM `' + table + '` WHERE (`timestamp` BETWEEN ' + from_str + ' AND ' + to_str + ') AND (`name` IN ("' + names.join('", "') + '"))';
+    //var group_by = ' GROUP BY UNIX_TIMESTAMP(`timestamp`) DIV ' + interval;
+    
     var interval = req.body.intervalMs / 1000;
-    var group_by = ' GROUP BY UNIX_TIMESTAMP(timestamp) DIV ' + interval;
-
+    var mode = "SET sql_mode=(SELECT REPLACE(@@sql_mode,'ONLY_FULL_GROUP_BY','')); ";
+    var sql = mode + 'SELECT `timestamp`, `name`, `value` FROM `' + table + '` WHERE (`timestamp` BETWEEN ' + from_str + ' AND ' + to_str + ') AND (`name` IN ("' + names.join('", "') + '"))';
+    var group_by = '';
+    var order_by = ' ORDER BY `timestamp` ASC';
     var maxDataPoints = req.body.maxDataPoints;
-    var limit = ' LIMIT ' + maxDataPoints;
-
-    sql += group_by + limit;
-    console.log(sql);
+    var limit = ' LIMIT ' + maxDataPoints + ';';
+    sql += group_by + order_by + limit;  
+        
     connection.query(sql, function (err, rows, fields) {
-        var result = {};
-        _.each(rows, function (d) {
-            if (!(d.name in result)) {
-                result[d.name] = new Array();
+        if (err) {
+            console.log(err);
+            throw err;
+        }
+        
+        var result = [];
+        for (var i = 0; i < rows.length - 1; i++) {
+            if (rows[i].name !== undefined) {
+                if (result.indexOf(rows[i].name) === -1) {
+                    result[rows[i].name] = new Array();   
+                }
+                (result[rows[i].name]).push([rows[i].value, 1000 * rows[i].timestamp]);
             }
-            (result[d.name]).push([d.value, 1000 * d.timestamp]);
-        });
-
-        _.each(_.keys(result), function (d) {
-            var data = {target: d};
-            data['datapoints'] = result[d];
-            tsResult.push(data);
-        });
+        }
+        var keys = _.keys(result);
+        for (var i = 0; i < keys.length; i++) {
+            var data = {
+                target: keys[i],
+                datapoints: result[keys[i]]
+            };
+            mysql_query_result.push(data);
+        }
+        if (res !== undefined) {
+            setCORSHeaders(res);
+            res.json(mysql_query_result);
+            res.end();
+        }
     });
 }
 
+var mysql_query_result = [];
 app.all('/query', function (req, res) {
-    setCORSHeaders(res);
-    console.log(req.url);
-    console.log(req.body);
-
+    mysql_query_result = [];
     query('gauges_statistics', req);
     query('counters_statistics', req);
     query('timers_statistics', req);
-    query('sets_statistics', req);
-
-    res.json(tsResult);
-    res.end();
+    query('sets_statistics', req, res);
 });
 
 app.listen(8000);

+ 5 - 0
statsd/statsd/Dockerfile

@@ -14,4 +14,9 @@ RUN cd /opt/nodejs-statsd-mysql-backend ; npm install sequence
 RUN git clone https://github.com/etsy/statsd /opt/statsd
 RUN apt-get install -yq wget 
 
+RUN wget https://raw.githubusercontent.com/dynmeth/mongo-statsd-backend/master/lib/index.js -O /opt/statsd/backends/mongodb.js
+
+RUN cd /opt/statsd ; npm install --save mongodb
+RUN cd /opt/statsd ; npm install --save async
+
 CMD node /opt/statsd/stats.js /opt/config/statsd.config.js

+ 33 - 25
statsd/statsd/statsd.config.js

@@ -1,28 +1,36 @@
 {
-	"backends" : [ 
-		"./backends/console","/opt/nodejs-statsd-mysql-backend/mysql-backend"
-		],
-	"server" : "./servers/udp",
-	"address" : "0.0.0.0",
-	"port": 8125,
-	"debug" : true,
-	"deleteGauges":   true,
-	"deleteTimers":   true,
-	"deleteSets" :    true,
-	"deleteCounters": true,
-
-	mysql: { 
-	     host: "mysql", 
-	     port: 3306, 
-	     user: "root", 
-             password: "235r2342gtfsw", 
-	     database: "statsd_db",
-	     backendPath : "/opt/nodejs-statsd-mysql-backend/", 
-	     engines : {
-		    counters: ["engines/countersEngine.js"],
-		    gauges:   ["engines/gaugesEngine.js"],
-		    timers:   ["engines/timersEngine.js"],
-		    sets:     ["engines/setsEngine.js"]
-	     }
+    "backends": [
+        "./backends/console",
+        "/opt/nodejs-statsd-mysql-backend/mysql-backend",
+        "./backends/mongodb"
+    ],
+    "server": "./servers/udp",
+    "address": "0.0.0.0",
+    "port": 8125,
+    "debug": true,
+    "deleteGauges": true,
+    "deleteTimers": true,
+    "deleteSets": true,
+    "deleteCounters": true,
+    
+    mysql: {
+        host: "mysql",
+        port: 3306,
+        user: "root",
+        password: "235r2342gtfsw",
+        database: "statsd_db",
+        backendPath: "/opt/nodejs-statsd-mysql-backend/",
+        engines: {
+            counters: ["engines/countersEngine.js"],
+            gauges:   ["engines/gaugesEngine.js"],
+            timers:   ["engines/timersEngine.js"],
+            sets:     ["engines/setsEngine.js"]
         }
+    },
+    
+    "mongoHost": 'mongodb',
+    "mongoPort": 27017,
+    "mongoMax": 2160, 
+    "mongoPrefix": false, 
+    "mongoName": 'statsd'
 }