Merge remote-tracking branch 'refs/remotes/origin/feature/csv-columns'
@@ -74,7 +74,7 @@ app.get('/json/val/:tag', fns.latestValueS
 app.get('/json/series/:tag/:hours', fns.seriesTagValues); // Gets all the values of a tag for the last X hours
 app.get('/json/valBetween/:tag/:startDatetime/:endDatetime', fns.seriesTagValuesBetween); // Gets the values of a tag between the start time and end time
 app.get('/json/CSV/all', fns.allDataCSV); // Gets a CSV of all values stored
-app.get('/json/CSV/:tag/:startDatetime/:endDatetime', fns.seriesCSV); // Gets a CSV of the values of a tag between the start time and end time
+app.get('/json/CSV/:tag/:startDatetime/:endDatetime', fns.seriesCSVBetween); // Gets a CSV of the values of a tag between the start time and end time
 app.get('/json/CSV/:tag/:hours', fns.seriesCSV); // Gets a CSV of the values of a tag for the last x hours
 app.get('/json/all', fns.latestValueAllTags); // Gets the latest values of all tags in the scan list
 app.get('/json/config', fns.getSetup); // Gets the contents of the config table
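
The only change above is the handler for the between-dates CSV route: it previously pointed at fns.seriesCSV and now points at the dedicated fns.seriesCSVBetween. A minimal client-side check of the CSV endpoints, as a sketch only (the host and port are assumptions, not part of this change):

// Hypothetical usage sketch; localhost:3000 is a placeholder for the deployment.
var http = require('http');

http.get('http://localhost:3000/json/CSV/all', function(res){
    var body = '';
    res.on('data', function(chunk){ body += chunk; });
    res.on('end', function(){
        console.log(res.headers['content-type']); // should report text/csv
        console.log(body);                        // the CSV built by allDataCSV
    });
});

The same pattern works for the per-tag routes, for example /json/CSV/3/12 for the last 12 hours of tag 3 (hypothetical tag id).
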
@@ -197,10 +197,25 @@ exports.seriesTagValuesBetween = function(req, res){
     });
 };
 
+var createCSVrow = function(header, dataRow){
+    var i = header.indexOf(dataRow.vanityName);
+    var csvRow = dataRow.id.toString() + "," + dataRow.dateAdded + ",";
+    if (i >= 0){
+        for (var j = 2; j < header.length; j++){
+            if (j == i){
+                csvRow = csvRow + dataRow.val.toString() + ",";
+            } else {
+                csvRow = csvRow + ",";
+            }
+        }
+        csvRow = csvRow.slice(0, -1) + "\r";
+        return csvRow;
+    }
+};
 
 exports.allDataCSV = function(req, res){
     var sqlite3 = require('sqlite3').verbose();
     var db = new sqlite3.Database(dbFile);
 
     db.serialize(function(){
         var query = "SELECT v.id, t.vanityName, v.val, v.dateAdded FROM tags t JOIN vals v ON t.id = v.tagID";
         var prepQuery = db.prepare(query);
@@ -211,17 +226,24 @@ exports.allDataCSV = function(req, res){
                 console.log(err);
                 res.json({status:"error", message:err, query:query});
             } else {
-                var csvString = "";
-                var h = ["ID", "Tag Name", "Value", "DateAdded"];
-                csvString = csvString + h.join(",") + "\r";
-                for (var i = 0; i < rows.length; i++){
-                    var r = [rows[i].id, rows[i].vanityName, rows[i].val, rows[i].dateAdded];
-                    csvString = csvString + r.join(",") + "\r";
-                }
-
-                res.set('Content-Type', 'text/csv');
-                res.set('Content-Disposition', "attachment;filename=tagdata.csv");
-                res.send(csvString);
+                getAllTags(function(err, tags){
+                    if (err){
+                        console.log(err);
+                    } else {
+                        var csvString = "";
+                        var da = ["id", "DateAdded"];
+                        var tagVanityNames = tags.map(function(t){return t.vanityName;});
+                        var h = da.concat(tagVanityNames);
+                        console.log(h);
+                        csvString = csvString + h.join(",") + "\r";
+                        for (var i = 0; i < rows.length; i++){
+                            csvString = csvString + createCSVrow(h, rows[i]);
+                        }
+                        res.set('Content-Type', 'text/csv');
+                        res.set('Content-Disposition', "attachment;filename=tagdata.csv");
+                        res.send(csvString);
+                    }
+                });
             }
         });
     });
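
For /json/CSV/all the header row becomes id, DateAdded, then one column per tag, and each data row goes through the new createCSVrow, which places the value under its own tag's column, leaves the other tag columns empty, and trims the surplus trailing comma with slice(0, -1). For a three-tag scan list a single row therefore looks something like 42,2016-05-14 10:00:12,,72.4, (hypothetical values). The handler also calls getAllTags, which is defined elsewhere in this module and is not part of this diff; a minimal sketch of what that helper presumably does, assuming the same tags table and dbFile used by the handlers above:

// Sketch only: the real getAllTags lives outside this diff. Assumes the tags
// table exposes id and vanityName, as the JOIN queries above suggest.
var getAllTags = function(callback){
    var sqlite3 = require('sqlite3').verbose();
    var db = new sqlite3.Database(dbFile);
    db.all("SELECT id, vanityName FROM tags", function(err, rows){
        db.close();
        callback(err, rows); // rows: [{id: 1, vanityName: "..."}, ...]
    });
};
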
@@ -232,7 +254,7 @@ exports.seriesCSV = function(req, res){
     var db = new sqlite3.Database(dbFile);
 
     db.serialize(function(){
-        var query = "SELECT * FROM vals WHERE tagID = ? AND dateAdded > DATETIME('now', '-1 HOUR')";
+        var query = "SELECT v.id, t.vanityName, v.val, v.dateAdded FROM tags t JOIN vals v ON t.id = v.tagID WHERE tagID = ? AND v.dateAdded > DATETIME('now', '-1 HOUR')";
         var prepQuery = db.prepare(query);
         prepQuery.all(parseInt(req.params.tag), function(err, rows){
             prepQuery.finalize();
@@ -242,13 +264,11 @@ exports.seriesCSV = function(req, res){
                 res.json({status:"error", message:err, query:query});
             } else {
                 var csvString = "";
-                var h = ["ID", "Value", "DateAdded"];
+                var h = ["id", "DateAdded", rows[0].vanityName];
                 csvString = csvString + h.join(",") + "\r";
                 for (var i = 0; i < rows.length; i++){
-                    var r = [rows[i].id, rows[i].val, rows[i].dateAdded];
-                    csvString = csvString + r.join(",") + "\r";
+                    csvString = csvString + [rows[i].id, rows[i].dateAdded, rows[i].val].join(",") + "\r";
                 }
-
                 res.set('Content-Type', 'text/csv');
                 res.set('Content-Disposition', "attachment;filename=tagdata.csv");
                 res.send(csvString);
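
seriesCSV now joins against tags so the tag's vanityName is available to the handler, and the header changes from ID, Value, DateAdded to id, DateAdded, <tag name>, with each row reordered to match. A response for a tag named BoilerTemp might look like the following (hypothetical tag name and values; the code separates rows with \r rather than the line breaks shown here):

id,DateAdded,BoilerTemp
17,2016-05-14 10:00:12,72.4
18,2016-05-14 10:05:12,73.1
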
@@ -262,7 +282,7 @@ exports.seriesCSVBetween = function(req, res){
     var db = new sqlite3.Database(dbFile);
 
     db.serialize(function(){
-        var query = "SELECT * FROM vals WHERE tagID = ? AND dateAdded >= DATETIME(?) AND dateAdded <= DATETIME(?)";
+        var query = "SELECT v.id, t.vanityName, v.val, v.dateAdded FROM tags t JOIN vals v ON t.id = v.tagID WHERE tagID = ? AND dateAdded >= DATETIME(?) AND dateAdded <= DATETIME(?)";
         var prepQuery = db.prepare(query);
         prepQuery.all(parseInt(req.params.tag), dString_to_sqlite(req.params.startDatetime), dString_to_sqlite(req.params.endDatetime), function(err, rows){
             prepQuery.finalize();
@@ -272,13 +292,11 @@ exports.seriesCSVBetween = function(req, res){
                 res.json({status:"error", message:err, query:query});
             } else {
                 var csvString = "";
-                var h = ["ID", "Value", "DateAdded"];
+                var h = ["id", "DateAdded", rows[0].vanityName];
                 csvString = csvString + h.join(",") + "\r";
                 for (var i = 0; i < rows.length; i++){
-                    var r = [rows[i].id, rows[i].val, rows[i].dateAdded];
-                    csvString = csvString + r.join(",") + "\r";
+                    csvString = csvString + [rows[i].id, rows[i].dateAdded, rows[i].val].join(",") + "\r";
                 }
-
                 res.set('Content-Type', 'text/csv');
                 res.set('Content-Disposition', "attachment;filename=tagdata.csv");
                 res.send(csvString);
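
seriesCSVBetween gets the same treatment as seriesCSV: the joined query exposes vanityName and the header becomes id, DateAdded, <tag name>. Both handlers read rows[0].vanityName to build that header, so they assume the query returned at least one row; a tag with no values in the requested range would leave rows empty and rows[0] undefined. A hedged sketch of one possible guard, not part of this change, placed before the header is built:

// Sketch only: one way to handle an empty result set before touching rows[0].
if (!rows || rows.length === 0){
    res.status(204).end(); // no values for this tag in the requested range
    return;
}
var h = ["id", "DateAdded", rows[0].vanityName];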