local uci = require("uci")
local cur = uci.cursor()
local json = require("json")

print("no sqlite log defined!")
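
-- Return a shallow copy of a table (non-table values are returned as-is), so
-- helpers such as median() can sort data without mutating the caller's table.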
function shallowcopy(orig)
	local copy
	if type(orig) == 'table' then
		copy = {}
		for orig_key, orig_value in pairs(orig) do
			copy[orig_key] = orig_value
		end
	else -- number, string, boolean, etc
		copy = orig
	end
	return copy
end
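
-- Median of a numeric array; works on a sorted copy so the input keeps its order.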
function median(dataset)
	-- sort a copy so the caller's table keeps its order
	local sorted = shallowcopy(dataset)
	table.sort(sorted)
	return sorted[math.ceil(#sorted/2)]
end
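
-- Moving-median filter: smooth a series of {t=timestamp, y=value} rows with a
-- sliding window of `width` samples, suppressing single-sample outliers.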
function filter_data(dataset,width)
	width = width or 5 -- assumed default window width; callers may omit it
	local window_spread = math.floor(width/2)
	local window = {}
	local result = {}
	for i = 1,window_spread do
		window[#window+1] = dataset[i]["y"]
	end
	for key,value in pairs(dataset) do
		local nextelem = dataset[key+window_spread]
		if nextelem then
			window[#window+1] = nextelem["y"]
		end
		if not nextelem or #window>width then
			table.remove(window,1)
		end
		local row = shallowcopy(value)
		row["y"] = median(window)
		result[#result+1] = row
	end
	return result
end
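
-- Compact a series into averaged values by loading it into a throwaway sqlite
-- database and grouping on timestamps rounded into ten-minute buckets (..:x5:00).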
function average_results(dataset,con)
	local name = os.tmpname()
	local tmpcon = assert(env:connect(name))
	assert(tmpcon:execute("create table series(time_stamp datetime,value float)"))
	for key,value in pairs(dataset) do
		assert(tmpcon:execute(string.format("INSERT INTO series(time_stamp,value) VALUES ('%s','%s')",value["t"],value["y"])))
	end
	local sql = "select rounded as t,avg(value) as y from (select substr(strftime('%Y-%m-%dT%H:%M',time_stamp),1,15)||'5:00' rounded,value from series) group by rounded order by rounded"
	local results = run_sql(sql,tmpcon)
	tmpcon:close()
	os.remove(name)
	return results
end
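
-- Execute a query and return every row as a table keyed by column name.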
function run_sql(sql,con)
	local result = {}
	local cursor = assert(con:execute(sql))
	local row = cursor:fetch({}, "a")
	while row do
		result[#result+1] = row
		row = cursor:fetch({}, "a")
	end
	return result
end
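
-- List the distinct sensor_id/sensor/param combinations logged on `day`,
-- or within the last 24 hours when no day is given.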
function get_list(day,con)
	local sql = "SELECT DISTINCT sensor_id,sensor,param FROM log WHERE time_stamp>=datetime('now','-1 day','localtime') ORDER BY sensor_id,sensor,param"
	if day then
		sql = string.format("SELECT DISTINCT sensor_id,sensor,param FROM log WHERE time_stamp>='%s' and time_stamp<date('%s','+1 day') ORDER BY sensor_id,sensor,param",day,day)
	end
	return run_sql(sql,con)
end
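
-- Fetch the raw series for one sensor parameter as {t=ISO timestamp, y=value} rows.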
function get_raw(day,con,sensor_id,sensor_type,param)
	local format = '%Y-%m-%dT%H:%M:%S'
	local sql = string.format("SELECT strftime('%s',time_stamp) as t,value as y FROM log WHERE time_stamp>=datetime('now','-1 day','localtime') and sensor_id='%s' and sensor='%s' and param='%s' ORDER BY time_stamp",format,sensor_id,sensor_type,param)
	if day then
		sql = string.format("SELECT strftime('%s',time_stamp) as t,value as y FROM log WHERE time_stamp>='%s' and time_stamp<date('%s','+1 day') and sensor_id='%s' and sensor='%s' and param='%s' ORDER BY time_stamp",format,day,day,sensor_id,sensor_type,param)
	end
	return run_sql(sql,con)
end
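
-- Like get_raw, but with the moving-median filter applied to the values.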
function get_filtered(day,con,sensor_id,sensor_type,param,width)
	local format = '%Y-%m-%dT%H:%M:%S'
	local sql = string.format("SELECT strftime('%s',time_stamp) as t,value as y FROM log WHERE time_stamp>=datetime('now','-1 day','localtime') and sensor_id='%s' and sensor='%s' and param='%s' ORDER BY time_stamp",format,sensor_id,sensor_type,param)
	if day then
		sql = string.format("SELECT strftime('%s',time_stamp) as t,value as y FROM log WHERE time_stamp>='%s' and time_stamp<date('%s','+1 day') and sensor_id='%s' and sensor='%s' and param='%s' ORDER BY time_stamp",format,day,day,sensor_id,sensor_type,param)
	end
	return filter_data(run_sql(sql,con),width)
end
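
-- Write a dataset as JSON to `file`, or to stdout when no file is given.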
function dump_json(dataset,file)
	local f = file and assert(io.open(file,"w")) or io.stdout
	f:write(json.encode(dataset))
	if file then f:close() end
end
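
-- Write a dataset as "timestamp value" lines to `file`, or to stdout when no file is given.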
function dump_txt(dataset,file)
	local f = file and assert(io.open(file,"w")) or io.stdout
	for key,row in pairs(dataset) do
		f:write(row["t"].." "..row["y"].."\n")
	end
	if file then f:close() end
end
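
-- Write a sensor list as "sensor_id sensor param" lines to `file`, or to stdout.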
function dump_list(dataset,file)
	local f = file and assert(io.open(file,"w")) or io.stdout
	for key,row in pairs(dataset) do
		f:write(row["sensor_id"].." "..row["sensor"].." "..row["param"].."\n")
	end
	if file then f:close() end
end
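
-- Command-line entry point: arg[2] selects the action; the remaining arguments
-- select the day, sensor and output directory, depending on the command.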
local command = arg[2]

local dbdriver = require("luasql.sqlite3")
env = assert(dbdriver.sqlite3())
con = assert(env:connect(logdb))
if command == "list" then
	dump_list(get_list(day,con))
elseif command == "get" then
	dump_txt(get_raw(day,con,sensor_id,sensor_type,param))
elseif command == "get-filtered" then
	dataset = get_filtered(day,con,sensor_id,sensor_type,param)
	dump_txt(dataset)
elseif command == "get-compacted" then
	dataset = get_filtered(day,con,sensor_id,sensor_type,param)
	dump_txt(average_results(dataset))
elseif command == "dump" then
	local directory = arg[4]
	if not directory then
		print("no target directory given!")
		os.exit(1)
	end
	list = get_list(day,con)
	dump_json(list,directory.."/sensors.json")
	for key,value in pairs(list) do
		dump_json(get_filtered(day,con,value["sensor_id"],value["sensor"],value["param"]),directory.."/"..value["sensor_id"].."."..value["sensor"].."."..value["param"]..".json")
	end
elseif command == "dump-compacted" then
	local directory = arg[4]
	if not directory then
		print("no target directory given!")
		os.exit(1)
	end
	list = get_list(day,con)
	dump_json(list,directory.."/sensors.json")
	for key,value in pairs(list) do
		dump_json(average_results(get_filtered(day,con,value["sensor_id"],value["sensor"],value["param"])),directory.."/"..value["sensor_id"].."."..value["sensor"].."."..value["param"]..".json")
	end
end