====== ABSTRACT ======
This document presents the source code of the data-averaging system for the instruments of the measurement network of Department K3, Mytishchi Branch of Bauman Moscow State Technical University (МФ МГТУ им. Н. Э. Баумана).

====== WEB APPLICATION CODE ======
===== Web application root directory =====
Home page
----
<code php>
<?php
// The beginning of this file was swallowed by the wiki engine: the database
// connection ($pgi), the cmp() sort callback and the opening of the query
// string are missing. Only the tail of the query survived.
$query = '… >= now()::date - interval \'1 day\' ORDER BY date DESC';
$result = pg_query($pgi, "$query");
if (!$result) {
    die("Wrong query!");
} else {
    // collect the distinct averaging types present in the result set
    $t_mas = array();
    while ($myrow = pg_fetch_assoc($result)) {
        if (!in_array($myrow['average'], $t_mas)) {
            array_push($t_mas, $myrow['average']);
        }
    }
    rsort($t_mas);
    array_unshift($t_mas, "name", "date");
    pg_result_seek($result, 0);
    // pivot the rows: one line per device, one column per averaging type
    $i = 0;
    while ($infrow = pg_fetch_assoc($result)) {
        $i++;
        $flag = -100;
        for ($j = 1; $j < $i; $j++) {
            if ($infrow["name"] == $mass[$j][0]) {
                $flag = $j;
            }
        }
        unset($j);
        if ($flag != -100) {
            $mass[$flag][array_search($infrow['average'], $t_mas)] = $infrow['json_file'];
            $i--;
        } else {
            $mass[$i][array_search($infrow['average'], $t_mas)] = $infrow['json_file'];
            $mass[$i][array_search("name", $t_mas)] = $infrow['name'];
            $mass[$i][array_search("date", $t_mas)] = $infrow['date'];
        }
    }
    usort($mass, "cmp");
    unset($i);
    // The loop bounds and the HTML cell markup below were eaten by the wiki;
    // they are reconstructed here, with the lost tag literals left as '…'.
    $title = '';
    for ($i = 0; $i < count($t_mas); $i++) {
        $title .= '…' . $t_mas[$i] . '…';
    }
    unset($i);
    $max = '';
    for ($i = 1; $i < count($mass); $i++) {
        for ($j = 0; $j < count($t_mas); $j++) {
            $max .= '…' . $mass[$i][$j] . '…';
        }
        $max .= '…';
    }
    $list = $title . '…' . $max . '…';
    echo '…' . $list . '…';
}
}  // closes a block opened in the lost beginning of the file
pg_close($pgi);
?>
</code>
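The same pivot logic drives all three status pages below: rows arrive as (name, date, average, json_file) tuples and are regrouped into one table line per device with one column per averaging type. A minimal sketch of that grouping in Python, with hypothetical sample rows (the real pages do this in PHP, as above):

<code python>
# Sketch of the pivot used by the status pages; the sample rows are invented.
rows = [
    ("sensor-1 001", "2020-03-01", "maximum_in_day", '{"temp": 21.5}'),
    ("sensor-1 001", "2020-03-01", "minimum_in_day", '{"temp": 18.2}'),
    ("sensor-2 002", "2020-03-01", "maximum_in_day", '{"temp": 22.0}'),
]

# column order: name, date, then the averaging types in reverse sorted order
# (the PHP builds $t_mas the same way with rsort + array_unshift)
columns = ["name", "date"] + sorted({r[2] for r in rows}, reverse=True)

table = {}
for name, date, average, json_file in rows:
    line = table.setdefault(name, {"name": name, "date": date})
    line[average] = json_file

for line in table.values():
    print([line.get(c, "") for c in columns])
</code>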
----
<code php>
<?php
// As in the previous listing, the beginning of the file (connection, cmp(),
// the start of the query) and the HTML tag literals were eaten by the wiki.
$query = '… >= now()::date - interval \'1 hour\' AND (average = \'maximum_in_hour\' OR average = \'minimum_in_hour\' OR average = \'aver_hour\') ORDER BY date DESC';
$result = pg_query($pgi, "$query");
if (!$result) {
    die("Wrong query!");
} else {
    $t_mas = array();
    while ($myrow = pg_fetch_assoc($result)) {
        if (!in_array($myrow['average'], $t_mas)) {
            array_push($t_mas, $myrow['average']);
        }
    }
    rsort($t_mas);
    array_unshift($t_mas, "name", "date");
    pg_result_seek($result, 0);
    $i = 0;
    while ($infrow = pg_fetch_assoc($result)) {
        $i++;
        $flag = -100;
        for ($j = 1; $j < $i; $j++) {
            if ($infrow["name"] == $mass[$j][0]) {
                $flag = $j;
            }
        }
        unset($j);
        if ($flag != -100) {
            $mass[$flag][array_search($infrow['average'], $t_mas)] = $infrow['json_file'];
            $i--;
        } else {
            $mass[$i][array_search($infrow['average'], $t_mas)] = $infrow['json_file'];
            $mass[$i][array_search("name", $t_mas)] = $infrow['name'];
            $mass[$i][array_search("date", $t_mas)] = $infrow['date'];
        }
    }
    usort($mass, "cmp");
    unset($i);
    $title = '';
    for ($i = 0; $i < count($t_mas); $i++) {
        $title .= '…' . $t_mas[$i] . '…';
    }
    unset($i);
    $max = '';
    for ($i = 1; $i < count($mass); $i++) {
        for ($j = 0; $j < count($t_mas); $j++) {
            $max .= '…' . $mass[$i][$j] . '…';
        }
        $max .= '…';
    }
    $list = $title . '…' . $max . '…';
    echo '…' . $list . '…';
}
}  // closes a block opened in the lost beginning of the file
pg_close($pgi);
?>
</code>
----
<code php>
<?php
// Same structure as the two listings above; the lost beginning and the eaten
// HTML tag literals are marked the same way.
$query = '… >= now()::date - interval \'15 minutes\' AND (average = \'minimum_in_15min\' OR average = \'maximum_in_15max\' OR average = \'aver_15min\') ORDER BY date DESC';
$result = pg_query($pgi, "$query");
if (!$result) {
    die("Wrong query!");
} else {
    $t_mas = array();
    while ($myrow = pg_fetch_assoc($result)) {
        if (!in_array($myrow['average'], $t_mas)) {
            array_push($t_mas, $myrow['average']);
        }
    }
    rsort($t_mas);
    array_unshift($t_mas, "name", "date");
    pg_result_seek($result, 0);
    $i = 0;
    while ($infrow = pg_fetch_assoc($result)) {
        $i++;
        $flag = -100;
        for ($j = 1; $j < $i; $j++) {
            if ($infrow["name"] == $mass[$j][0]) {
                $flag = $j;
            }
        }
        unset($j);
        if ($flag != -100) {
            //echo "yes";
            $mass[$flag][array_search($infrow['average'], $t_mas)] = $infrow['json_file'];
            $i--;
        } else {
            $mass[$i][array_search($infrow['average'], $t_mas)] = $infrow['json_file'];
            $mass[$i][array_search("name", $t_mas)] = $infrow['name'];
            $mass[$i][array_search("date", $t_mas)] = $infrow['date'];
        }
    }
    usort($mass, "cmp");
    unset($i);
    $title = '';
    for ($i = 0; $i < count($t_mas); $i++) {
        $title .= '…' . $t_mas[$i] . '…';
    }
    unset($i);
    $max = '';
    for ($i = 1; $i < count($mass); $i++) {
        for ($j = 0; $j < count($t_mas); $j++) {
            $max .= '…' . $mass[$i][$j] . '…';
        }
        $max .= '…';
    }
    $list = $title . '…' . $max . '…';
    echo '…' . $list . '…';
}
}  // closes a block opened in the lost beginning of the file
pg_close($pgi);
?>
</code>
----
Request guide
----
<code php>
<?php
// The beginning of this file is lost (connection setup, reading of the GET
// parameters into $getpar, and the opening of the first if branch).
    $query = '… >= \''.$getpar['fdate'].'\' AND date <= \''.$getpar['sdate'].'\' ORDER BY date';
} elseif ($getpar['avr_type'] == 'halfpath15') {
    $query = 'SELECT * FROM half_path WHERE average = \'half_path_15min\' AND name in ('.$getpar['name'].') AND date >= \''.$getpar['fdate'].'\' AND date <= \''.$getpar['sdate'].'\' ORDER BY date';
} elseif ($getpar['avr_type'] == 'halfpathhour') {
    $query = 'SELECT * FROM half_path WHERE average = \'half_path_hour\' AND name in ('.$getpar['name'].') AND date >= \''.$getpar['fdate'].'\' AND date <= \''.$getpar['sdate'].'\' ORDER BY date';
} elseif ($getpar['avr_type'] == '15min') {
    $query = 'SELECT * FROM table_with_time WHERE (average = \'minimum_in_15min\' OR average = \'maximum_in_15max\' OR average = \'aver_15min\') AND name in ('.$getpar['name'].') AND date >= \''.$getpar['fdate'].'\' AND date <= \''.$getpar['sdate'].'\' ORDER BY date';
} elseif ($getpar['avr_type'] == 'hour') {
    $query = 'SELECT * FROM table_with_time WHERE (average = \'minimum_in_hour\' OR average = \'maximum_in_hour\' OR average = \'aver_hour\') AND name in ('.$getpar['name'].') AND date >= \''.$getpar['fdate'].'\' AND date <= \''.$getpar['sdate'].'\' ORDER BY date';
}
$result = pg_query($pgi, "$query");
if (!$result) {
    die("Wrong query!");
} else {
    $myarr = array();
    $i = 0;
    while ($myrow = pg_fetch_assoc($result)) {
        $myarr[(string)$i." "] = stripslashes(json_encode($myrow, JSON_UNESCAPED_UNICODE));
        $i++;
    }
    $str = stripslashes(str_replace(" \"", "\"", json_encode($myarr, JSON_UNESCAPED_UNICODE)));
    $str3 = str_replace(":\"{", ":{", $str);
    echo stripslashes(str_replace("}\"", "}", $str3));
}
pg_close($pgi);
}  // closes a block opened in the lost beginning (likely a parameter check)
?>
</code>
----
Export Data
----
<code php>
<?php
// The beginning of this file is lost (request-method check, reading of the
// form parameters into $avg_type, $file_type, $start_date and $end_date,
// connection setup and the opening of the first if branch). The closing
// braces at the end of the file match that structure.
    $query = '… >= \''.$start_date.'\' AND date <= \''.$end_date.'\' ORDER BY date';
} elseif ($avg_type == 'hour') {
    $query = 'SELECT * FROM table_with_time WHERE (average = \'minimum_in_hour\' OR average = \'maximum_in_hour\' OR average = \'aver_hour\') AND date >= \''.$start_date.'\' AND date <= \''.$end_date.'\' ORDER BY date';
} elseif ($avg_type == '15min') {
    $query = 'SELECT * FROM table_with_time WHERE (average = \'minimum_in_15min\' OR average = \'maximum_in_15max\' OR average = \'aver_15min\') AND date >= \''.$start_date.'\' AND date <= \''.$end_date.'\' ORDER BY date';
}
$result = pg_query($pgi, "$query");
if (!$result) {
    die("Wrong query!");
} else {
    if ($file_type == 'json') {
        header('Content-Type: application/json');
        header('Content-Disposition: attachment; filename="data.json"');
        $myarr = array();
        $i = 0;
        while ($myrow = pg_fetch_assoc($result)) {
            $myarr[(string)$i." "] = stripslashes(json_encode($myrow, JSON_UNESCAPED_UNICODE));
            $i++;
        }
        $str = stripslashes(str_replace(" \"", "\"", json_encode($myarr, JSON_UNESCAPED_UNICODE)));
        $str3 = str_replace(":\"{", ":{", $str);
        echo stripslashes(str_replace("}\"", "}", $str3));
    } else if ($file_type == 'csv') {
        header('Content-Type: text/csv; charset=utf-8');
        header('Content-Disposition: attachment; filename="data.csv"');
        $results = pg_fetch_all($result);
        $output = fopen('php://output', 'w');
        if (!empty($results)) {
            // Get the headers
            fputcsv($output, array_keys($results[0]));
            // Output the data
            foreach ($results as $row) {
                foreach ($row as &$value) {
                    $value = mb_convert_encoding($value, 'UTF-8', 'auto');
                }
                fputcsv($output, $row);
            }
        }
        fclose($output);
    }
}
pg_close($pgi);
}
} else {
    echo 'Invalid request method';
}
?>
</code>
----
<code css>
html, body { padding: 0px; margin: 0px; }
body { width: 100vw; min-height: 100vh; overflow-x: hidden; display: flex; justify-content: center; align-items: center; flex-direction: column; background-color: rgb(33, 33, 33); }
.nav { display: flex; flex-direction: column; width: 392px; align-items: flex-start; padding: 30px; border-radius: 3px; background-color: rgba(255, 255, 255, 0.05); font-family: 'Lucida Sans', 'Lucida Sans Regular', 'Lucida Grande', 'Lucida Sans Unicode', Geneva, Verdana, sans-serif; }
.nav > h1 { width: 100%; margin: 0px; padding: 0px; font-size: 18px; border-bottom: 1px solid rgba(255, 255, 255, 0.125); padding-bottom: 10px; margin-bottom: 12px; font-weight: lighter; color: rgb(240, 240, 240); }
.nav > h3 { width: 100%; margin: 0px; padding: 0px; font-size: 18px; border-bottom: 1px solid rgba(255, 255, 255, 0.125); padding-bottom: 10px; margin-bottom: 12px; font-weight: lighter; color: rgb(240, 240, 240); }
.nav > h6 { width: 500px; margin: 0px; padding: 0px; font-size: 16px; border-bottom: 1px solid rgba(255, 255, 255, 0.125); padding-bottom: 10px; margin-bottom: 12px; font-weight: lighter; color: rgb(240, 240, 240); }
.form-group label, .form-group input, .form-group select { display: block; font-size: 16px; color: rgb(220, 220, 220); margin-top: 8px; text-decoration: none; padding-bottom: 0px; }
.nav > a { font-size: 16px; color: rgb(220, 220, 220); margin-top: 8px; text-decoration: none; padding-bottom: 0px; border-bottom: 1px solid rgb(100, 100, 100); transition: .25s; }
.nav > a:hover { color: rgb(240, 240, 240); border-bottom: 1px solid rgb(160, 160, 160); }
</code>
----
<code css>
html, body { padding: 0px; margin: 0px; }
body { width: 100vw; min-height: 100vh; overflow-x: hidden; display: flex; justify-content: center; align-items: center; flex-direction: column; background-color: rgb(33, 33, 33); }
.nav { display: flex; flex-direction: column; width: 500px; align-items: flex-start; padding: 30px; border-radius: 3px; background-color: rgba(255, 255, 255, 0.05); font-family: 'Lucida Sans', 'Lucida Sans Regular', 'Lucida Grande', 'Lucida Sans Unicode', Geneva, Verdana, sans-serif; }
.nav > h3 { width: 100%; margin: 0px; padding: 0px; font-size: 18px; border-bottom: 1px solid rgba(255, 255, 255, 0.125); padding-bottom: 10px; margin-bottom: 12px; font-weight: lighter; color: rgb(240, 240, 240); }
.nav > h6 { width: 500px; margin: 0px; padding: 0px; font-size: 18px; border-bottom: 1px solid rgba(255, 255, 255, 0.125); padding-bottom: 2px; margin-bottom: 12px; font-weight: lighter; color: rgb(240, 240, 240); }
.nav > a { font-size: 16px; color: rgb(220, 220, 220); margin-top: 8px; text-decoration: none; padding-bottom: 2px; margin-bottom: 10px; border-bottom: 1px solid rgb(100, 100, 100); transition: .25s; }
.nav > a:hover { color: rgb(240, 240, 240); border-bottom: 1px solid rgb(160, 160, 160); }
</code>
----
====== AUTOMATION CODE ======
===== Archive with the automation code and libraries =====
{{ :doc:2003:filesfordoku.zip | Server}}
===== Library source code =====
<code python>
import json
import datetime
import string
import psycopg2
import requests


def is_float(a_string):
    if a_string is None:
        return False
    try:
        float(a_string)
        return True
    except ValueError:
        return False


class Baz():
    "Stores name, serial and date"

    def __init__(self, name, serial, dt):
        self.name = name
        self.serial = serial
        self.dt = dt


def mindata(j: dict) -> dict:
    """ This function is finding min for new data """
    vec = []
    data = []
    fin = []
    t = 0
    dictData = j
    """ writing in file for debug
    with open("testfile2.json", "a") as f:
        f.write(json.dumps(dictData))
    """
    for i in dictData:
        "relocating names and dates in vector, will be using as set"
        dictDatai = dictData[i]
        t = 0
        for s in vec:
            if (s.name == dictDatai["uName"] and s.serial == dictDatai["serial"] and dictDatai["Date"][0:10] == s.dt) or dictDatai["uName"].find("Сервер") != -1:
                t = 1
        if t == 0:
            vec.append(Baz(dictDatai["uName"], dictDatai["serial"], str(dictDatai["Date"][0:10])))
    """ debug
    with open("testfile3.json", "a") as f:
        f.write(json.dumps(dictDatai, ensure_ascii=False))
    """
    for k in vec:
        "creating vector of data for each pribor at each date"
        data = []
        for i in dictData:
            dictDatai = dictData[i]
            if dictDatai["uName"] == k.name and dictDatai["serial"] == k.serial and dictDatai["Date"][0:10] == k.dt:
                data.append(dictDatai["data"])
        "working with date"
        if len(data) != 0:
            for i in data:
                fl = False
                while fl == False:
                    fl = True
                    for h in i:
                        if "system" == h[0:6] or h[0:3] == "RTC" or h[0:3] == "NTP" or h.find("calibr_date") != -1:
                            i.pop(h)
                            fl = False
                            break
        if len(data) != 0:
            "comparison"
            for h in data[0]:
                if not is_float(data[0][h]):
                    data[0][h] = None
                elif h.find("temp") != -1 and h.find("color_tempCT") == -1 and (float(data[0][h]) < -100 or float(data[0][h]) > 100):
                    data[0][h] = None
                elif h.find("pressure") != -1 and (float(data[0][h]) < 100 or float(data[0][h]) > 1000):
                    data[0][h] = None
                elif h.find("humidity") != -1 and (float(data[0][h]) < 0 or float(data[0][h]) > 100):
                    data[0][h] = None
                for i in range(1, len(data)):
                    if h in data[i]:
                        tr = 0
                        if not is_float(data[i][h]):
                            tr = 1
                        elif h.find("temp") != -1 and h.find("color_tempCT") == -1 and (float(data[i][h]) < -100 or float(data[i][h]) > 100):
                            tr = 1
                        elif h.find("pressure") != -1 and (float(data[i][h]) < 100 or float(data[i][h]) > 1000):
                            tr = 1
                        elif h.find("humidity") != -1 and (float(data[i][h]) < 0 or float(data[i][h]) > 100):
                            tr = 1
                        if tr == 0:
                            if data[0][h] is None:
                                data[0][h] = round(float(data[i][h]), 4)
                            else:
                                data[0][h] = round(float(min(float(data[0][h]), float(data[i][h]))), 4)
            "forming result"
            final = dict(Date=k.dt, uName=k.name, serial=k.serial, data=data[0])
            fin.append(final)
    fe = {}
    for i in range(len(fin)):
        fe.update({str(i): fin[i]})
    return fe


def maxdata(j: dict) -> dict:
    """ This function is finding max new data same system as min """
    vec = []
    data = []
    fin = []
    t = 0
    dictData = j
    """ debug
    with open("testfile2.json", "a") as f:
        f.write(json.dumps(dictData))
    """
    for i in dictData:
        dictDatai = dictData[i]
        t = 0
        for s in vec:
            if (s.name == dictDatai["uName"] and s.serial == dictDatai["serial"] and s.dt == dictDatai["Date"][0:10]) or dictDatai["uName"][0:6] == "Сервер":
                t = 1
        if t == 0:
            vec.append(Baz(dictDatai["uName"], dictDatai["serial"], str(dictDatai["Date"][0:10])))
    """ debug
    with open("testfile3.json", "a") as f:
        f.write(json.dumps(dictDatai, ensure_ascii=False))
    """
    for k in vec:
        data = []
        for i in dictData:
            dictDatai = dictData[i]
            if dictDatai["uName"] == k.name and dictDatai["serial"] == k.serial and dictDatai["Date"][0:10] == k.dt:
                data.append(dictDatai["data"])
        if len(data) != 0:
            for i in data:
                fl = False
                while fl == False:
                    fl = True
                    for h in i:
                        if "system" == h[0:6] or h[0:3] == "RTC" or h[0:3] == "NTP" or h.find("calibr_date") != -1:
                            i.pop(h)
                            fl = False
                            break
        if len(data) != 0:
            for h in data[0]:
                if not is_float(data[0][h]):
                    data[0][h] = None
                elif h.find("temp") != -1 and h.find("color_tempCT") == -1 and (float(data[0][h]) < -100 or float(data[0][h]) > 100):
                    data[0][h] = None
                elif h.find("pressure") != -1 and (float(data[0][h]) < 100 or float(data[0][h]) > 1000):
                    data[0][h] = None
                elif h.find("humidity") != -1 and (float(data[0][h]) < 0 or float(data[0][h]) > 100):
                    data[0][h] = None
                for i in range(1, len(data)):
                    if h in data[i]:
                        tr = 0
                        if not is_float(data[i][h]):
                            tr = 1
                        elif h.find("temp") != -1 and h.find("color_tempCT") == -1 and (float(data[i][h]) < -100 or float(data[i][h]) > 100):
                            tr = 1
                        elif h.find("pressure") != -1 and (float(data[i][h]) < 100 or float(data[i][h]) > 1000):
                            tr = 1
                        elif h.find("humidity") != -1 and (float(data[i][h]) < 0 or float(data[i][h]) > 100):
                            tr = 1
                        if tr == 0:
                            if data[0][h] is None:
                                data[0][h] = round(float(data[i][h]), 4)
                            else:
                                data[0][h] = round(float(max(float(data[0][h]), float(data[i][h]))), 4)
            final = dict(Date=k.dt, uName=k.name, serial=k.serial, data=data[0])
            fin.append(final)
    fe = {}
    for i in range(len(fin)):
        fe.update({str(i): fin[i]})
    return fe


def halfpath15(j: dict) -> dict:
    """ This function is finding sum and amount in 15 minutes """
    vec = []
    data = []
    fin = []
    t = 0
    dictData = j
    """ debug
    with open("testfile2.json", "a") as f:
        f.write(json.dumps(dictData))
    """
    for i in dictData:
        "appending our set of names and dates"
        dictDatai = dictData[i]
        t = 0
        for s in vec:
            if (s.name == dictDatai["uName"] and s.serial == dictDatai["serial"] and s.dt == dictDatai["Date"][0:10]) or dictDatai["uName"][0:6] == "Сервер":
                t = 1
        if t == 0:
            vec.append(Baz(dictDatai["uName"], dictDatai["serial"], str(dictDatai["Date"][0:10])))
    """ debug
    with open("testfile3.json", "a") as f:
        f.write(json.dumps(dictDatai, ensure_ascii=False))
    """
    for k in vec:
        "working with data and deleting system info"
        tempk = k.name
        if (k.name[0:6] != "Сервер"):
            data = []
            for i in dictData:
                dictDatai = dictData[i]
                if dictDatai["uName"] == k.name and dictDatai["serial"] == k.serial and dictDatai["Date"][0:10] == k.dt:
                    data.append(dictDatai["data"])
            if len(data) != 0:
                for i in data:
                    fl = False
                    while fl == False:
                        fl = True
                        for h in i:
                            if "system" == h[0:6] or h[0:3] == "RTC" or h[0:3] == "NTP" or h.find("calibr_date") != -1:
                                i.pop(h)
                                fl = False
                                break
            templendata = len(data)
            tempdata = {}
            if templendata != 0:
                "averaging"
                for h in data[0]:
                    lena = 1
                    if not is_float(data[0][h]):
                        data[0][h] = "0"
                        lena -= 1
                    elif h.find("temp") != -1 and h.find("color_tempCT") == -1 and (float(data[0][h]) < -100 or float(data[0][h]) > 100):
                        data[0][h] = "0"
                        lena -= 1
                    elif h.find("pressure") != -1 and (float(data[0][h]) < 100 or float(data[0][h]) > 1000):
                        data[0][h] = "0"
                        lena -= 1
                    elif h.find("humidity") != -1 and (float(data[0][h]) < 0 or float(data[0][h]) > 100):
                        data[0][h] = "0"
                        lena -= 1
                    sum = float(data[0][h])
                    for i in range(1, templendata):
                        if h in data[i]:
                            tr = 0
                            if not is_float(data[i][h]):
                                tr = 1
                            elif h.find("temp") != -1 and h.find("color_tempCT") == -1 and (float(data[i][h]) < -100 or float(data[i][h]) > 100):
                                tr = 1
                            elif h.find("pressure") != -1 and (float(data[i][h]) < 100 or float(data[i][h]) > 1000):
                                tr = 1
                            elif h.find("humidity") != -1 and (float(data[i][h]) < 0 or float(data[i][h]) > 100):
                                tr = 1
                            if tr == 0:
                                lena += 1
                                sum += float(data[i][h])
                    data[0][h] = round(sum, 4)
                    tempdata.update({str(h): lena})
                # print(data[0], data[1])
                final = dict(Date=k.dt, uName=k.name, serial=k.serial, summa=data[0], length=tempdata)
                # print(final)
                fin.append(final)
    fe = {}
    for i in range(len(fin)):
        fe.update({str(i): fin[i]})
    return fe


def aver15(j: dict) -> dict:
    """ This function is finding average in 15 minutes """
    fin = []
    dictData = j
    """ debug
    with open("testfile2.json", "a") as f:
        f.write(json.dumps(dictData))
    """
    for i in dictData:
        "appending our set of names and dates"
        dictDatai = dictData[i]
        data = dict()
        for t in dictDatai["summa"]:
            # k = dictDatai["summa"][t]
            # print(dictDatai["uName"], t, dictDatai["length"][t])
            if int(dictDatai["length"][t]) == 0:
                data[t] = 0.0000
            else:
                data[t] = round(float(dictDatai["summa"][t]) / int(dictDatai["length"][t]), 4)
        final = dict(Date=dictDatai["Date"], uName=dictDatai["uName"], serial=dictDatai["serial"], avr=data)
        fin.append(final)
    fe = {}
    for i in range(len(fin)):
        fe.update({str(i): fin[i]})
    return fe


def uploadolddata(y, m: string) -> json:
    "uploading old data from archive"
    with open('/home/alenanaz/db_all_' + y + '_' + m + '.json') as f:
        templates = json.load(f)
    return templates


def insert15min(p: dict, dbn, usavr, passavr, hostavr, tabavr, tabhp, datetime: string):
    """ inserting avr in data base """
    p1 = aver15(p)
    "connecting to the data base"
    conn = psycopg2.connect(dbname=dbn, user=usavr, password=passavr, host=hostavr)
    cursor = conn.cursor()
    "sql queries"
    for i in p:
        if (p[i]["uName"][0:6] != "Сервер"):
            cursor.execute(f'INSERT INTO {tabhp} VALUES(%s, %s, %s, %s, %s)', (datetime, (str(p[i]["uName"]) + ' ' + str(p[i]["serial"])), "half_path_15min", json.dumps(p[i]["summa"], ensure_ascii=False), json.dumps(p[i]["length"], ensure_ascii=False)))
    for i in p1:
        if (p[i]["uName"][0:6] != "Сервер"):
            cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)', (datetime, (str(p1[i]["uName"]) + ' ' + str(p1[i]["serial"])), "aver_15min", json.dumps(p1[i]["avr"], ensure_ascii=False)))
    conn.commit()


def insertmin15(p: dict, dbn, usavr, passavr, hostavr, tabavr, datetime: string):
    """ inserting min in data base """
    "connecting to the data base"
    conn = psycopg2.connect(dbname=dbn, user=usavr, password=passavr, host=hostavr)
    cursor = conn.cursor()
    "sql queries"
    for i in p:
        if (p[i]["uName"][0:6] != "Сервер"):
            cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)', (datetime, (str(p[i]["uName"]) + ' ' + str(p[i]["serial"])), "minimum_in_15min", json.dumps(p[i]["data"], ensure_ascii=False)))
    conn.commit()


def insertmax15(p: dict, dbn, usavr, passavr, hostavr, tabavr, datetime: string):
    """ insert max in db """
    "connecting to db"
    conn = psycopg2.connect(dbname=dbn, user=usavr, password=passavr, host=hostavr)
    cursor = conn.cursor()
    "sql queries"
    for i in p:
        if (p[i]["uName"][0:6] != "Сервер"):
            cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)', (datetime, (str(p[i]["uName"]) + ' ' + str(p[i]["serial"])), "maximum_in_15max", json.dumps(p[i]["data"], ensure_ascii=False)))
    conn.commit()
</code>
----
<code python>
import json
import datetime
import string
import psycopg2


class Baz():
    "Stores name, serial and date"

    def __init__(self, name, serial, dt):
        self.name = name
        self.serial = serial
        self.dt = dt


def insertavghour(dbn, usavr, passavr, hostavr,
                  tabavr, tabhp, dt: string):
    conn = psycopg2.connect(dbname=dbn, user=usavr, password=passavr, host=hostavr)
    cursor = conn.cursor()
    dt1 = datetime.datetime.strptime(dt, '%Y-%m-%d %H:%M:%S')
    cursor.execute(f'SELECT a.name, parametr, SUM( CAST(sum->>parametr AS NUMERIC)) AS summa, SUM(CAST(num->>parametr AS INTEGER)) AS numbers FROM {tabhp} INNER JOIN (SELECT name, JSON_OBJECT_KEYS(sum) AS parametr FROM {tabhp} GROUP BY name, parametr) AS a ON a.name = {tabhp}.name WHERE (\'{dt1}\' - date >= \'00:00:00\') AND (\'{dt1}\' - date < \'01:00:00\') AND average = \'half_path_15min\' GROUP BY a.name,parametr ORDER BY a.name')
    arr = tuple()
    arr = cursor.fetchall()
    dir_sum = {}
    dir_num = {}
    dir_avg = {}
    name = ""
    dt = str(dt1 - datetime.timedelta(minutes=8))
    i = 0
    for p in arr:
        if (p[1] != None and p[2] != None and p[3] != None):
            if (i == 0 or name == p[0]):
                dir_sum[p[1]] = float(p[2])
                dir_num[p[1]] = float(p[3])
                if (p[3] != 0):
                    dir_avg[p[1]] = float(round(p[2] / p[3], 4))
            else:
                if (p[3] != 0):
                    cursor.execute(f'INSERT INTO {tabhp} VALUES(%s, %s, %s, %s, %s)', (dt, (str(name)), "half_path_hour", json.dumps(dir_sum, ensure_ascii=False), json.dumps(dir_num, ensure_ascii=False)))
                    cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)', (dt, (str(name)), "aver_hour", json.dumps(dir_avg, ensure_ascii=False)))
                dir_sum.clear()
                dir_num.clear()
                dir_avg.clear()
                dir_sum[p[1]] = float(p[2])
                dir_num[p[1]] = float(p[3])
                if (p[3] != 0):
                    dir_avg[p[1]] = float(round(p[2] / p[3], 4))
            name = p[0]
            i += 1
            if (i == len(arr) and p[3] != 0):
                cursor.execute(f'INSERT INTO {tabhp} VALUES(%s, %s, %s, %s, %s)', (dt, (str(name)), "half_path_hour", json.dumps(dir_sum, ensure_ascii=False), json.dumps(dir_num, ensure_ascii=False)))
                cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)', (dt, (str(name)), "aver_hour", json.dumps(dir_avg, ensure_ascii=False)))
    conn.commit()


def insertmaxhour(dbn, usavr, passavr, hostavr, tabavr, dt: string):
    conn = psycopg2.connect(dbname=dbn, user=usavr, password=passavr, host=hostavr)
    cursor = conn.cursor()
    dt1 = datetime.datetime.strptime(dt, '%Y-%m-%d %H:%M:%S')
    cursor.execute(f'select a.name, parametr, max( cast(json_file->>parametr as numeric)) as max from {tabavr} inner join (select name, json_object_keys(json_file) as parametr from {tabavr} group by name, parametr) as a on a.name = {tabavr}.name where (\'{dt1}\' - date >= \'00:00:00\') and (\'{dt1}\' - date < \'01:00:00\') and average = \'maximum_in_15max\' group by a.name,parametr order by a.name')
    arr = tuple()
    arr = cursor.fetchall()
    dir_max = {}
    name = ""
    i = 0
    dt = str(dt1 - datetime.timedelta(minutes=8))
    for p in arr:
        if (p[1] != None and p[2] != None):
            if (i == 0 or name == p[0]):
                dir_max[p[1]] = float(p[2])
            else:
                cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)', (dt, (str(name)), 'maximum_in_hour', json.dumps(dir_max, ensure_ascii=False)))
                dir_max.clear()
                dir_max[p[1]] = float(p[2])
            name = p[0]
            i += 1
            if (i == len(arr)):
                cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)', (dt, (str(name)), 'maximum_in_hour', json.dumps(dir_max, ensure_ascii=False)))
    conn.commit()


def insertminhour(dbn, usavr, passavr, hostavr, tabavr, dt: string):
    conn = psycopg2.connect(dbname=dbn, user=usavr, password=passavr, host=hostavr)
    cursor = conn.cursor()
    dt1 = datetime.datetime.strptime(dt, '%Y-%m-%d %H:%M:%S')
    cursor.execute(f'select a.name, parametr, min( cast(json_file->>parametr as numeric)) as min from {tabavr} inner join (select name, json_object_keys(json_file) as parametr from {tabavr} group by name, parametr) as a on a.name = {tabavr}.name where (\'{dt1}\' - date >= \'00:00:00\') and (\'{dt1}\' - date < \'01:00:00\') and average = \'minimum_in_15min\' group by a.name,parametr order by a.name')
    arr = tuple()
    arr = cursor.fetchall()
    dir_max = {}
    name = ""
    i = 0
    dt = str(dt1 - datetime.timedelta(minutes=8))
    for p in arr:
        if (p[1] != None and p[2] != None):
            if (i == 0 or name == p[0]):
                dir_max[p[1]] = float(p[2])
            else:
                cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)', (dt, (str(name)), 'minimum_in_hour', json.dumps(dir_max, ensure_ascii=False)))
                dir_max.clear()
                dir_max[p[1]] = float(p[2])
            name = p[0]
            i += 1
            if (i == len(arr)):
                cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)', (dt, (str(name)), 'minimum_in_hour', json.dumps(dir_max, ensure_ascii=False)))
    conn.commit()


# days
def insertavgday(dbn, usavr, passavr, hostavr, tabavr, tabhp, dt: string):
    conn = psycopg2.connect(dbname=dbn, user=usavr, password=passavr, host=hostavr)
    cursor = conn.cursor()
    dt1 = datetime.datetime.strptime(dt, '%Y-%m-%d %H:%M:%S')
    cursor.execute(f'SELECT a.name, parametr, SUM( CAST(sum->>parametr AS NUMERIC)) AS summa, SUM(CAST(num->>parametr AS NUMERIC)) AS numbers FROM {tabhp} INNER JOIN (SELECT name, JSON_OBJECT_KEYS(sum) AS parametr FROM {tabhp} GROUP BY name, parametr) AS a ON a.name = {tabhp}.name WHERE (\'{dt1}\' - date >= \'0:00:00\') AND (\'{dt1}\' - date < \'23:55:00\') AND average = \'half_path_hour\' GROUP BY a.name,parametr ORDER BY a.name')
    arr = tuple()
    arr = cursor.fetchall()
    dir_sum = {}
    dir_num = {}
    dir_avg = {}
    name = ""
    dt = str(dt1 - datetime.timedelta(days=1))[0:10]
    i = 0
    for p in arr:
        if (p[1] != None and p[2] != None and p[3] != None):
            if (i == 0 or name == p[0]):
                dir_sum[p[1]] = float(p[2])
                dir_num[p[1]] = float(p[3])
                if (p[3] != 0):
                    dir_avg[p[1]] = float(round(p[2] / p[3], 4))
            else:
                if (p[3] != 0):
                    cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)', (dt, (str(name)), "average_of_day", json.dumps(dir_avg, ensure_ascii=False)))
                dir_sum.clear()
                dir_num.clear()
                dir_avg.clear()
                dir_sum[p[1]] = float(p[2])
                dir_num[p[1]] = float(p[3])
                dir_avg[p[1]] = float(round(p[2] / p[3], 4))
            name = p[0]
            i += 1
            if (i == len(arr) and p[3] != 0):
                cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)', (dt, (str(name)), "average_of_day", json.dumps(dir_avg, ensure_ascii=False)))
    conn.commit()


def insertmaxday(dbn, usavr, passavr, hostavr, tabavr, tabmn, dt: string):
    conn = psycopg2.connect(dbname=dbn, user=usavr, password=passavr, host=hostavr)
    cursor = conn.cursor()
    dt1 = datetime.datetime.strptime(dt, '%Y-%m-%d %H:%M:%S')
    cursor.execute(f'select a.name, parametr, max( cast(json_file->>parametr as numeric)) as max from {tabavr} inner join (select name, json_object_keys(json_file) as parametr from {tabavr} group by name, parametr) as a on a.name = {tabavr}.name where (\'{dt1}\' - date >= \'0:00:00\') and (\'{dt1}\' - date < \'23:55:00\') and average = \'maximum_in_hour\' group by a.name,parametr order by a.name')
    arr = tuple()
    arr = cursor.fetchall()
    dir_max = {}
    name = ""
    i = 0
    dt = str(dt1 - datetime.timedelta(days=1))[0:10]
    for p in arr:
        if (p[1] != None and p[2] != None):
            if (i == 0 or name == p[0]):
                dir_max[p[1]] = float(p[2])
            else:
                cursor.execute(f'INSERT INTO {tabmn} VALUES(%s, %s, %s, %s)', (dt, (str(name)), 'maximum_in_day', json.dumps(dir_max, ensure_ascii=False)))
                dir_max.clear()
                dir_max[p[1]] = float(p[2])
            name = p[0]
            i += 1
            if (i == len(arr)):
                cursor.execute(f'INSERT INTO {tabmn} VALUES(%s, %s, %s, %s)', (dt, (str(name)), 'maximum_in_day', json.dumps(dir_max, ensure_ascii=False)))
    conn.commit()


def insertminday(dbn, usavr, passavr, hostavr, tabavr, tabmn, dt: string):
    conn = psycopg2.connect(dbname=dbn, user=usavr, password=passavr, host=hostavr)
    cursor = conn.cursor()
    dt1 = datetime.datetime.strptime(dt, '%Y-%m-%d %H:%M:%S')
    cursor.execute(f'select a.name, parametr, min( cast(json_file->>parametr as numeric)) as min from {tabavr} inner join (select name, json_object_keys(json_file) as parametr from {tabavr} group by name, parametr) as a on a.name = {tabavr}.name where (\'{dt1}\' - date >= \'0:00:00\') and (\'{dt1}\' - date < \'23:55:00\') and average = \'minimum_in_hour\' group by a.name,parametr order by a.name')
    arr = tuple()
    arr = cursor.fetchall()
    dir_max = {}
    name = ""
    i = 0
    dt = str(dt1 - datetime.timedelta(days=1))[0:10]
    for p in arr:
        if (p[1] != None and p[2] != None):
            if (i == 0 or name == p[0]):
                dir_max[p[1]] = float(p[2])
            else:
                cursor.execute(f'INSERT INTO {tabmn} VALUES(%s, %s, %s, %s)', (dt, (str(name)), 'minimum_in_day', json.dumps(dir_max, ensure_ascii=False)))
                dir_max.clear()
                dir_max[p[1]] = float(p[2])
            name = p[0]
            i += 1
            if (i == len(arr)):
                cursor.execute(f'INSERT INTO {tabmn} VALUES(%s, %s, %s, %s)', (dt, (str(name)), 'minimum_in_day', json.dumps(dir_max, ensure_ascii=False)))
    conn.commit()
</code>
----
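The archive above does not include a driver script, but the function signatures imply the call pattern sketched below: raw readings are aggregated into 15-minute minima, maxima and running sums, the hourly functions then roll up the stored 15-minute rows, and the daily functions roll up the hourly rows. Everything here is an assumption: the module names functions15/functionshour, the connection parameters, the table names (TAB_MN especially is a guess for the daily table), and the use of uploadolddata as the batch source.

<code python>
# Hypothetical driver; module names, credentials and table names are placeholders.
import datetime

import functions15      # assumed module name for the first library above
import functionshour    # assumed module name for the second library above

DB, USER, PWD, HOST = "db", "user", "pass", "localhost"
TAB_AVR, TAB_HP = "table_with_time", "half_path"
TAB_MN = "table_min_max"  # daily table; name not given in the listings

now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')

# 1. take a batch of raw readings: a dict of {"uName", "serial", "Date", "data"} records
batch = functions15.uploadolddata("2020", "03")

# 2. 15-minute aggregates: min, max, running sum/count pairs, and the average
functions15.insertmin15(functions15.mindata(batch), DB, USER, PWD, HOST, TAB_AVR, now)
functions15.insertmax15(functions15.maxdata(batch), DB, USER, PWD, HOST, TAB_AVR, now)
functions15.insert15min(functions15.halfpath15(batch), DB, USER, PWD, HOST, TAB_AVR, TAB_HP, now)

# 3. hourly roll-ups built from the stored 15-minute rows
functionshour.insertavghour(DB, USER, PWD, HOST, TAB_AVR, TAB_HP, now)
functionshour.insertmaxhour(DB, USER, PWD, HOST, TAB_AVR, now)
functionshour.insertminhour(DB, USER, PWD, HOST, TAB_AVR, now)

# 4. daily roll-ups built from the stored hourly rows; insertavgday writes
#    into whichever table is passed as tabavr, so TAB_MN here is a guess
functionshour.insertavgday(DB, USER, PWD, HOST, TAB_MN, TAB_HP, now)
functionshour.insertmaxday(DB, USER, PWD, HOST, TAB_AVR, TAB_MN, now)
functionshour.insertminday(DB, USER, PWD, HOST, TAB_AVR, TAB_MN, now)
</code>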