doc:2003:mgul.200300.001.1201 [2024/08/10 17:31] daftwi [Web application root directory]
doc:2003:mgul.200300.001.1201 [2024/08/11 22:53] (current) daftwi [Web application root directory]

Line 285:
//ini_set('display_startup_errors','1');
//error_reporting(E_ALL);
-$pgi = pg_connect("host=localhost port=5432 dbname=averobo user=averobo password=<redacted>");
+$pgi = pg_connect("host=localhost port=5432 dbname=averobo user=averobo password=*****");
?>
<!DOCTYPE html>
Line 807:
  border-bottom: 1px solid rgb(160, 160, 160);
}
</code></hidden>


----

====== AUTOMATION PROGRAM CODE ======

===== Archive with the automation code and libraries =====

{{ :doc:2003:filesfordoku.zip | Server}}

===== Source code of the libraries =====

<hidden lib2><code>
import json
import datetime
import string
import psycopg2
import requests


def is_float(a_string):
    "Returns True if the string is not None and can be parsed as a float."
    if a_string is None:
        return False
    try:
        float(a_string)
        return True
    except ValueError:
        return False


class Baz():
    "Stores name, serial and date"
    def __init__(self, name, serial, dt):
        self.name = name
        self.serial = serial
        self.dt = dt

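# Illustrative note (not part of the original library): the aggregation
# functions below expect the polled data as a dict keyed by record id,
# where every record carries "uName", "serial", "Date" and a "data" dict
# mapping parameter names to value strings. The field names come from the
# code; the device name and the sample values here are invented, e.g.:
#
# {"0": {"uName": "Датчик", "serial": "001", "Date": "2024-08-10 17:00:00",
#        "data": {"temp": "21.3", "pressure": "745.1", "humidity": "48.2"}},
#  "1": {"uName": "Датчик", "serial": "001", "Date": "2024-08-10 17:05:00",
#        "data": {"temp": "21.5", "pressure": "744.9", "humidity": "47.8"}}}
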
def mindata(j: dict) -> dict:
    """
    Finds the minimum of every parameter for each device and each date
    in the newly received data.
    """
    vec = []
    fin = []
    dictData = j
    # Debug dump of the raw input (disabled):
    # with open("testfile2.json", "a") as f:
    #     f.write(json.dumps(dictData))
    for i in dictData:
        # Collect the unique (name, serial, date) combinations; server records are skipped.
        dictDatai = dictData[i]
        t = 0
        for s in vec:
            if (s.name == dictDatai["uName"] and s.serial == dictDatai["serial"] and dictDatai["Date"][0:10] == s.dt) or dictDatai["uName"].find("Сервер") != -1:
                t = 1
        if t == 0:
            vec.append(Baz(dictDatai["uName"], dictDatai["serial"], str(dictDatai["Date"][0:10])))
        # Debug dump of each record (disabled):
        # with open("testfile3.json", "a") as f:
        #     f.write(json.dumps(dictDatai, ensure_ascii=False))
    for k in vec:
        # Collect the data dictionaries of one device for one date.
        data = []
        for i in dictData:
            dictDatai = dictData[i]
            if dictDatai["uName"] == k.name and dictDatai["serial"] == k.serial and dictDatai["Date"][0:10] == k.dt:
                data.append(dictDatai["data"])
        # Drop the service parameters (system, RTC, NTP, calibration date).
        for i in data:
            for h in list(i):
                if h[0:6] == "system" or h[0:3] == "RTC" or h[0:3] == "NTP" or h.find("calibr_date") != -1:
                    i.pop(h)
        if len(data) != 0:
            # Compare the records parameter by parameter, discarding values
            # that are not numeric or fall outside the plausible ranges.
            for h in data[0]:
                if not is_float(data[0][h]):
                    data[0][h] = None
                elif h.find("temp") != -1 and h.find("color_tempCT") == -1 and (float(data[0][h]) < -100 or float(data[0][h]) > 100):
                    data[0][h] = None
                elif h.find("pressure") != -1 and (float(data[0][h]) < 100 or float(data[0][h]) > 1000):
                    data[0][h] = None
                elif h.find("humidity") != -1 and (float(data[0][h]) < 0 or float(data[0][h]) > 100):
                    data[0][h] = None
                for i in range(1, len(data)):
                    if h in data[i]:
                        tr = 0
                        if not is_float(data[i][h]):
                            tr = 1
                        elif h.find("temp") != -1 and h.find("color_tempCT") == -1 and (float(data[i][h]) < -100 or float(data[i][h]) > 100):
                            tr = 1
                        elif h.find("pressure") != -1 and (float(data[i][h]) < 100 or float(data[i][h]) > 1000):
                            tr = 1
                        elif h.find("humidity") != -1 and (float(data[i][h]) < 0 or float(data[i][h]) > 100):
                            tr = 1
                        if tr == 0:
                            if data[0][h] is None:
                                data[0][h] = round(float(data[i][h]), 4)
                            else:
                                data[0][h] = round(min(float(data[0][h]), float(data[i][h])), 4)
            # Form the result record for this device and date.
            final = dict(Date=k.dt, uName=k.name, serial=k.serial, data=data[0])
            fin.append(final)
    # Re-key the result list as a dict with string indices.
    fe = {}
    for i in range(len(fin)):
        fe.update({str(i): fin[i]})
    return fe


def maxdata(j: dict) -> dict:
    """
    Finds the maximum of every parameter for each device and each date
    in the newly received data; the structure mirrors mindata().
    """
    vec = []
    fin = []
    dictData = j
    # Debug dump of the raw input (disabled):
    # with open("testfile2.json", "a") as f:
    #     f.write(json.dumps(dictData))
    for i in dictData:
        dictDatai = dictData[i]
        t = 0
        for s in vec:
            if (s.name == dictDatai["uName"] and s.serial == dictDatai["serial"] and s.dt == dictDatai["Date"][0:10]) or dictDatai["uName"][0:6] == "Сервер":
                t = 1
        if t == 0:
            vec.append(Baz(dictDatai["uName"], dictDatai["serial"], str(dictDatai["Date"][0:10])))
        # Debug dump of each record (disabled):
        # with open("testfile3.json", "a") as f:
        #     f.write(json.dumps(dictDatai, ensure_ascii=False))
    for k in vec:
        data = []
        for i in dictData:
            dictDatai = dictData[i]
            if dictDatai["uName"] == k.name and dictDatai["serial"] == k.serial and dictDatai["Date"][0:10] == k.dt:
                data.append(dictDatai["data"])
        # Drop the service parameters (system, RTC, NTP, calibration date).
        for i in data:
            for h in list(i):
                if h[0:6] == "system" or h[0:3] == "RTC" or h[0:3] == "NTP" or h.find("calibr_date") != -1:
                    i.pop(h)
        if len(data) != 0:
            for h in data[0]:
                if not is_float(data[0][h]):
                    data[0][h] = None
                elif h.find("temp") != -1 and h.find("color_tempCT") == -1 and (float(data[0][h]) < -100 or float(data[0][h]) > 100):
                    data[0][h] = None
                elif h.find("pressure") != -1 and (float(data[0][h]) < 100 or float(data[0][h]) > 1000):
                    data[0][h] = None
                elif h.find("humidity") != -1 and (float(data[0][h]) < 0 or float(data[0][h]) > 100):
                    data[0][h] = None
                for i in range(1, len(data)):
                    if h in data[i]:
                        tr = 0
                        if not is_float(data[i][h]):
                            tr = 1
                        elif h.find("temp") != -1 and h.find("color_tempCT") == -1 and (float(data[i][h]) < -100 or float(data[i][h]) > 100):
                            tr = 1
                        elif h.find("pressure") != -1 and (float(data[i][h]) < 100 or float(data[i][h]) > 1000):
                            tr = 1
                        elif h.find("humidity") != -1 and (float(data[i][h]) < 0 or float(data[i][h]) > 100):
                            tr = 1
                        if tr == 0:
                            if data[0][h] is None:
                                data[0][h] = round(float(data[i][h]), 4)
                            else:
                                data[0][h] = round(max(float(data[0][h]), float(data[i][h])), 4)
            final = dict(Date=k.dt, uName=k.name, serial=k.serial, data=data[0])
            fin.append(final)
    fe = {}
    for i in range(len(fin)):
        fe.update({str(i): fin[i]})
    return fe


def halfpath15(j: dict) -> dict:
    """
    Builds, for each device and date, the per-parameter sum and the number
    of valid samples over the 15-minute window.
    """
    vec = []
    fin = []
    dictData = j
    # Debug dump of the raw input (disabled):
    # with open("testfile2.json", "a") as f:
    #     f.write(json.dumps(dictData))
    for i in dictData:
        # Collect the unique (name, serial, date) combinations; server records are skipped.
        dictDatai = dictData[i]
        t = 0
        for s in vec:
            if (s.name == dictDatai["uName"] and s.serial == dictDatai["serial"] and s.dt == dictDatai["Date"][0:10]) or dictDatai["uName"][0:6] == "Сервер":
                t = 1
        if t == 0:
            vec.append(Baz(dictDatai["uName"], dictDatai["serial"], str(dictDatai["Date"][0:10])))
        # Debug dump of each record (disabled):
        # with open("testfile3.json", "a") as f:
        #     f.write(json.dumps(dictDatai, ensure_ascii=False))
    for k in vec:
        # Work with the data of one device and drop the service parameters.
        if k.name[0:6] != "Сервер":
            data = []
            for i in dictData:
                dictDatai = dictData[i]
                if dictDatai["uName"] == k.name and dictDatai["serial"] == k.serial and dictDatai["Date"][0:10] == k.dt:
                    data.append(dictDatai["data"])
            for i in data:
                for h in list(i):
                    if h[0:6] == "system" or h[0:3] == "RTC" or h[0:3] == "NTP" or h.find("calibr_date") != -1:
                        i.pop(h)
            templendata = len(data)
            tempdata = {}
            if templendata != 0:
                # Sum every parameter over the window and count the valid samples.
                for h in data[0]:
                    lena = 1
                    if not is_float(data[0][h]):
                        data[0][h] = "0"
                        lena -= 1
                    elif h.find("temp") != -1 and h.find("color_tempCT") == -1 and (float(data[0][h]) < -100 or float(data[0][h]) > 100):
                        data[0][h] = "0"
                        lena -= 1
                    elif h.find("pressure") != -1 and (float(data[0][h]) < 100 or float(data[0][h]) > 1000):
                        data[0][h] = "0"
                        lena -= 1
                    elif h.find("humidity") != -1 and (float(data[0][h]) < 0 or float(data[0][h]) > 100):
                        data[0][h] = "0"
                        lena -= 1
                    total = float(data[0][h])
                    for i in range(1, templendata):
                        if h in data[i]:
                            tr = 0
                            if not is_float(data[i][h]):
                                tr = 1
                            elif h.find("temp") != -1 and h.find("color_tempCT") == -1 and (float(data[i][h]) < -100 or float(data[i][h]) > 100):
                                tr = 1
                            elif h.find("pressure") != -1 and (float(data[i][h]) < 100 or float(data[i][h]) > 1000):
                                tr = 1
                            elif h.find("humidity") != -1 and (float(data[i][h]) < 0 or float(data[i][h]) > 100):
                                tr = 1
                            if tr == 0:
                                lena += 1
                                total += float(data[i][h])
                    data[0][h] = round(total, 4)
                    tempdata.update({str(h): lena})
                final = dict(Date=k.dt, uName=k.name, serial=k.serial, summa=data[0], length=tempdata)
                fin.append(final)
    fe = {}
    for i in range(len(fin)):
        fe.update({str(i): fin[i]})
    return fe


def aver15(j: dict) -> dict:
    """
    Turns the 15-minute sums and counts into 15-minute averages.
    """
    fin = []
    dictData = j
    # Debug dump of the raw input (disabled):
    # with open("testfile2.json", "a") as f:
    #     f.write(json.dumps(dictData))
    for i in dictData:
        dictDatai = dictData[i]
        data = dict()
        for t in dictDatai["summa"]:
            if int(dictDatai["length"][t]) == 0:
                data[t] = 0.0
            else:
                data[t] = round(float(dictDatai["summa"][t]) / int(dictDatai["length"][t]), 4)
        final = dict(Date=dictDatai["Date"], uName=dictDatai["uName"], serial=dictDatai["serial"], avr=data)
        fin.append(final)
    fe = {}
    for i in range(len(fin)):
        fe.update({str(i): fin[i]})
    return fe


def uploadolddata(y: str, m: str) -> dict:
    "Loads archived data for the given year and month from the JSON dump."
    with open('/home/alenanaz/db_all_' + y + '_' + m + '.json') as f:
        templates = json.load(f)
    return templates


def insert15min(p: dict, dbn, usavr, passavr, hostavr, tabavr, tabhp, datetime: str):
    """
    Inserts the 15-minute sums/counts (half_path_15min) and the 15-minute
    averages (aver_15min) into the database.
    """
    p1 = aver15(p)
    # Connect to the database.
    conn = psycopg2.connect(dbname=dbn, user=usavr, password=passavr, host=hostavr)
    cursor = conn.cursor()
    # SQL queries: the table name comes from the caller, the values are parameterised.
    for i in p:
        if p[i]["uName"][0:6] != "Сервер":
            cursor.execute(f'INSERT INTO {tabhp} VALUES(%s, %s, %s, %s, %s)',
                           (datetime, str(p[i]["uName"]) + ' ' + str(p[i]["serial"]), "half_path_15min",
                            json.dumps(p[i]["summa"], ensure_ascii=False),
                            json.dumps(p[i]["length"], ensure_ascii=False)))
    for i in p1:
        if p1[i]["uName"][0:6] != "Сервер":
            cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)',
                           (datetime, str(p1[i]["uName"]) + ' ' + str(p1[i]["serial"]), "aver_15min",
                            json.dumps(p1[i]["avr"], ensure_ascii=False)))
    conn.commit()


def insertmin15(p: dict, dbn, usavr, passavr, hostavr, tabavr, datetime: str):
    """
    Inserts the 15-minute minima into the database.
    """
    conn = psycopg2.connect(dbname=dbn, user=usavr, password=passavr, host=hostavr)
    cursor = conn.cursor()
    for i in p:
        if p[i]["uName"][0:6] != "Сервер":
            cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)',
                           (datetime, str(p[i]["uName"]) + ' ' + str(p[i]["serial"]), "minimum_in_15min",
                            json.dumps(p[i]["data"], ensure_ascii=False)))
    conn.commit()


def insertmax15(p: dict, dbn, usavr, passavr, hostavr, tabavr, datetime: str):
    """
    Inserts the 15-minute maxima into the database.
    """
    conn = psycopg2.connect(dbname=dbn, user=usavr, password=passavr, host=hostavr)
    cursor = conn.cursor()
    for i in p:
        if p[i]["uName"][0:6] != "Сервер":
            cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)',
                           (datetime, str(p[i]["uName"]) + ' ' + str(p[i]["serial"]), "maximum_in_15max",
                            json.dumps(p[i]["data"], ensure_ascii=False)))
    conn.commit()
</code></hidden>
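
Below is a minimal usage sketch of the 15-minute aggregation functions from the listing above. It assumes the library is saved as ''lib2.py''; the table names (''averages'', ''half_path'') and the masked password are placeholders invented for this example, while ''dbname''/''user''/''host'' are taken from the web application configuration above. The real values are set in the automation scripts from the archive.

<code>
import datetime
import lib2  # the library listed above, assuming it is saved as lib2.py

# raw: records polled over the last 15 minutes, shaped as described at the
# top of lib2 (here it is read from the monthly archive only for the example).
raw = lib2.uploadolddata('2024', '08')
now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')

sums = lib2.halfpath15(raw)    # per-parameter sums and sample counts
minima = lib2.mindata(raw)     # per-parameter minima
maxima = lib2.maxdata(raw)     # per-parameter maxima

# Placeholder connection settings and table names (password and table names
# are examples only).
lib2.insert15min(sums, 'averobo', 'averobo', '*****', 'localhost', 'averages', 'half_path', now)
lib2.insertmin15(minima, 'averobo', 'averobo', '*****', 'localhost', 'averages', now)
lib2.insertmax15(maxima, 'averobo', 'averobo', '*****', 'localhost', 'averages', now)
</code>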


----

<hidden lib3><code>
import json
import datetime
import string
import psycopg2


class Baz():
    "Stores name, serial and date"
    def __init__(self, name, serial, dt):
        self.name = name
        self.serial = serial
        self.dt = dt


def insertavghour(dbn, usavr, passavr, hostavr, tabavr, tabhp, dt: str):
    """
    Rolls the stored 15-minute sums/counts of the last hour up into hourly
    sums/counts (half_path_hour) and hourly averages (aver_hour).
    """
    conn = psycopg2.connect(dbname=dbn, user=usavr, password=passavr, host=hostavr)
    cursor = conn.cursor()
    dt1 = datetime.datetime.strptime(dt, '%Y-%m-%d %H:%M:%S')
    cursor.execute(
        f"SELECT a.name, parametr, "
        f"SUM(CAST(sum->>parametr AS NUMERIC)) AS summa, "
        f"SUM(CAST(num->>parametr AS INTEGER)) AS numbers "
        f"FROM {tabhp} "
        f"INNER JOIN (SELECT name, JSON_OBJECT_KEYS(sum) AS parametr FROM {tabhp} GROUP BY name, parametr) AS a "
        f"ON a.name = {tabhp}.name "
        f"WHERE ('{dt1}' - date >= '00:00:00') AND ('{dt1}' - date < '01:00:00') "
        f"AND average = 'half_path_15min' "
        f"GROUP BY a.name, parametr ORDER BY a.name")
    arr = cursor.fetchall()
    dir_sum = {}
    dir_num = {}
    dir_avg = {}
    name = ""
    dt = str(dt1 - datetime.timedelta(minutes=8))
    i = 0
    for p in arr:
        if p[1] is not None and p[2] is not None and p[3] is not None:
            if i == 0 or name == p[0]:
                dir_sum[p[1]] = float(p[2])
                dir_num[p[1]] = float(p[3])
                if p[3] != 0:
                    dir_avg[p[1]] = float(round(p[2] / p[3], 4))
            else:
                # A new device name starts: store the finished group first.
                if p[3] != 0:
                    cursor.execute(f'INSERT INTO {tabhp} VALUES(%s, %s, %s, %s, %s)',
                                   (dt, str(name), "half_path_hour",
                                    json.dumps(dir_sum, ensure_ascii=False),
                                    json.dumps(dir_num, ensure_ascii=False)))
                    cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)',
                                   (dt, str(name), "aver_hour",
                                    json.dumps(dir_avg, ensure_ascii=False)))
                dir_sum.clear()
                dir_num.clear()
                dir_avg.clear()
                dir_sum[p[1]] = float(p[2])
                dir_num[p[1]] = float(p[3])
                if p[3] != 0:
                    dir_avg[p[1]] = float(round(p[2] / p[3], 4))
            name = p[0]
            i += 1
            if i == len(arr) and p[3] != 0:
                cursor.execute(f'INSERT INTO {tabhp} VALUES(%s, %s, %s, %s, %s)',
                               (dt, str(name), "half_path_hour",
                                json.dumps(dir_sum, ensure_ascii=False),
                                json.dumps(dir_num, ensure_ascii=False)))
                cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)',
                               (dt, str(name), "aver_hour",
                                json.dumps(dir_avg, ensure_ascii=False)))
    conn.commit()


def insertmaxhour(dbn, usavr, passavr, hostavr, tabavr, dt: str):
    """
    Rolls the stored 15-minute maxima of the last hour up into hourly maxima.
    """
    conn = psycopg2.connect(dbname=dbn, user=usavr, password=passavr, host=hostavr)
    cursor = conn.cursor()
    dt1 = datetime.datetime.strptime(dt, '%Y-%m-%d %H:%M:%S')
    cursor.execute(
        f"SELECT a.name, parametr, MAX(CAST(json_file->>parametr AS NUMERIC)) AS max "
        f"FROM {tabavr} "
        f"INNER JOIN (SELECT name, JSON_OBJECT_KEYS(json_file) AS parametr FROM {tabavr} GROUP BY name, parametr) AS a "
        f"ON a.name = {tabavr}.name "
        f"WHERE ('{dt1}' - date >= '00:00:00') AND ('{dt1}' - date < '01:00:00') "
        f"AND average = 'maximum_in_15max' "
        f"GROUP BY a.name, parametr ORDER BY a.name")
    arr = cursor.fetchall()
    dir_max = {}
    name = ""
    i = 0
    dt = str(dt1 - datetime.timedelta(minutes=8))
    for p in arr:
        if p[1] is not None and p[2] is not None:
            if i == 0 or name == p[0]:
                dir_max[p[1]] = float(p[2])
            else:
                # A new device name starts: store the finished group first.
                cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)',
                               (dt, str(name), 'maximum_in_hour',
                                json.dumps(dir_max, ensure_ascii=False)))
                dir_max.clear()
                dir_max[p[1]] = float(p[2])
            name = p[0]
            i += 1
            if i == len(arr):
                cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)',
                               (dt, str(name), 'maximum_in_hour',
                                json.dumps(dir_max, ensure_ascii=False)))
    conn.commit()


def insertminhour(dbn, usavr, passavr, hostavr, tabavr, dt: str):
    """
    Rolls the stored 15-minute minima of the last hour up into hourly minima.
    """
    conn = psycopg2.connect(dbname=dbn, user=usavr, password=passavr, host=hostavr)
    cursor = conn.cursor()
    dt1 = datetime.datetime.strptime(dt, '%Y-%m-%d %H:%M:%S')
    cursor.execute(
        f"SELECT a.name, parametr, MIN(CAST(json_file->>parametr AS NUMERIC)) AS min "
        f"FROM {tabavr} "
        f"INNER JOIN (SELECT name, JSON_OBJECT_KEYS(json_file) AS parametr FROM {tabavr} GROUP BY name, parametr) AS a "
        f"ON a.name = {tabavr}.name "
        f"WHERE ('{dt1}' - date >= '00:00:00') AND ('{dt1}' - date < '01:00:00') "
        f"AND average = 'minimum_in_15min' "
        f"GROUP BY a.name, parametr ORDER BY a.name")
    arr = cursor.fetchall()
    dir_min = {}
    name = ""
    i = 0
    dt = str(dt1 - datetime.timedelta(minutes=8))
    for p in arr:
        if p[1] is not None and p[2] is not None:
            if i == 0 or name == p[0]:
                dir_min[p[1]] = float(p[2])
            else:
                cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)',
                               (dt, str(name), 'minimum_in_hour',
                                json.dumps(dir_min, ensure_ascii=False)))
                dir_min.clear()
                dir_min[p[1]] = float(p[2])
            name = p[0]
            i += 1
            if i == len(arr):
                cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)',
                               (dt, str(name), 'minimum_in_hour',
                                json.dumps(dir_min, ensure_ascii=False)))
    conn.commit()


# Daily aggregates

def insertavgday(dbn, usavr, passavr, hostavr, tabavr, tabhp, dt: str):
    """
    Rolls the hourly sums/counts of the last day up into daily averages.
    """
    conn = psycopg2.connect(dbname=dbn, user=usavr, password=passavr, host=hostavr)
    cursor = conn.cursor()
    dt1 = datetime.datetime.strptime(dt, '%Y-%m-%d %H:%M:%S')
    cursor.execute(
        f"SELECT a.name, parametr, "
        f"SUM(CAST(sum->>parametr AS NUMERIC)) AS summa, "
        f"SUM(CAST(num->>parametr AS NUMERIC)) AS numbers "
        f"FROM {tabhp} "
        f"INNER JOIN (SELECT name, JSON_OBJECT_KEYS(sum) AS parametr FROM {tabhp} GROUP BY name, parametr) AS a "
        f"ON a.name = {tabhp}.name "
        f"WHERE ('{dt1}' - date >= '0:00:00') AND ('{dt1}' - date < '23:55:00') "
        f"AND average = 'half_path_hour' "
        f"GROUP BY a.name, parametr ORDER BY a.name")
    arr = cursor.fetchall()
    dir_sum = {}
    dir_num = {}
    dir_avg = {}
    name = ""
    dt = str(dt1 - datetime.timedelta(days=1))[0:10]
    i = 0
    for p in arr:
        if p[1] is not None and p[2] is not None and p[3] is not None:
            if i == 0 or name == p[0]:
                dir_sum[p[1]] = float(p[2])
                dir_num[p[1]] = float(p[3])
                if p[3] != 0:
                    dir_avg[p[1]] = float(round(p[2] / p[3], 4))
            else:
                if p[3] != 0:
                    cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)',
                                   (dt, str(name), "average_of_day",
                                    json.dumps(dir_avg, ensure_ascii=False)))
                dir_sum.clear()
                dir_num.clear()
                dir_avg.clear()
                dir_sum[p[1]] = float(p[2])
                dir_num[p[1]] = float(p[3])
                if p[3] != 0:
                    dir_avg[p[1]] = float(round(p[2] / p[3], 4))
            name = p[0]
            i += 1
            if i == len(arr) and p[3] != 0:
                cursor.execute(f'INSERT INTO {tabavr} VALUES(%s, %s, %s, %s)',
                               (dt, str(name), "average_of_day",
                                json.dumps(dir_avg, ensure_ascii=False)))
    conn.commit()


def insertmaxday(dbn, usavr, passavr, hostavr, tabavr, tabmn, dt: str):
    """
    Rolls the hourly maxima of the last day up into daily maxima.
    """
    conn = psycopg2.connect(dbname=dbn, user=usavr, password=passavr, host=hostavr)
    cursor = conn.cursor()
    dt1 = datetime.datetime.strptime(dt, '%Y-%m-%d %H:%M:%S')
    cursor.execute(
        f"SELECT a.name, parametr, MAX(CAST(json_file->>parametr AS NUMERIC)) AS max "
        f"FROM {tabavr} "
        f"INNER JOIN (SELECT name, JSON_OBJECT_KEYS(json_file) AS parametr FROM {tabavr} GROUP BY name, parametr) AS a "
        f"ON a.name = {tabavr}.name "
        f"WHERE ('{dt1}' - date >= '0:00:00') AND ('{dt1}' - date < '23:55:00') "
        f"AND average = 'maximum_in_hour' "
        f"GROUP BY a.name, parametr ORDER BY a.name")
    arr = cursor.fetchall()
    dir_max = {}
    name = ""
    i = 0
    dt = str(dt1 - datetime.timedelta(days=1))[0:10]
    for p in arr:
        if p[1] is not None and p[2] is not None:
            if i == 0 or name == p[0]:
                dir_max[p[1]] = float(p[2])
            else:
                cursor.execute(f'INSERT INTO {tabmn} VALUES(%s, %s, %s, %s)',
                               (dt, str(name), 'maximum_in_day',
                                json.dumps(dir_max, ensure_ascii=False)))
                dir_max.clear()
                dir_max[p[1]] = float(p[2])
            name = p[0]
            i += 1
            if i == len(arr):
                cursor.execute(f'INSERT INTO {tabmn} VALUES(%s, %s, %s, %s)',
                               (dt, str(name), 'maximum_in_day',
                                json.dumps(dir_max, ensure_ascii=False)))
    conn.commit()


def insertminday(dbn, usavr, passavr, hostavr, tabavr, tabmn, dt: str):
    """
    Rolls the hourly minima of the last day up into daily minima.
    """
    conn = psycopg2.connect(dbname=dbn, user=usavr, password=passavr, host=hostavr)
    cursor = conn.cursor()
    dt1 = datetime.datetime.strptime(dt, '%Y-%m-%d %H:%M:%S')
    cursor.execute(
        f"SELECT a.name, parametr, MIN(CAST(json_file->>parametr AS NUMERIC)) AS min "
        f"FROM {tabavr} "
        f"INNER JOIN (SELECT name, JSON_OBJECT_KEYS(json_file) AS parametr FROM {tabavr} GROUP BY name, parametr) AS a "
        f"ON a.name = {tabavr}.name "
        f"WHERE ('{dt1}' - date >= '0:00:00') AND ('{dt1}' - date < '23:55:00') "
        f"AND average = 'minimum_in_hour' "
        f"GROUP BY a.name, parametr ORDER BY a.name")
    arr = cursor.fetchall()
    dir_min = {}
    name = ""
    i = 0
    dt = str(dt1 - datetime.timedelta(days=1))[0:10]
    for p in arr:
        if p[1] is not None and p[2] is not None:
            if i == 0 or name == p[0]:
                dir_min[p[1]] = float(p[2])
            else:
                cursor.execute(f'INSERT INTO {tabmn} VALUES(%s, %s, %s, %s)',
                               (dt, str(name), 'minimum_in_day',
                                json.dumps(dir_min, ensure_ascii=False)))
                dir_min.clear()
                dir_min[p[1]] = float(p[2])
            name = p[0]
            i += 1
            if i == len(arr):
                cursor.execute(f'INSERT INTO {tabmn} VALUES(%s, %s, %s, %s)',
                               (dt, str(name), 'minimum_in_day',
                                json.dumps(dir_min, ensure_ascii=False)))
    conn.commit()
</code></hidden>
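
Below is a minimal sketch of how the hourly and daily roll-up functions above might be called from a scheduler such as cron. The module name ''lib3'', the masked password and the table names (''averages'', ''half_path'', ''min_max_day'') are placeholders invented for this example; the actual values are defined in the automation scripts from the archive.

<code>
import datetime
import lib3  # the library listed above, assuming it is saved as lib3.py

# Placeholder connection settings (dbname/user/host follow the web
# application configuration above, the password is masked).
DB = dict(dbn='averobo', usavr='averobo', passavr='*****', hostavr='localhost')
now = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')

# Once per hour: aggregate the stored 15-minute values into hourly values.
lib3.insertavghour(DB['dbn'], DB['usavr'], DB['passavr'], DB['hostavr'], 'averages', 'half_path', now)
lib3.insertmaxhour(DB['dbn'], DB['usavr'], DB['passavr'], DB['hostavr'], 'averages', now)
lib3.insertminhour(DB['dbn'], DB['usavr'], DB['passavr'], DB['hostavr'], 'averages', now)

# Once per day: aggregate the hourly values into daily values.
lib3.insertavgday(DB['dbn'], DB['usavr'], DB['passavr'], DB['hostavr'], 'averages', 'half_path', now)
lib3.insertmaxday(DB['dbn'], DB['usavr'], DB['passavr'], DB['hostavr'], 'averages', 'min_max_day', now)
lib3.insertminday(DB['dbn'], DB['usavr'], DB['passavr'], DB['hostavr'], 'averages', 'min_max_day', now)
</code>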
  
  
----