fieldstations/dashboard/views.py

import decimal
import json

import numpy as np
import psycopg2
import pymssql
from django.db import connection
from django.http import HttpResponse
from django.shortcuts import render

from dashboard.models import Instrument

# Create your views here.
# conn = pymssql.connect(host='192.168.92.79', user='sa', port='1433', password='datalib@bdc79', database='CASNW_LNDB',
#                        charset='GBK', autocommit=True)


def index(request):
    return render(request, 'dashboard/index.html')


def station_point_info(request):
    # Station names and coordinates for the overview map.
    with connection.cursor() as cursor:
        cursor.execute('select * from dashboard_station')
        row = cursor.fetchall()
    geoCoordMap = dict()
    data = []
    for r in row:
        o = dict()
        if r[5] and r[6]:  # only stations with both longitude and latitude
            geoCoordMap[r[2]] = [r[5], r[6]]
            o['name'] = r[2]
            o['value'] = 50
            data.append(o)
    return HttpResponse(json.dumps({
        "status": '1',
        "geoCoordMap": geoCoordMap,
        "data": data,
    }))
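

# The raw-SQL views in this module address columns by position (r[0], r[2], ...).
# A small helper in the style of the Django docs' dictfetchall() would make the
# column access self-documenting; this is only a sketch and none of the views
# below currently use it.
def dictfetchall(cursor):
    """Return all rows from a cursor as a list of dicts keyed by column name."""
    columns = [col[0] for col in cursor.description]
    return [dict(zip(columns, row)) for row in cursor.fetchall()]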


def station_list(request):
    with connection.cursor() as cursor:
        cursor.execute('select * from dashboard_station')
        row = cursor.fetchall()
    res = []
    for r in row:
        o = dict()
        o['id'] = str(r[0])
        o['shortname'] = r[2]
        res.append(o)
    return render(request, 'dashboard/station_list.html', {'res': res})


def station_detail(request, pk):
    with connection.cursor() as cursor:
        # Parameterised query instead of interpolating pk straight into the SQL string.
        cursor.execute("select * from dashboard_station where id = %s", [pk])
        row = cursor.fetchall()
    res = []
    geoCoordMap = dict()
    data = []
    for r in row:
        o = dict()
        geoCoordMap[r[2]] = [r[5], r[6]]
        o['name'] = r[2]
        o['value'] = 100
        data.append(o)
        o['id'] = str(r[0])
        o['r_name'] = r[1]
        o['shortname'] = r[2]
        o['type'] = r[3]
        o['geodesc'] = r[4]
        o['lon'] = r[5]
        o['lat'] = r[6]
        o['remarks'] = r[7]
        o['photopath'] = r[8]
        o['enname'] = r[9]
        res.append(o)
    return HttpResponse(json.dumps({
        'res': res, 'geoCoordMap': geoCoordMap, 'data': data
    }))


def qlz_instrument_distribution(request):
    with connection.cursor() as cursor:
        # cursor.execute("SELECT CAST (D.NODE_ID_id AS NVARCHAR(MAX)) id,D.NODE_NAME,LOWER (D.NODE_TYPE),D.NODE_LEVEL,O.LONGTITUDE,O.LATITUDE,O.STR_LONGTITUDE,O.STR_LATITUDE FROM dashboard_view_sfd_tree D LEFT JOIN dashboard_org_node O ON CAST (O.ID AS VARCHAR(32)) = CAST (D.NODE_ID_id AS VARCHAR(32)) ORDER BY ID_PATH")
        cursor.execute("select id,name,lon,lat from dashboard_instrument where station = 'QLZ'")
        row = cursor.fetchall()
    res = []
    geoCoordMap = dict()
    data = []
    for r in row:
        o = dict()
        if r[2] and r[3]:  # only instruments with both longitude and latitude
            geoCoordMap[r[1]] = [r[2], r[3]]
            # geoCoordMap[r[1]] = [r[4].to_eng_string(), r[5].to_eng_string()]
            o['name'] = r[1]
            o['value'] = 50
            data.append(o)
    return HttpResponse(json.dumps({
        "status": 1,
        'geoCoordMap': geoCoordMap, 'data': data,
    }))


def qlz_observation_data(request):
    # Full list of QLZ data tables (not used below; kept for reference).
    data_list = ['QLZ_AROU_ARYK_D', 'QLZ_AROU_HLGX2_4055D', 'QLZ_BY1_4650_BY1_D', 'QLZ_BY2_4450_BY2_D',
                 'QLZ_BY3_4144_BY3_D', 'QLZ_BY4_3850_BY4_D', 'QLZ_BY_DFIR_BYDF_D', 'QLZ_HCH_3560_HC_D',
                 'QLZ_HLGX1_4310_HLGX2_D', 'QLZ_HLGX2_4055_HLGX2_4055D', 'QLZ_HLGX2_CR300_HLGX2_4055D',
                 'QLZ_HLGX3_3724_HLGX1_D', 'QLZ_HLGX_STG2_CR300_STG2_3509D', 'QLZ_HLGX_STG3_CR300_STG3_3840D',
                 'QLZ_HLGX_STG4_CR300_HLGX2_4055D', 'QLZ_JYL_3846_JYL_D', 'QLZ_NCYK_4050_NCYK_D',
                 'QLZ_SNYK_4203_SNYK_D', 'QLZ_STG1_CR300_STG1_3193D', 'QLZ_STG1_CR300_STG1_3193D',
                 'QLZ_STG3_CR300_STG3_3840D', 'QLZ_STG5_4405_STG5_D', 'QLZ_YL_4156_YL1_D']
    date_list = [2016, 2017, 2018, 2019, 2020, 2021, 2022]
    by3_4144_by3_d_data = []
    by4_3850_by4_data = []
    by1_4650_by1_d_data = []
    by2_4450_by2_d_data = []
    by_dfir_bydf_data = []
    # Count the records per year in each table; d_l comes from the literal
    # date_list above, so plain string formatting carries no injection risk.
    with connection.cursor() as cur_by3_4144_by3_d:
        for d_l in date_list:
            cur_by3_4144_by3_d.execute("select count(*) from QLZ_BY3_4144_BY3_D where TmStamp like '%s%%' " % (d_l))
            row_by3_4144_by3_d = cur_by3_4144_by3_d.fetchall()
            for r_b_4_b in row_by3_4144_by3_d:
                by3_4144_by3_d_data.append(r_b_4_b[0])
    with connection.cursor() as cur_by4_3850_by4_d:
        for d_l in date_list:
            cur_by4_3850_by4_d.execute("select count(*) from QLZ_BY4_3850_BY4_P where TmStamp like '%s%%' " % (d_l))
            row_by4_3850_by4_d = cur_by4_3850_by4_d.fetchall()
            for r_b_3_b_d in row_by4_3850_by4_d:
                by4_3850_by4_data.append(r_b_3_b_d[0])
    with connection.cursor() as cur_by1_4650_by1_d:
        for d_l in date_list:
            cur_by1_4650_by1_d.execute("select count(*) from QLZ_BY1_4650_BY1_P where TmStamp like '%s%%' " % (d_l))
            row_by1_4650_by1_d = cur_by1_4650_by1_d.fetchall()
            for r_b_4650_b_d in row_by1_4650_by1_d:
                by1_4650_by1_d_data.append(r_b_4650_b_d[0])
    with connection.cursor() as cur_by2_4450_by2_d:
        for d_l in date_list:
            cur_by2_4450_by2_d.execute("select count(*) from QLZ_BY2_4450_BY2_P where TmStamp like '%s%%' " % (d_l))
            row_by2_4450_by2_d = cur_by2_4450_by2_d.fetchall()
            for r_b_4450_b_d in row_by2_4450_by2_d:
                by2_4450_by2_d_data.append(r_b_4450_b_d[0])
    with connection.cursor() as cur_by_dfir_bydf_d:
        for d_l in date_list:
            cur_by_dfir_bydf_d.execute("select count(*) from QLZ_BY_DFIR_BYDF_P where TmStamp like '%s%%' " % (d_l))
            row_by_dfir_bydf_d = cur_by_dfir_bydf_d.fetchall()
            for r_b_d_b_d in row_by_dfir_bydf_d:
                by_dfir_bydf_data.append(r_b_d_b_d[0])
    # print(arou_aryk_d_data,arou_hlgx2_4055d_data,by1_4650_by1_d_data,by2_4450_by2_d_data)
    # print(list(np.array(arou_aryk_d_data)+np.array(arou_hlgx2_4055d_data)+np.array(by1_4650_by1_d_data)+np.array(by2_4450_by2_d_data)))
    # Cumulative record counts per year for each station.
    by1_4650_by1_d_data_new = np.array(by1_4650_by1_d_data).cumsum().tolist()
    by2_4450_by2_d_data_new = np.array(by2_4450_by2_d_data).cumsum().tolist()
    by3_4144_by3_d_data_new = np.array(by3_4144_by3_d_data).cumsum().tolist()
    by4_3850_by4_data_new = np.array(by4_3850_by4_data).cumsum().tolist()
    by_dfir_bydf_data_new = np.array(by_dfir_bydf_data).cumsum().tolist()
    return HttpResponse(json.dumps({
        "status": "1",
        "by3_4144_by3_d_data": by3_4144_by3_d_data_new,
        "by4_3850_by4_data": by4_3850_by4_data_new,
        "by1_4650_by1_d_data": by1_4650_by1_d_data_new,
        "by2_4450_by2_d_data": by2_4450_by2_d_data_new,
        "by_dfir_bydf_data": by_dfir_bydf_data_new,
        "date_list": date_list,
    }))
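

# The five per-table loops in qlz_observation_data are structurally identical.
# A possible consolidation is sketched below; count_rows_per_year is a
# hypothetical helper name and is not called by the view above. The table name
# must come from a trusted, hard-coded list because it cannot be passed as a
# query parameter.
def count_rows_per_year(table, years):
    """Return the number of rows in `table` for each year in `years`."""
    counts = []
    with connection.cursor() as cursor:
        for year in years:
            # Mirrors the original filter: rows whose TmStamp starts with the year.
            cursor.execute("select count(*) from %s where TmStamp like '%s%%'" % (table, year))
            counts.append(cursor.fetchone()[0])
    return counts
    # e.g. by1 = count_rows_per_year('QLZ_BY1_4650_BY1_P', date_list)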


def weather_data_list_min_30(request):
    # Latest 50 records from the QLZ_NCYK_4050_NCYK_M table.
    with connection.cursor() as cursor:
        cursor.execute(
            "SELECT top(50) * FROM QLZ_NCYK_4050_NCYK_M ORDER BY TmStamp desc ")
        row = cursor.fetchall()
    res = []
    for r in row:
        o = dict()
        o['tmstamp'] = r[0].strftime("%Y-%m-%d %H:%M")
        o['rainsnow'] = r[14]
        o['ta'] = r[4]
        o['rh'] = r[5]
        o['ws'] = r[6]
        o['wd'] = r[7]
        res.append(o)
    return HttpResponse(json.dumps({
        "status": "1",
        "res": res
    }))


def hlgx3_3724_hlgx1_p(request):
    with connection.cursor() as cursor:
        cursor.execute(
            "SELECT top(50) * FROM QLZ_HLGX3_3724_HLGX1_M ORDER BY TmStamp desc ")
        row = cursor.fetchall()
    res = []
    for r in row:
        o = dict()
        o['tmstamp'] = r[0].strftime("%Y-%m-%d %H:%M")
        o['rainsnow'] = r[14]
        o['ta'] = r[4]
        o['rh'] = r[5]
        o['ws'] = r[6]
        o['wd'] = r[7]
        res.append(o)
    return HttpResponse(json.dumps({
        "status": "1",
        "res": res
    }))


def by_dfir_bydf_p(request):
    # NOTE: despite the view name, this reads from QLZ_BY1_4650_BY1_M.
    with connection.cursor() as cursor:
        cursor.execute(
            "SELECT top(50) * FROM QLZ_BY1_4650_BY1_M ORDER BY TmStamp desc ")
        row = cursor.fetchall()
    res = []
    for r in row:
        o = dict()
        o['tmstamp'] = r[0].strftime("%Y-%m-%d %H:%M")
        o['rainsnow'] = r[14]
        o['ta'] = r[4]
        o['rh'] = r[5]
        o['ws'] = r[6]
        o['wd'] = r[7]
        res.append(o)
    return HttpResponse(json.dumps({
        "status": "1",
        "res": res
    }))
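

# The three *_M views above differ only in the table they query. A shared
# helper along these lines could remove the duplication; latest_rows is a
# hypothetical name, the column positions are copied from the loops above,
# and no view currently calls it. Table names must come from trusted,
# hard-coded values because they cannot be passed as query parameters.
def latest_rows(table, limit=50):
    """Return the newest `limit` rows of `table` as simplified dicts."""
    with connection.cursor() as cursor:
        cursor.execute("SELECT top(%d) * FROM %s ORDER BY TmStamp desc" % (limit, table))
        rows = cursor.fetchall()
    res = []
    for r in rows:
        res.append({
            'tmstamp': r[0].strftime("%Y-%m-%d %H:%M"),
            'rainsnow': r[14],
            'ta': r[4],
            'rh': r[5],
            'ws': r[6],
            'wd': r[7],
        })
    return res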


def index_video_surveillance_list(request):
    return render(request, 'dashboard/video_surveillance_list.html')


# def qlz_instrument_statistics(request):
#     stg = Instrument.objects.filter(name__istartswith=)
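

# A possible completion of the commented-out statistics view above, sketched
# under assumptions not present in the original: the 'STG' name prefix is
# guessed from the variable name stg, the Instrument.station field is assumed
# to match the station column used in qlz_instrument_distribution, and the
# JSON response shape simply follows the other views in this module.
def qlz_instrument_statistics(request):
    stg = Instrument.objects.filter(name__istartswith='STG').count()  # hypothetical prefix
    total = Instrument.objects.filter(station='QLZ').count()  # assumed field name
    return HttpResponse(json.dumps({'status': '1', 'stg': stg, 'total': total}))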