Merge pull request #1 from gshau/influx

Influx
pull/2/head
gshau 2017-10-09 09:29:43 -05:00 committed by GitHub
commit ac77dc31ea
8 changed files with 2441 additions and 287 deletions

1
.gitignore vendored 100644
View file

@@ -0,0 +1 @@
.DS*

2354
Grafana/sample.json 100644

Diff is too large to display.

View file

@@ -1,26 +1,77 @@
# ESP8266 driven weather station with Flask webserver frontend.
# ESP8266 driven weather station with a Flask webserver frontend and Influx/Grafana plotting.
There are two main components to this project: the device source in esp8266/ and the webserver in app/. Data on the device can be accessed directly via:\
http://deviceIP
http://deviceIP. Additionally, Influx and Grafana servers can be configured to store and plot historical data.
## Device configuration
For debugging purposes, the device can have firmware uploaded directly from the webpage. It's recommended that after deployment, this feature be removed for security.
Recommend platformio for compiling and distributing the esp8266 source to devices.
I'd recommend platformio for compiling and distributing the esp8266 source to devices.
![Alt text](/screenshots/WXDevice_ESP8266.png?raw=true "Device Webpage")
## Flask frontend
A Flask and SocketIO frontend to the weather station. It can accept multiple stations simultaneously.
To setup:\
$pip -r requirements
To set up:
```
pip install -r requirements
```
To run:
Run station receiver that pushes socketIO packets to flask webserver:\
$python stationServer.py
Run the station receiver, which pushes SocketIO packets to the Flask webserver and also posts the data to an Influx database server. You'll need to change the database server address if it isn't localhost:
Run flask webserver that listens for SocketIO packets:\
$python app.py
```
python stationServerInfluxDB.py
```
In this file, you'll need to specify the Influx server and create a database on it named StationDB.
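For reference, the connection is configured inside stationServerInfluxDB.py roughly like this (host, port, and credentials as in this PR; point influxIP at your Influx server if it isn't local, and make sure the database name matches the one you create):
```
influxIP = '127.0.0.1'  # change this to your Influx server's address
self.client = InfluxDBClient(influxIP, 8086, 'admin', 'admin', 'stationDB')
```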
Run the Flask webserver that listens for SocketIO packets:
```
python app.py
```
The webpage will be located at http://localhost:5000/ and will update whenever a packet is received from the stations.
![Alt text](/screenshots/FlaskWXServer.png?raw=true "Flask Server Webpage")
## Influx and Grafana configuration
I'd recommend installing Influx and Grafana via Docker for minimal headache. Install Docker here: https://www.docker.com/
### InfluxDB setup:
Pull down the influxdb docker images:
```
docker pull influxdb
```
Then run the image as a container named influx. The -p options forward the container's ports to your local machine, and the -v option maps the database storage to a local path (change /local/machine/path/to/db accordingly).
```
docker run -d --name influx -p 8086:8086 -p 8083:8083 -v /local/machine/path/to/db:/var/lib/influxdb -e INFLUXDB_ADMIN_ENABLED=true influxdb
```
Navigate to http://localhost:8083 and create a database named StationDB.
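If you'd rather not click through the admin UI, the same database can be created with the influxdb Python client that's already in the requirements. A minimal sketch, assuming the port mapping above and the same admin/admin credentials the station server uses:
```
from influxdb import InfluxDBClient

# Connect to the Influx container exposed on localhost:8086 and create the station database
client = InfluxDBClient('localhost', 8086, 'admin', 'admin')
client.create_database('StationDB')
print(client.get_list_database())  # StationDB should now appear in the list
```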
### Grafana setup:
As with Influx, pull down the Docker image for Grafana and run it as a container:
```
docker pull grafana/grafana
docker run -d --name grafana -p 3000:3000 grafana/grafana
```
Navigate to http://localhost:3000 and start creating Grafana dashboards for plotting and monitoring your data.
An example JSON file for a sample dashboard is given in Grafana/sample.json.
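The Grafana side can also be scripted through its HTTP API. The sketch below is an untested outline using the requests library: it assumes the default admin/admin login, registers the Influx database as a data source, and imports Grafana/sample.json (exported dashboards sometimes need their id cleared and their data source name adjusted before import):
```
import json
import requests

GRAFANA = 'http://localhost:3000'
AUTH = ('admin', 'admin')  # Grafana's default login; change it after first sign-in

# Register the StationDB Influx database as a Grafana data source
datasource = {
    "name": "StationDB",
    "type": "influxdb",
    "url": "http://localhost:8086",
    "access": "proxy",
    "database": "StationDB",
}
print(requests.post(GRAFANA + '/api/datasources', json=datasource, auth=AUTH).json())

# Import the sample dashboard; clearing "id" lets Grafana assign its own
with open('Grafana/sample.json') as f:
    dashboard = json.load(f)
dashboard['id'] = None
payload = {"dashboard": dashboard, "overwrite": True}
print(requests.post(GRAFANA + '/api/dashboards/db', json=payload, auth=AUTH).json())
```
Once both POSTs succeed, the dashboard should show up at http://localhost:3000 alongside anything you build by hand.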
![Alt text](/screenshots/Grafana.png?raw=true "Sample Grafana page")

View file

@@ -1,23 +1,22 @@
Flask==0.11.1
Flask-SocketIO==2.7.1
Jinja2==2.8
MarkupSafe==0.23
PyYAML==3.12
Werkzeug==0.11.11
argparse==1.2.1
backports-abc==0.4
bokeh==0.10.0
certifi==2016.9.26
click==6.6
Flask==0.11.1
Flask-SocketIO==2.7.1
gevent==1.1.2
greenlet==0.4.10
influxdb==4.1.0
itsdangerous==0.24
Jinja2==2.8
MarkupSafe==0.23
numpy==1.11.1
pandas==0.18.1
python-dateutil==2.5.3
python-engineio==1.0.3
python-socketio==1.6.0
pytz==2016.6.1
PyYAML==3.12
pyzmq==15.4.0
requests==2.11.1
singledispatch==3.4.0.3
@@ -25,4 +24,5 @@ six==1.10.0
socketIO-client==0.7.0
tornado==4.4.1
websocket-client==0.37.0
wsgiref==0.1.2
Werkzeug==0.11.11

View file

@ -1,198 +0,0 @@
import sqlite3
import pandas as pd
import stationDB as st
from jinja2 import Template
from bokeh.plotting import *
from bokeh.resources import INLINE
# from bokeh.util.browser import view
from bokeh.models import HoverTool
from bokeh.embed import components
import time
import datetime
import os.path
class DB:
    def __init__(self,fileName, table):
        self.fileName=fileName
        self.table=table
        self.columnNames=[]
        if not os.path.isfile(self.fileName):
            self.createTable()
        self.getColumnNames()
    def createTable(self):
        con=sqlite3.connect(self.fileName)
        c=con.cursor()
        c.execute("CREATE TABLE %s (timestamp real, timeString text)" % self.table)
        con.close()
    def addColumn(self, name):
        con=sqlite3.connect(self.fileName)
        c=con.cursor()
        type="real"
        c.execute("ALTER TABLE %s ADD COLUMN %s %s" % (self.table, name, type))
        con.close()
    def getColumnNames(self):
        con=sqlite3.connect(self.fileName)
        command="SELECT * from %s" % self.table
        self.df=pd.read_sql_query(command, con)
        self.columnNames=self.df.columns
        con.close()
    def addData(self, packet):
        con=sqlite3.connect(self.fileName)
        c=con.cursor()
        # create column if not present
        for key in packet.keys():
            if key not in self.columnNames:
                self.addColumn(key)
                self.getColumnNames()
        columns=', '.join(packet.keys())
        placeholders = ', '.join('?' * len(packet))
        sql = 'INSERT INTO master ({}) VALUES ({})'.format(columns, placeholders)
        c.execute(sql, packet.values())
        # # command="create table if not exists %s (date real, dateString text, name text, value real)" % name
        # c.execute(command)
        # timeString=datetime.datetime.fromtimestamp(time).strftime('%Y/%m/%d %H:%M:%S')
        # dataTuple=(time, timeString, name, data,)
        # command="INSERT INTO %s VALUES (?, ?, ?, ?)" % name
        # c.execute(command, dataTuple)
        con.commit()
        con.close()
    def loadDB(self,name):
        con=sqlite3.connect(self.fileName)
        command="SELECT * from %s" % name
        self.df=pd.read_sql_query(command, con)
        con.close()
class DBview:
    def __init__(self,db,df,UTCOffset):
        # self.timeRange=timeRange
        self.db = db
        self.df = df
        self.UTCOffset = UTCOffset
    def qp(self,attrList, name, timeRange):
        now=datetime.datetime.now()
        epoch=datetime.datetime.utcfromtimestamp(0)
        tstart=now-datetime.timedelta(days=timeRange)
        tcut=(tstart-epoch).total_seconds()
        dataFrame=self.df[self.df.timestamp>tcut]
        dataFrame['t']=dataFrame.timestamp*1000
        # y1=getattr(self.df,attr1)
        # y2=getattr(self.df,attr2)
        output_file(name+'.html')
        timeString=[datetime.datetime.fromtimestamp(dt).strftime('%Y/%m/%d %H:%M:%S') for dt in dataFrame.timestamp]
        source=ColumnDataSource(data=dataFrame.to_dict('list'))
        TOOLS="resize,hover,crosshair,pan,wheel_zoom,box_zoom,reset,tap,previewsave,box_select,poly_select,lasso_select"
        p=figure(x_axis_type="datetime",tools=TOOLS)
        for key in attrList:
            print key
            p.scatter('t',key, source=source)
        p.select(dict(type=HoverTool)).tooltips=[
            ("Time", "@timeString"),
            ("Value", "@y1"),
            ("Value", "@y2"),
        ]
        show(p)
    def qph(self,attrList, name, timeRange):
        now=datetime.datetime.now()
        epoch=datetime.datetime.utcfromtimestamp(0)
        tstart=now-datetime.timedelta(days=0,hours=timeRange)
        tcut=(tstart-epoch).total_seconds()
        dataFrame=self.df[self.df.timestamp>tcut]
        dataFrame['t']=dataFrame.timestamp*1000
        # y1=getattr(self.df,attr1)
        # y2=getattr(self.df,attr2)
        output_file(name+'.html')
        timeString=[datetime.datetime.fromtimestamp(dt).strftime('%Y/%m/%d %H:%M:%S') for dt in dataFrame.timestamp]
        source=ColumnDataSource(data=dataFrame.to_dict('list'))
        TOOLS="resize,hover,crosshair,pan,wheel_zoom,box_zoom,reset,tap,previewsave,box_select,poly_select,lasso_select"
        p=figure(x_axis_type="datetime",tools=TOOLS)
        for key in attrList:
            print key
            p.scatter('t',key, source=source)
        p.select(dict(type=HoverTool)).tooltips=[
            ("Time", "@timeString"),
            ("Value", "@y1"),
            ("Value", "@y2"),
        ]
        show(p)
        return source
    def qphLive(self,attrList, name, timeRange):
        # attrList=['stationLoadVolt']
        now=datetime.datetime.now()
        epoch=datetime.datetime.utcfromtimestamp(0)
        tstart=now-datetime.timedelta(days=0,hours=timeRange-self.UTCOffset)
        tcut=(tstart-epoch).total_seconds()
        dataFrame=self.df[self.df.timestamp>tcut]
        dataFrame['t']=dataFrame.timestamp*1000 - self.UTCOffset*3600*1000
        output_server(name,url='http://10.0.1.2:5006')
        colors=["red","blue","green","orange","purple","black","gray","magenta","cyan","brown","gold","darkkhaki","darksalmon"]
        timeString=[datetime.datetime.fromtimestamp(dt).strftime('%Y/%m/%d %H:%M:%S') for dt in dataFrame.timestamp]
        source=ColumnDataSource(data=dataFrame.to_dict('list'))
        TOOLS="resize,hover,crosshair,pan,wheel_zoom,box_zoom,reset,tap,previewsave,box_select,poly_select,lasso_select"
        keyList=attrList.keys()
        p={}
        ds={}
        for mainKey in keyList:
            if mainKey == keyList[0]:
                p[mainKey]=figure(x_axis_type="datetime",tools=TOOLS, width=600, height=400, title=mainKey)
            else:
                p[mainKey]=figure(x_axis_type="datetime",tools=TOOLS, width=600, height=400, title=mainKey, x_range=p[keyList[0]].x_range)
            ikey=0
            hover={}
            for key in attrList[mainKey]:
                print key
                # keySource=ColumnDataSource({'x': source.data['t'], 'y': series.values, 'series_name': name_for_display, 'Date': toy_df.index.format()})
                p[mainKey].scatter('t',key, source=source, name=key,fill_color=colors[ikey],line_color=colors[ikey], legend=key)
                hover = p[mainKey].select(dict(type=HoverTool))
                # hover[ikey].renderers=[source.data[key]]
                # hover[ikey].tooltips=tooltips+[("Series",key),("Time","@timeString"), ("Value", "@"+key)]
                hover.tooltips=[("Series",key),("Time","@timeString"), ("Value", "@"+key)]
                # hover.mode = "mouse"
                ikey+=1
            p[mainKey].legend.orientation="top_left"
            renderer = p[mainKey].select(dict(name=key))
            ds[mainKey]=renderer[0].data_source
        # allP = vplot(*p.values())
        # allP = gridplot([p.values()])
        group=lambda flat, size: [flat[i:i+size] for i in range(0,len(flat), size)]
        allP = gridplot(group(p.values(),1))
        show(allP)
        while True:
            print 'updating...'
            self.db=st.DB('data.sdb','master')
            self.db.loadDB('master')
            self.df = self.db.df
            now=datetime.datetime.now()
            tstart=now-datetime.timedelta(days=0,hours=timeRange-self.UTCOffset)
            tcut=(tstart-epoch).total_seconds()
            dataFrame=self.df[self.df.timestamp>tcut]
            dataFrame['t']=dataFrame.timestamp*1000 - self.UTCOffset*3600*1000
            for mainKey in keyList:
                ds[mainKey].data = dataFrame.to_dict('list')
                # print ds.data['stationIRTemp']
                cursession().store_objects(ds[mainKey])
            time.sleep(30)

View file

@@ -1,26 +0,0 @@
import stationDB as st
import numpy as np
import sqlite3
import pandas as pd
from bokeh.plotting import *
from bokeh.models import HoverTool
import time
import datetime
import pytz
db=st.DB('data.sdb','master')
db.loadDB('master')
UTCOffset = -5
dbv=st.DBview(db, db.df, UTCOffset)
attrList={}
attrList['Station Temperature']=['WeatherStationTemperature','WeatherStationHTUTemp','WeatherStationDewpoint']
attrList['Station IR']=['WeatherStationIRTemp','WeatherStationMLXTemp']
attrList['Brightness']=['WeatherStationBrightness']
attrList['Voltage']=['WeatherStationLoad']
attrList['Current']=['WeatherStationCurrent']
attrList['UV']=['WeatherStationUVIndex']
viewHistory=24
dbv.qphLive(attrList, 'DayView', viewHistory)

View file

@@ -11,19 +11,24 @@ import datetime
from crypt import *
import numpy as np
from influxdb import InfluxDBClient
class Station:
    def __init__(self):
        self.name='Rpi'
        self.udpPort=8123
        self.BUF_SIZE=1024
        self.BUF_SIZE=2048
        self.packets={}
        self.packetMean={}
        self.nMeasurements={}
        self.lastUpdate={}
        self.avgFreq=30
        # self.avgFreq=30
        self.lastRelease=datetime.datetime.now()
        # self.secret_key = '1234567890123456'
        influxIP = '127.0.0.1'
        self.client = InfluxDBClient(influxIP, 8086, 'admin', 'admin', 'stationDB')
@@ -64,52 +69,20 @@ class Station:
        print "Message from ", addr, " :", packet
        return packet
    def checkForPacketRelease(self):
        releasePacket=False
        now=datetime.datetime.now()
        releaseTime=0
        if (now-self.lastRelease).seconds > self.avgFreq:
            self.packetMean['timestamp']=time.time()
            releasePacket=True
            releaseTime=float(now.strftime("%s"))
            self.lastRelease=now
            timeString=now.strftime('%Y/%m/%d %H:%M:%S')
            self.packetMean['timeString']=timeString
            self.nMeasurements['timeString']=1
            # reset packet list
            for key in self.packets.keys():
                self.packets[key]=[]
        return (releasePacket,releaseTime)
    def updateMeasurement(self, key, value):
        if key not in self.lastUpdate.keys():
            self.lastUpdate[key]=datetime.datetime.now()
        self.lastUpdate[key]=datetime.datetime.now()
        if key not in self.packets.keys():
            self.packets[key]=value
        self.packets[key]=np.append(self.packets[key],value)
        self.packetMean[key] = np.mean(self.packets[key])
        self.nMeasurements[key] = len(self.packets[key])
#
db=stationDB.DB('data.sdb','master')
#db=stationDB.DB('/root/sio/data.sdb','master')
s=Station()
s.udpPort=9990
s.startUDPListen('0.0.0.0',s.udpPort)
sio=SocketIO('localhost', 5000)
verbose=False
verbose=True #False
readPackets=True
while readPackets:
    rawPacket=s.recvPacket(verbose)
    print rawPacket
    print 'raw Packet: ',rawPacket
    print 'length packet: ', len(rawPacket)
    try:
        sio.emit('dataPacket', rawPacket)
        print 'sent packet'
@@ -119,15 +92,14 @@ while readPackets:
    # rawPacket['name']
    for dataName in rawPacket['data'].keys():
        packet = rawPacket['name']
        key = rawPacket['name'].replace(' ','') + dataName.replace(' ','')
        value = rawPacket['data'][dataName]['value']
        releasePacket=s.updateMeasurement(key,value)
        releasePacket,releaseTime=s.checkForPacketRelease()
        if releasePacket:
            for (key,value) in s.packetMean.items():
                print 'Updating db: ',key,' = ',value
            db.addData(s.packetMean)
            s.packetMean={}
        print 'Updating db: ',key,' = ',value
        now = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S")
        json_body = [{"measurement": dataName, "tags": {"host": packet},"time": now,"fields": {"value": value}}]
        print json_body
        s.client.write_points(json_body)
s.sock.close()

Binary file not shown.
