#
# written by @author ZyzonixDev
# published by ZyzonixDevelopments
# -
# date | Sun Jan 03 2021
# python-v | 3.5.3
# -
# file | core.py
# file-v | 2.0 // updated setupscript and configfile
#
# USING THE FOLLOWING RESOURCE(S):
#
# https://zetcode.com/python/ftp/
# https://pythontic.com/ftplib/ftp/nlst
# https://www.thepythoncode.com/article/download-and-upload-files-in-ftp-server-using-python
# https://thispointer.com/compare-get-differences-between-two-lists-in-python/
#
#--------------------------------------
from datetime import date, datetime
from configparser import ConfigParser
import sys
import threading
import smbus
import sqlite3
import ftplib
import traceback
import os
import station.air_stats as ais
import station.air_quality as aiq
import sqldb as dbHandler
import station.sys_stats as sysStats
# SYSTEM SERVICE:
# name: station.service (located in: /lib/systemd/system)
# twinfile in /home/pi/weatherstation/sservice called station.service
# check status: sudo systemctl status station.service
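# illustrative sketch only (the real unit file ships separately in /lib/systemd/system) -
# a minimal station.service could look roughly like:
#   [Unit]
#   Description=weatherstation core service
#   [Service]
#   ExecStart=/usr/bin/python3 /home/pi/weatherstation/core.py
#   Restart=always
#   [Install]
#   WantedBy=multi-user.target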
# writing console output to console and logfile
class LogWriter(object):
    def __init__(self, *files):
        # retrieving files / output locations
        self.files = files

    def write(self, obj):
        # getting files (logfile / console (as file))
        for file in self.files:
            file.write(obj)
            file.flush()

    def flush(self):
        # flushing written lines/files
        for file in self.files:
            file.flush()

# data import / measurement / saving
class handleMeasurement(object):
    # getting db formatted date/time
    def getTime(self):
        curTime = str(datetime.now().strftime("%Y-%m-%d_%H-%M-%S"))
        return curTime

    # retrieving data
    def retrieveData(self):
        # creating/initializing next autorun
        threading.Timer(self.MES_TIME, handleMeasurement.retrieveData, [self]).start()
        print(self.getTime(), "installed next autorun --> collecting data")
        self.PARTICULATE_MATTER_MES = self.PARTICULATE_MATTER_MES + 1
        dbConnection = dbHandler.getDBConnection(self)
        try:
            # getting values
            temperature = ais.getTemperature()
            # splitting temperature array (see station.air_stats.getTemperature() for further information)
            f_temperature = temperature[0]
            raw_temperature = temperature[1]
            humidity = ais.getHumidity()
            pressure = ais.getPressure()
            print(self.getTime(), "collected air_stats data successfully: temperature (°C):", f_temperature, ", humidity (%):", humidity, ", pressure (hPa):", pressure)
            cpu_usage, ram_usage, cpu_temp = sysStats.getSysStats(self)
            print(self.getTime(), "collected system data successfully: cpu-usage (%):", cpu_usage, ", ram-usage (%):", ram_usage, ", cpu-temperature (°C):", cpu_temp)
            # building the sqlite value string manually (a previous version resulted in an error [str max 3 values])
            # SQL converts the values from string back to their column types when inserting into the db
            # note: self is a Core instance here, so handleMeasurement.getTime(self) is called explicitly
            # to get the db-formatted timestamp instead of Core.getTime()'s console format
            cmdbuilder = "'" + handleMeasurement.getTime(self) + "', "
            cmdbuilder += str(f_temperature) + ", "
            cmdbuilder += str(raw_temperature) + ", "
            cmdbuilder += str(humidity) + ", "
            cmdbuilder += str(pressure) + ", "
            cmdbuilder += str(cpu_usage) + ", "
            cmdbuilder += str(ram_usage) + ", "
            cmdbuilder += str(cpu_temp)
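            # illustrative only (hypothetical readings): the finished value string looks like
            # '2021-01-03_12-00-00', 21.5, 21.53, 45.2, 1013.25, 3.1, 24.0, 41.2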
            # storing data
            dbHandler.insertData(self, dbConnection, self.airstable, cmdbuilder)
        except:
            print(self.getTime(), "something went wrong while collecting airstats data")
            traceback.print_exc()
        if self.PARTICULATE_MATTER_MES == self.AIQ_TIME:
            try:
                # import class (separate self statement required)
                airquality = aiq.AIR_QUALITY()
                pm025, pm100 = airquality.query()
                print(self.getTime(), "collected air_quality data successfully: pm2.5:", pm025, "pm10:", pm100)
                cmdbuilder = "'" + handleMeasurement.getTime(self) + "', "
                cmdbuilder += str(pm025) + ", "
                cmdbuilder += str(pm100)
                # storing data
                dbHandler.insertData(self, dbConnection, self.airqtable, cmdbuilder)
            except:
                print(self.getTime(), "something went wrong while collecting airquality data")
                traceback.print_exc()
            self.PARTICULATE_MATTER_MES = 0
        dbHandler.closeDBConnection(self, dbConnection)
        #print("\n")

# syncing db files to smb share
class dbUpload(object):
    # syncing missing files
    def syncMissing(self, mfileList, ftpConnection):
        leftToSync = mfileList
        try:
            # iterating over a copy so removing items from leftToSync does not skip entries
            for sfile in list(leftToSync):
                # checking for daily file
                if sfile == str(date.today()) + ".db":
                    print(self.getTime(), "syncing daily db to daily-folder")
                    leftToSync.remove(sfile)
                    ftpConnection.cwd("daily/")
                    try:
                        for file in ftpConnection.nlst():
                            ftpConnection.delete(file)
                    except:
                        pass
                    uploadCMD = "STOR " + sfile
                    # uploading to ftp share
                    with open(self.baseFilePath + "db/" + sfile, "rb") as file:
                        ftpConnection.storbinary(uploadCMD, file)
                    ftpConnection.cwd("..")
                else:
                    print(self.getTime(), "syncing the following database to ftp share: " + sfile)
                    leftToSync.remove(sfile)
                    try:
                        ftpConnection.delete(sfile)
                    except:
                        pass
                    uploadCMD = "STOR " + sfile
                    # uploading to ftp share
                    with open(self.baseFilePath + "db/" + sfile, "rb") as file:
                        ftpConnection.storbinary(uploadCMD, file)
            # checking if all databases have been synced, if not --> rerun
            # (previously some db's were left out, most likely because the list was modified while
            # iterating over it; iterating over a copy plus this rerun prevents that failure)
            if len(leftToSync) != 0:
                self.syncFinished = False
                dbUpload.syncMissing(self, leftToSync, ftpConnection)
                return
            else:
                self.syncFinished = True
                # confirming that sync is finished
                print(self.getTime(), "syncing successful - all databases have been synced\n")
        except:
            print(self.getTime(), "something went wrong - was not able to sync missing databases\n")
            self.syncFinished = False
            traceback.print_exc()
        self.syncFinished = True
        ftpConnection.quit()
    # checking if ftp db storage equals local db storage
    def checkContent(self, rFileList, ftpConnection):
        # retrieving local db storage + filesizes
        localFileList = []
        localFileSizeList = {}
        for file in os.listdir(self.baseFilePath + "db/"):
            localFileList.append(file)
            localFileSizeList[file] = os.path.getsize(self.baseFilePath + "db/" + file)
        # retrieving remote db storage + filesizes
        remoteFileList = rFileList
        remoteFileSizeList = {}
        # switching to binary mode so SIZE returns the byte count
        ftpConnection.voidcmd('TYPE I')
        for file in remoteFileList:
            try:
                remoteFileSizeList[file] = ftpConnection.size(file)
            # preventing error --> directories like the daily-folder have no size
            except:
                pass
        # file sync list
        toSync = []
        # comparing filesizes on local and remote storage
        for file in localFileSizeList.keys():
            # case 1: file doesn't exist on ftp share
            if file not in remoteFileSizeList.keys():
                toSync.append(file)
            # case 2: file exists in both storages but the sizes differ
            elif not remoteFileSizeList[file] == localFileSizeList[file]:
                toSync.append(file)
        # returning the list of files that still have to be synced
        return toSync
    # getting available files
    def getFTPContent(self, ftpConnection):
        # changing directory to specified db storage
        ftpConnection.cwd(self.FTPshareLoc)
        # retrieving available files
        existingFiles = []
        for filename in ftpConnection.nlst():
            existingFiles.append(filename)
        print(self.getTime(), "retrieved existing files of '" + self.FTPshareLoc + "' (number of existing files: " + str(len(existingFiles)) + ")")
        return existingFiles
        #dbUpload.checkContent(self, existingFiles, ftpConnection)

    # initializes an ftp connection
    def getFTPConnection(self):
        if not self.syncFinished:
            print(self.getTime(), "the last syncing process hasn't finished yet\n")
            return False, 0
        try:
            ftpConnection = ftplib.FTP(self.FTPServerIP)
            # logging in to the ftp share
            ftpConnection.login(self.FTPuname, self.FTPpwd)
            print(self.getTime(), "ftp connection successful, welcome message: " + ftpConnection.getwelcome())
            # starting syncing process
            self.syncFinished = False
            return True, ftpConnection
        except:
            print(self.getTime(), "something went wrong - was not able to initialize an ftp connection to: " + self.FTPServerIP + " - server may be offline\n")
            # prints stack trace
            #traceback.print_exc()
            return False, 0

    # validates all databases on ftp shares (preventing corrupted dbs)
    # sync handler
    def handleSync(self):
        threading.Timer(self.SYNC_TIME, dbUpload.handleSync, [self]).start()
        print(self.getTime(), "scheduled next autorun for syncing databases to ftp share")
        connectionEstablished, ftpConnection = dbUpload.getFTPConnection(self)
        if connectionEstablished:
            existingFiles = dbUpload.getFTPContent(self, ftpConnection)
            leftToSync = dbUpload.checkContent(self, existingFiles, ftpConnection)
            dbUpload.syncMissing(self, leftToSync, ftpConnection)

# core class - sensor setup, data controlling, database initialization/setup, threading setup
class Core(object):
    # console time service
    def getTime(self):
        curTime = "[" + str(datetime.now().strftime("%H:%M:%S")) + "]"
        return curTime

    # writing console output to the logfile as well
    def writeLog(self):
        self.logFile = open(self.baseFilePath + "logs/" + str(date.today()) + "_" + str(datetime.now().strftime("%H-%M-%S")) + "_log.txt", "w")
        sys.stdout = LogWriter(sys.stdout, self.logFile)

    def __init__(self):
        # saving resources globally (like public) // retrieving missing values from the config file
        configImport = ConfigParser(comment_prefixes='/', allow_no_value=True)
        # os.getcwd() returns the execution directory
        configImport.read(os.getcwd() + "/setup/config.ini")
        # importing config data
        self.baseFilePath = configImport["DATABASE"]["baseFilePath"]
        self.sync_enabled = configImport["DATABASE"].getboolean("sync_enabled")
        self.FTPServerIP = configImport["DATABASE"]["FTPServerIP"]
        self.FTPshareLoc = configImport["DATABASE"]["FTPShareLoc"]
        self.MES_TIME = int(configImport["CONFIGURATION"]["seconds"])
        self.SYNC_TIME = int(configImport["CONFIGURATION"]["sync_time"])
        self.AIQ_TIME = int(configImport["CONFIGURATION"]["air_quality_time"])
        self.FTPuname = configImport["DATABASE"]["uname"]
        self.FTPpwd = configImport["DATABASE"]["pwd"]
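        # illustrative sketch of setup/config.ini - the section and key names above are the real ones,
        # the values below are placeholders only:
        #   [DATABASE]
        #   baseFilePath = /home/pi/weatherstation/
        #   sync_enabled = true
        #   FTPServerIP = 192.168.1.10
        #   FTPShareLoc = /weatherstation/db/
        #   uname = ftpuser
        #   pwd = secret
        #   [CONFIGURATION]
        #   seconds = 60
        #   sync_time = 3600
        #   air_quality_time = 2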
        self.syncFinished = True
        # initializing log
        self.writeLog()
        # counter for protection of the particulate matter sensor (measurement only every 2 min)
        self.PARTICULATE_MATTER_MES = 0
        print("\n" + self.getTime(), "running core application \n")
        # running measurement
        handleMeasurement.retrieveData(self)
        if self.sync_enabled:
            # initializing db sync with 5 seconds delay (prevents corrupted databases through double access) if enabled
            threading.Timer(5, dbUpload.handleSync, [self]).start()
        else:
            print(self.getTime(), "server synchronisation disabled - storing databases on local storage")

if __name__ == '__main__':
    Core()
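
# usage note: run this file directly, e.g. "python3 core.py"; it expects setup/config.ini
# relative to the current working directory (see configImport.read() above)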