Fixed some issues; store dates in UTC.

This commit is contained in:
Tiberiu Chibici 2020-04-18 23:52:27 +03:00
parent 6230d8392c
commit 4c010f2174
11 changed files with 48 additions and 54 deletions

View File

@ -1,17 +1,20 @@
import logging
import signal
from datetime import datetime, timedelta
from threading import Event
from apscheduler.schedulers.blocking import BlockingScheduler
import config
import database
import signal
from apscheduler.schedulers.blocking import BlockingScheduler
from threading import Event
from plugins.system.cpu_plugin import CpuPlugin
from plugins.system.memory_plugin import MemoryPlugin
from plugins.system.disk_plugin import DiskIOPlugin, DiskUsagePlugin
from plugins.system.network_plugin import NetworkPlugin
from plugins.system.temperatures_plugin import TemperaturesPlugin
from plugins.system.ping_plugin import PingPlugin
from plugins.finance.stocks_plugin import StocksPlugin
from plugins.finance.robor_plugin import RoborPlugin
import logging
from plugins.finance.stocks_plugin import StocksPlugin
from plugins.system.cpu_plugin import CpuPlugin
from plugins.system.disk_plugin import DiskIOPlugin, DiskUsagePlugin
from plugins.system.memory_plugin import MemoryPlugin
from plugins.system.network_plugin import NetworkPlugin
from plugins.system.ping_plugin import PingPlugin
from plugins.system.temperatures_plugin import TemperaturesPlugin
class Collector(object):
@ -41,8 +44,11 @@ class Collector(object):
return models
def schedule_plugins(self):
start_date = datetime.now() + timedelta(seconds=10)
for plugin in self.plugins:
self.scheduler.add_job(plugin.execute, 'interval', seconds=plugin.get_interval())
self.scheduler.add_job(plugin.execute, 'interval',
seconds=plugin.get_interval(),
start_date=start_date)
def run(self):
logging.basicConfig()
@ -53,22 +59,18 @@ class Collector(object):
database.DB.create_tables(models)
self.schedule_plugins()
# signal.signal(signal.SIGHUP, self.abort)
# signal.signal(signal.SIGTERM, self.abort)
# signal.signal(signal.SIGINT, self.abort)
print(f'Started.')
logging.info('Started.')
try:
self.scheduler.start()
except (KeyboardInterrupt, SystemExit):
pass
print(f'Stopped.')
def abort(self, signum, frame):
print(f'Received signal {signum}, aborting...')
self.event.set()
logging.info(f'Stopped.')
if __name__ == "__main__":
try:
Collector().run()
except BaseException as ex:
logging.critical("Unhandled exception.", exc_info=ex)

View File

@ -19,7 +19,7 @@ DEFAULT_INTERVAL = 30
# will create a PooledMySQLDatabase instance for the local MySQL database my_db with max_connections set to 20 and a
# stale_timeout setting of 300 seconds.
DATABASE_URL = 'sqlite:///data.db'
DATABASE_URL = 'postgresql://system_metrics_collector:theMetrixWriteer2123@localhost:5432/system_metrics'
# Plugin configuration
@ -55,7 +55,8 @@ PING_HOSTS = [
'192.168.0.1',
'1.1.1.1',
'google.com',
'bing.com'
'bing.com',
'tibich.com'
]
### Stocks

View File

@ -11,7 +11,7 @@ from plugins.plugin import Plugin
class Robor(BaseModel):
date = DateField(index=True, default=datetime.date.today(), null=False)
date = DateField(index=True, default=datetime.date.today, null=False)
field = TextField(null=False)
value = FloatField(null=False)

View File

@ -10,7 +10,7 @@ import yfinance as yf
class Stocks(BaseModel):
date = DateTimeField(index=True, default=datetime.datetime.now(), null=False)
date = DateTimeField(index=True, default=datetime.datetime.utcnow, null=False)
ticker = TextField(null=False)
label = TextField(null=False)
value_open = FloatField(null=False)
@ -29,6 +29,7 @@ class StocksPlugin(Plugin):
for ticker, label in config.STOCKS_TICKERS.items():
# Get last existing date
latest_date = Stocks.select(Stocks.date) \
.where(Stocks.ticker == ticker) \
.order_by(Stocks.date.desc()) \
.limit(1) \
.scalar()
@ -51,6 +52,6 @@ class StocksPlugin(Plugin):
entry.value_high = row.High
entry.value_low = row.Low
entry.save()
print(model_to_dict(entry))
except BaseException as e:
print(e)

View File

@ -10,7 +10,7 @@ from plugins.plugin import Plugin
class Cpu(BaseModel):
time = DateTimeField(index=True, default=datetime.now)
time = DateTimeField(index=True, default=datetime.utcnow)
cpu = SmallIntegerField(null=True)
idle_pct = FloatField(null=False)
user_pct = FloatField(null=False)

View File

@ -11,7 +11,7 @@ from plugins.plugin import Plugin
class DiskUsage(BaseModel):
time = DateTimeField(index=True, default=datetime.now)
time = DateTimeField(index=True, default=datetime.utcnow)
partition = TextField(null=False)
mountpoint = TextField(null=False)
total = BigIntegerField(null=False)
@ -20,7 +20,7 @@ class DiskUsage(BaseModel):
class DiskIO(BaseModel):
time = DateTimeField(index=True, default=datetime.now)
time = DateTimeField(index=True, default=datetime.utcnow)
disk = TextField(null=True)
read_count = FloatField(null=False) # all values are per second
write_count = FloatField(null=False)

View File

@ -9,7 +9,7 @@ from plugins.plugin import Plugin
class Memory(BaseModel):
time = DateTimeField(index=True, default=datetime.now)
time = DateTimeField(index=True, default=datetime.utcnow)
total = BigIntegerField(null=False)
available = BigIntegerField(null=False)
used = BigIntegerField(null=False)

View File

@ -11,7 +11,7 @@ from plugins.plugin import Plugin
class NetworkIO(BaseModel):
time = DateTimeField(index=True, default=datetime.now)
time = DateTimeField(index=True, default=datetime.utcnow)
nic = TextField(null=True)
packets_sent = FloatField(null=False) # all values are per second
packets_recv = FloatField(null=False)

View File

@ -1,4 +1,4 @@
import asyncio
import subprocess
import re
import subprocess
from datetime import datetime
@ -13,7 +13,7 @@ from plugins.plugin import Plugin
class Ping(BaseModel):
time = DateTimeField(index=True, default=datetime.now)
time = DateTimeField(index=True, default=datetime.utcnow)
host = TextField(null=False)
ping = FloatField(null=True) # null = timeout or error
@ -22,18 +22,15 @@ class PingPlugin(Plugin):
models = [Ping]
def __init__(self):
self.__timeout = config.PING_INTERVAL // 3
self.__timeout = 20
def get_interval(self):
return config.PING_INTERVAL
async def do_ping(self, host):
def do_ping(self, host):
command = ['ping', '-c', '1', '-W', str(self.__timeout), host]
proc = await asyncio.create_subprocess_shell(' '.join(command),
stdout=asyncio.subprocess.PIPE)
stdout,_ = await proc.communicate()
stdout = stdout.decode()
proc = subprocess.run(command, stdout=subprocess.PIPE)
stdout = proc.stdout.decode()
entry = Ping()
entry.host = host
@ -45,16 +42,6 @@ class PingPlugin(Plugin):
entry.save()
async def execute_internal(self):
await asyncio.gather(*[self.do_ping(host) for host in config.PING_HOSTS])
def execute(self):
if getattr(asyncio, 'run', None) is not None:
# Python 3.7+
asyncio.run(self.execute_internal())
else:
loop = asyncio.get_event_loop()
loop.run_until_complete(self.execute_internal())
loop.close()
for host in config.PING_HOSTS:
self.do_ping(host)

View File

@ -11,7 +11,7 @@ from plugins.plugin import Plugin
class Temperatures(BaseModel):
time = DateTimeField(index=True, default=datetime.now)
time = DateTimeField(index=True, default=datetime.utcnow)
sensor = TextField(null=False)
sensor_label = TextField(null=False)
current = FloatField(null=False) # all values are per second

View File

@ -1,6 +1,9 @@
apscheduler
peewee
# used for databases
psycopg2-binary
# Used in most system plugins
psutil