First commit
							parent
							
								
									d894c3db2a
								
							
						
					
					
						commit
						34c17fe452
					
				| 
						 | 
				
			
			@ -129,3 +129,4 @@ dmypy.json
 | 
			
		|||
# Pyre type checker
 | 
			
		||||
.pyre/
 | 
			
		||||
 | 
			
		||||
.idea/
 | 
			
		||||
| 
						 | 
				
			
			@ -0,0 +1,3 @@
 | 
			
		|||
database_migrate: False
 | 
			
		||||
output_image_path: /var/www/downloads/speedgraph.png
 | 
			
		||||
output_txt_path: /var/www/downloads/speeds.txt
 | 
			
		||||
| 
						 | 
				
			
			@ -0,0 +1,31 @@
 | 
			
		|||
version: 1
 | 
			
		||||
disable_existing_loggers: False
 | 
			
		||||
formatters:
 | 
			
		||||
    simple:
 | 
			
		||||
        format: "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
 | 
			
		||||
 | 
			
		||||
handlers:
 | 
			
		||||
    console:
 | 
			
		||||
        class: logging.StreamHandler
 | 
			
		||||
        level: DEBUG
 | 
			
		||||
        formatter: simple
 | 
			
		||||
        stream: ext://sys.stdout
 | 
			
		||||
 | 
			
		||||
    debug_file_handler:
 | 
			
		||||
        class: logging.handlers.RotatingFileHandler
 | 
			
		||||
        level: DEBUG
 | 
			
		||||
        formatter: simple
 | 
			
		||||
        filename: dashboard.log
 | 
			
		||||
        maxBytes: 5000000 # 5MB
 | 
			
		||||
        backupCount: 0
 | 
			
		||||
        encoding: utf8
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
root:
 | 
			
		||||
    level: ERROR
 | 
			
		||||
    handlers: [debug_file_handler]
 | 
			
		||||
 | 
			
		||||
loggers:
 | 
			
		||||
  "default":
 | 
			
		||||
    level: DEBUG
 | 
			
		||||
    handlers: [debug_file_handler, console]
 | 
			
		||||
| 
						 | 
				
			
			@ -0,0 +1,13 @@
 | 
			
		|||
import os
 | 
			
		||||
import yaml
 | 
			
		||||
 | 
			
		||||
from paths import CONFIG_DIR
 | 
			
		||||
 | 
			
		||||
def read_config(name="config"):
    """Load ``<CONFIG_DIR>/<name>.yaml`` and return the parsed content.

    Args:
        name: base filename without the ``.yaml`` extension.

    Returns:
        The deserialized YAML document (typically a dict).
    """
    with open(os.path.join(CONFIG_DIR, name + ".yaml"), "r") as f:
        # BUG FIX: yaml.load() without an explicit Loader can execute
        # arbitrary Python tags from the file and raises TypeError on
        # PyYAML >= 6; safe_load is the correct call for trusted-shape config.
        data = yaml.safe_load(f.read())
    return data
 | 
			
		||||
 | 
			
		||||
def write_config(data, name="config"):
    """Serialize *data* as block-style YAML to ``<CONFIG_DIR>/<name>.yaml``."""
    target = os.path.join(CONFIG_DIR, name + ".yaml")
    with open(target, "w+") as f:
        f.write(yaml.dump(data, default_flow_style=False))
 | 
			
		||||
| 
						 | 
				
			
			@ -0,0 +1,97 @@
 | 
			
		|||
# region ############################# IMPORTS #############################
 | 
			
		||||
 | 
			
		||||
import logging
 | 
			
		||||
from debug import setup_logging
 | 
			
		||||
log = logging.getLogger("default")
 | 
			
		||||
setup_logging()
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
from datetime import datetime
 | 
			
		||||
from peewee import *
 | 
			
		||||
from playhouse.sqlite_ext import SqliteExtDatabase#, FTS5Model, SearchField
 | 
			
		||||
from configuration import read_config, write_config
 | 
			
		||||
from paths import DATA_DIR
 | 
			
		||||
 | 
			
		||||
# endregion
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# region ############################# GLOBALS #############################
# Directory containing this module (short alias `rp` kept for existing callers).
realpath = os.path.dirname(os.path.realpath(__file__))
rp = realpath

# SQLite database file lives under the app's data directory.
db_path = os.path.join(DATA_DIR, 'database.db')
pragmas = [
    ('journal_mode', 'wal'),     # write-ahead logging for better concurrent reads
    ('cache_size', -1000 * 32)]  # negative cache_size = size in KiB (~32 MB)
db = SqliteExtDatabase(db_path, pragmas=pragmas)

# endregion
 | 
			
		||||
 | 
			
		||||
# endregion
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# region ############################# TABLE CLASSES #############################
 | 
			
		||||
 | 
			
		||||
class BroModel(Model):
    """Common base model: audit timestamps plus soft-delete support."""

    # BUG FIX: pass the callable `datetime.now`, not `datetime.now()`.
    # Calling it here evaluates once at import time, so every row would be
    # stamped with the moment the module was first loaded instead of the
    # moment the row is created.
    date_created = DateTimeField(default=datetime.now)
    date_updated = DateTimeField(default=datetime.now)
    date_deleted = DateTimeField(null=True)
    deleted = BooleanField(default=False)

    def mark_deleted(self):
        """Soft-delete: flag the row and record when, instead of removing it."""
        self.deleted = True
        self.date_deleted = datetime.now()
        self.save()
 | 
			
		||||
 | 
			
		||||
class Entry(BroModel):
    """One speed-test measurement (speeds in Mbps)."""

    upload = FloatField()    # measured upload speed, Mbps
    download = FloatField()  # measured download speed, Mbps

    class Meta:
        database = db

    @classmethod
    def create(cls, **query):
        # BUG FIX: peewee's Model.create is a classmethod; the previous
        # instance-method override broke the standard `Entry.create(...)`
        # call form (TypeError: missing `self`). A classmethod pass-through
        # keeps both `Entry.create(...)` and instance-bound calls working.
        return super(Entry, cls).create(**query)

    def save(self, *args, **kwargs):
        """Persist the row, refreshing the `date_updated` timestamp first."""
        self.date_updated = datetime.now()
        return super(Entry, self).save(*args, **kwargs)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# region Migration
# One-shot schema migration, gated on the `database_migrate` config flag.
config = read_config()
if config['database_migrate']:
    log.debug("=====================")
    log.debug("Migration stuff...")
    try:
        from playhouse.migrate import *

        migrator = SqliteMigrator(db)

        open_count = IntegerField(default=0)

        migrate(
            migrator.add_column('Entry', 'open_count', open_count)
        )
        log.debug("Migration success")
        log.debug("=====================")

        # Clear the flag so the migration only ever runs once.
        config['database_migrate'] = False
        write_config(config)
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt still
    # propagate; any real migration failure is logged with its traceback.
    except Exception:
        log.error("Could not migrate", exc_info=True)
        log.debug("=====================")
# endregion

log.info(" ".join(["Using DB", str(db), "At path:", str(db_path)]))

# On init make sure we create database
db.connect()
db.create_tables([Entry])

# endregion
 | 
			
		||||
| 
						 | 
				
			
			@ -0,0 +1,95 @@
 | 
			
		|||
'''
 | 
			
		||||
This file contains debugging stuff, like logger configuration, error wrap functions and the like.
 | 
			
		||||
'''
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import traceback
 | 
			
		||||
import logging
 | 
			
		||||
import logging.config
 | 
			
		||||
import yaml
 | 
			
		||||
from flask import Response, jsonify, render_template
 | 
			
		||||
import functools
 | 
			
		||||
 | 
			
		||||
basedir = os.path.dirname(os.path.realpath(__file__))


def setup_logging(
        default_path=os.path.join(basedir, 'config', 'logger.yaml'),
        default_level=logging.INFO,
        env_key='LOG_CFG',
        logname=None
):
    """Configure the logging system from a YAML config file.

    The config path can be overridden through the environment variable named
    by *env_key*; when no config file is found, fall back to basicConfig at
    *default_level*.
    """
    path = os.getenv(env_key) or default_path
    if not os.path.exists(path):
        logging.basicConfig(level=default_level)
        return

    with open(path, 'rt') as f:
        config = yaml.safe_load(f.read())

    # Anchor the rotating-file handler's log file next to this module so the
    # log lands in a predictable place regardless of the working directory.
    logpath = os.path.join(basedir, config['handlers']['debug_file_handler']['filename'])
    print("Set log path to", logpath)
    config['handlers']['debug_file_handler']['filename'] = logpath

    logging.config.dictConfig(config)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# NOTE(review): this decorator is redefined later in this module (the later
# version also logs the traceback via log.error). This earlier definition is
# dead code — it is shadowed at import time and never used.
def catch_errors_json(f):
    """Wrap a Flask view: any exception is returned as a JSON error payload."""
    @functools.wraps(f)
    def wrapped(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except Exception as e:
            traceback.print_exc()
            return jsonify({"error": str(e), "traceback": traceback.format_exc()})

    return wrapped
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Cache of named loggers so repeated lookups hand back the same object and
# setup_logging() only runs the first time a given name is requested.
loggers = {}


def get_logger(name):
    """Return a cached logger for *name*, configuring logging on first use."""
    global loggers

    cached = loggers.get(name)
    if cached:
        # print (f"Logger {name} exists, reuse.")
        return cached

    logger = logging.getLogger(name)
    loggers[name] = logger
    setup_logging()
    return logger


log = logger = get_logger("default")
 | 
			
		||||
 | 
			
		||||
def catch_errors_json(f):
    """Decorator for JSON endpoints: exceptions become a JSON error payload.

    The traceback is printed to stderr, logged, and included in the response.
    """
    @functools.wraps(f)
    def wrapped(*args, **kwargs):
        try:
            result = f(*args, **kwargs)
        except Exception as e:
            trace = traceback.format_exc()
            traceback.print_exc()
            log.error(trace)
            return jsonify({"error": str(e), "traceback": trace})
        return result

    return wrapped
 | 
			
		||||
 | 
			
		||||
def catch_errors_html(f):
    """Decorator for HTML views: exceptions render the error.html template.

    The traceback is printed to stderr, logged, and passed to the template.
    """
    @functools.wraps(f)
    def wrapped(*args, **kwargs):
        try:
            result = f(*args, **kwargs)
        except Exception as e:
            trace = traceback.format_exc()
            traceback.print_exc()
            log.error(trace)
            return render_template("error.html", error=str(e), error_trace=trace)
        return result

    return wrapped
 | 
			
		||||
| 
						 | 
				
			
			@ -0,0 +1,61 @@
 | 
			
		|||
'''
 | 
			
		||||
Lock system, can create, check and manage file locks.
 | 
			
		||||
Can be used with, for example, cron job scripts to check if another script is already running, or
 | 
			
		||||
for whatever you can think of.
 | 
			
		||||
'''
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
from paths import APP_DIR
 | 
			
		||||
 | 
			
		||||
# region Logger
 | 
			
		||||
import logging
 | 
			
		||||
from debug import setup_logging
 | 
			
		||||
 | 
			
		||||
log = logger = logging.getLogger("ark_dashboard")
 | 
			
		||||
setup_logging()
 | 
			
		||||
# endregion
 | 
			
		||||
 | 
			
		||||
class Lock(object):
    """A file-based lock stored as ``<name>.lock`` inside APP_DIR.

    The lock file's contents double as a free-form status message.
    """

    def __init__(self, name="general"):
        self.name = name
        self.filepath = os.path.join(APP_DIR, f"{name}.lock")

    @property
    def locked(self):
        """True while the lock file exists on disk."""
        return os.path.exists(self.filepath)

    # Alias kept for readability at call sites.
    is_locked = locked

    @property
    def message(self):
        """Contents of the lock file, or None (with a warning) when unlocked."""
        if not self.locked:
            log.warning(f"Lock {self.name} does not exist.")
            return None
        with open(self.filepath, "r") as f:
            return f.read()

    @message.setter
    def message(self, value):
        if not self.locked:
            log.warning(f"Lock {self.name} does not exist.")
            return
        with open(self.filepath, "w") as f:
            f.write(value)

    def lock(self, message=""):
        """Create (or overwrite) the lock file, storing *message* inside it."""
        with open(self.filepath, "w+") as f:
            f.write(message)

    def unlock(self):
        """Remove the lock file if present; otherwise just note it in the log."""
        if not self.locked:
            log.debug(f"Lock {self.name} is already unlocked.")
            return
        os.remove(self.filepath)
 | 
			
		||||
 | 
			
		||||
def get_locks():
    """Return a Lock object for every ``*.lock`` file present in APP_DIR."""
    return [
        Lock(stem)
        for stem, ext in (os.path.splitext(fn) for fn in os.listdir(APP_DIR))
        if ext == ".lock"
    ]
 | 
			
		||||
| 
						 | 
				
			
			@ -0,0 +1,11 @@
 | 
			
		|||
'''
 | 
			
		||||
Configuration file that holds static and dynamically generated paths, like path to your current app directory.
 | 
			
		||||
 | 
			
		||||
'''
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
 | 
			
		||||
# APP_DIR will point to the parent directory of paths.py file
APP_DIR = os.path.dirname(os.path.realpath(__file__))
CONFIG_DIR = os.path.join(APP_DIR, "config")  # YAML configuration files
DATA_DIR = os.path.join(APP_DIR, "data")      # SQLite database and other data
 | 
			
		||||
| 
						 | 
				
			
			@ -0,0 +1,4 @@
 | 
			
		|||
peewee
speedtest-cli
numpy
matplotlib
PyYAML
Flask
 | 
			
		||||
| 
						 | 
				
			
			@ -0,0 +1,98 @@
 | 
			
		|||
from configuration import read_config
 | 
			
		||||
# region Logger
 | 
			
		||||
import logging
 | 
			
		||||
from debug import setup_logging
 | 
			
		||||
 | 
			
		||||
log = logger = logging.getLogger("default")
 | 
			
		||||
setup_logging()
 | 
			
		||||
# endregion
 | 
			
		||||
 | 
			
		||||
from dbo import Entry
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def gather_data():
    """Read every Entry row and return parallel (dates, downloads, uploads) lists."""
    log.debug("Gathering data...")
    downloads, uploads, dates = [], [], []
    for entry in Entry.select():
        downloads.append(entry.download)
        uploads.append(entry.upload)
        dates.append(entry.date_created)
    return dates, downloads, uploads
 | 
			
		||||
 | 
			
		||||
def generate_plot_image(dates, downloads, uploads):
    """Plot download speed over time and save a PNG to the configured path.

    Note: *uploads* is accepted for interface symmetry with the other output
    generators but is not plotted yet.
    """
    log.debug("Generating image output...")  # fixed "Genering" typo in log message
    # Imported lazily so the DB/script paths don't pay matplotlib's import cost.
    import matplotlib
    import matplotlib.pyplot as plt

    # NOTE(review): plt.plot_date is deprecated in recent matplotlib; plain
    # plt.plot handles datetime x-values directly — confirm before upgrading.
    dates = matplotlib.dates.date2num(dates)
    plt.plot_date(dates, downloads, fmt="b-")
    plt.ylabel('Download Speed Mbps')
    plt.savefig(read_config()['output_image_path'])
    # BUG FIX: release the figure so repeated runs in a long-lived process
    # don't accumulate open figures (a slow memory leak).
    plt.close()
 | 
			
		||||
 | 
			
		||||
def generate_txt_output(dates, downloads, uploads):
    """Write a tab-separated text report of all measurements to the configured path."""
    log.debug("Genering txt output...")
    lines = ["Date:			Down;			Up;\n"]
    for date, download, upload in zip(dates, downloads, uploads):
        lines.append(f"{date}:			{download} Mbps;			{upload} Mbps\n")
    with open(read_config()['output_txt_path'], "w+") as f:
        f.write("".join(lines))
 | 
			
		||||
 | 
			
		||||
if __name__ == "__main__":
    '''
    This script will run a few speed tests, calculate average upload and download speeds and record them into database.
    Once finished it will also generate an image with graph plotted.
    '''
    from random import uniform
    try:
        import speedtest

        servers = []
        threads = None

        log.debug("Initializing speedtest...")
        # s = speedtest.Speedtest()

        log.debug(f"Running test...")
        # s.get_servers(servers)
        # s.get_best_server()
        # s.download(threads=threads)
        # s.upload(threads=threads, pre_allocate=False)
        #
        # results_dict = s.results.dict()
        # download = round(results_dict['download']/1000000, 2)
        # upload = round(results_dict['upload']/1000000, 2)
        # NOTE(review): the real speedtest calls above are commented out, so
        # random placeholder values get recorded — confirm before deploying.
        download = uniform(0, 2)
        upload = uniform(0, 2)

        log.debug(f"{download}mbps, {upload}mbps")

        entry = Entry()
        entry.upload = upload
        entry.download = download
        entry.save()
    # Narrowed from bare `except:` (here and below) so SystemExit and
    # KeyboardInterrupt are not swallowed; failures are still logged.
    except Exception:
        log.error("Data record error.", exc_info=True)

    try:
        dates, downloads, uploads = gather_data()

        try:
            generate_txt_output(dates, downloads, uploads)
        except Exception:
            log.error("Unable to save text file.", exc_info=True)

        try:
            generate_plot_image(dates, downloads, uploads)
        except Exception:
            log.error("Unable to save plot file.", exc_info=True)

    except Exception:
        log.error("Error plotting.", exc_info=True)
 | 
			
		||||
 | 
			
		||||
		Loading…
	
		Reference in New Issue