Imports were optimized a bit
@@ -1,12 +1,16 @@
-import os
-import gzip
-import json
-import shutil
+try:
+    from ujson import loads
+except ModuleNotFoundError:
+    from json import loads
+
+from os import stat, makedirs, path, getcwd
+from gzip import open as gzipopen
+from shutil import copyfileobj
 
 from datetime import datetime
 
-with open(os.getcwd()+os.path.sep+"config.json", "r", encoding='utf8') as file:
-    json_contents = json.loads(file.read())
+with open(getcwd()+path.sep+"config.json", "r", encoding='utf8') as file:
+    json_contents = loads(file.read())
 log_size = json_contents["logging"]["size"]
 log_folder = json_contents["logging"]["location"]
 file.close()
@@ -22,16 +26,16 @@ def checkSize(debug=False):
         log_file = "latest.log"
 
     try:
-        os.makedirs(log_folder, exist_ok=True)
-        log = os.stat(os.path.join(log_folder, log_file))
+        makedirs(log_folder, exist_ok=True)
+        log = stat(path.join(log_folder, log_file))
         if (log.st_size / 1024) > log_size:
-            with open(os.path.join(log_folder, log_file), 'rb') as f_in:
-                with gzip.open(os.path.join(log_folder, f'{datetime.now().strftime("%d.%m.%Y_%H:%M:%S")}.log.gz'), 'wb') as f_out:
-                    shutil.copyfileobj(f_in, f_out)
-                    print(f'Copied {os.path.join(log_folder, datetime.now().strftime("%d.%m.%Y_%H:%M:%S"))}.log.gz')
-                    open(os.path.join(log_folder, log_file), 'w').close()
+            with open(path.join(log_folder, log_file), 'rb') as f_in:
+                with gzipopen(path.join(log_folder, f'{datetime.now().strftime("%d.%m.%Y_%H:%M:%S")}.log.gz'), 'wb') as f_out:
+                    copyfileobj(f_in, f_out)
+                    print(f'Copied {path.join(log_folder, datetime.now().strftime("%d.%m.%Y_%H:%M:%S"))}.log.gz')
+                    open(path.join(log_folder, log_file), 'w').close()
     except FileNotFoundError:
-        print(f'Log file {os.path.join(log_folder, log_file)} does not exist')
+        print(f'Log file {path.join(log_folder, log_file)} does not exist')
         pass
 
 # Append string to log
@@ -47,7 +51,7 @@ def logAppend(message, debug=False):
     else:
         log_file = "latest.log"
 
-    log = open(os.path.join(log_folder, log_file), 'a')
+    log = open(path.join(log_folder, log_file), 'a')
     log.write(f'{message_formatted}\n')
     log.close()
 
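For context, the rotation that checkSize performs after this change reduces to the pattern below. This is a minimal, self-contained sketch under stated assumptions: LOG_FOLDER and LOG_SIZE_KB are hardcoded stand-ins for the values read from config.json, and rotate_log is an illustrative name, not a function from this repository.

from datetime import datetime
from gzip import open as gzipopen
from os import makedirs, path, stat
from shutil import copyfileobj

LOG_FOLDER = "logs"    # stand-in for json_contents["logging"]["location"]
LOG_SIZE_KB = 512      # stand-in for json_contents["logging"]["size"]

def rotate_log(log_file="latest.log"):
    """Gzip the log into a timestamped archive and truncate it once it exceeds the size limit."""
    makedirs(LOG_FOLDER, exist_ok=True)
    log_path = path.join(LOG_FOLDER, log_file)
    try:
        if (stat(log_path).st_size / 1024) > LOG_SIZE_KB:
            stamp = datetime.now().strftime("%d.%m.%Y_%H:%M:%S")
            with open(log_path, 'rb') as f_in:
                with gzipopen(path.join(LOG_FOLDER, f'{stamp}.log.gz'), 'wb') as f_out:
                    copyfileobj(f_in, f_out)  # stream the log into the gzip archive
            open(log_path, 'w').close()  # truncate the original log
    except FileNotFoundError:
        pass  # no log file yet, nothing to rotate

Compressing with gzip.open plus shutil.copyfileobj streams the file in chunks instead of reading it into memory at once, which is why those two helpers stay as direct imports after the change.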
@@ -1,9 +1,9 @@
 try:
     import ujson as json
-    from ujson import JSONDecodeError as JSONDecodeError
+    from ujson import loads, dumps
 except ModuleNotFoundError:
     import json
-    from json import JSONDecodeError as JSONDecodeError
+    from json import loads, dumps
 
 import os
 import sys
@@ -16,7 +16,7 @@ def jsonLoad(filename):
     """Loads arg1 as json and returns its contents"""
     with open(filename, "r", encoding='utf8') as file:
         try:
-            output = json.loads(file.read())
+            output = loads(file.read())
         except JSONDecodeError:
             logWrite(f"Could not load json file {filename}: file seems to be incorrect!\n{traceback.print_exc()}")
             raise
@@ -30,7 +30,7 @@ def jsonSave(contents, filename):
     """Dumps dict/list arg1 to file arg2"""
     try:
         with open(filename, "w", encoding='utf8') as file:
-            file.write(json.dumps(contents, ensure_ascii=False, indent=4))
+            file.write(dumps(contents, ensure_ascii=False, indent=4))
             file.close()
     except Exception as exp:
         logWrite(f"Could not save json file {filename}: {exp}\n{traceback.print_exc()}")
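The second file applies the same fallback idea to the JSON helpers: prefer ujson's loads/dumps when the package is installed, otherwise fall back to the standard library. Reduced to a standalone sketch it looks like this; json_load and json_save are illustrative names, and the repository's jsonLoad/jsonSave additionally report decode errors through logWrite and traceback, which is omitted here for brevity.

try:
    from ujson import loads, dumps   # faster C implementation, if installed
except ModuleNotFoundError:
    from json import loads, dumps    # standard-library fallback

def json_load(filename):
    """Read a file and parse its contents as JSON."""
    with open(filename, "r", encoding="utf8") as file:
        return loads(file.read())

def json_save(contents, filename):
    """Serialize a dict/list to pretty-printed JSON and write it to a file."""
    with open(filename, "w", encoding="utf8") as file:
        file.write(dumps(contents, ensure_ascii=False, indent=4))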