Add files via upload

parent 1d9b104e0d
commit bcb01a2673
windowsAgent/source/biblio.ini (Normal file, +9)
@@ -0,0 +1,9 @@
[DATA]
databasePath=C:\Users\admin\Desktop\libri.xbb
csvPath=C:\Users\admin\Documents\libri.csv
serverUrl=http://192.168.1.107:5555/up.php
authCode=xmeQCwqrQcSQ7TQX2Yyw
significance=3
verbose=1
updateUserWait=100
chunkSize=50
windowsAgent/source/do.py (Normal file, +84)
@@ -0,0 +1,84 @@
import storage
import pymsgbox, os, time, requests, uuid, configparser

# Read settings from biblio.ini (see the [DATA] section above)
config = configparser.ConfigParser()
config.read('biblio.ini')
path=config["DATA"]["databasePath"]
csvPath=config["DATA"]["csvPath"]
serverurl=config["DATA"]["serverUrl"]
auth=config["DATA"]["authCode"]
significance=int(config["DATA"]["significance"])
verbose=bool(int(config["DATA"]["verbose"]))  # bool() alone would treat "0" as True
upwait=int(config["DATA"]["updateUserWait"])
chunkRowNum=int(config["DATA"]["chunkSize"])

def vprint(data):
    global verbose
    if verbose:
        print(data)

def upload(csvPath):
    # Push the exported CSV to the server in three phases: BEGIN (announce a
    # new upload), one or more WRITE posts each carrying a chunk of rows,
    # then CONCLUDE.
    global serverurl
    global auth
    vprint("Generating nonce uuid")
    uid=str(uuid.uuid4())
    data=auth+"\n"+uid+"\nBEGIN"
    requests.post(serverurl, data=data)
    vprint("Sent BEGIN")
    accumulator=""
    index=0
    for line in open(csvPath):
        if index==0:
            data=auth+"\n"+uid+"\nWRITE\n"
            accumulator=data
        accumulator+=line
        index+=1
        if index>chunkRowNum:
            vprint("Sent WRITE")
            requests.post(serverurl, data=accumulator)
            index=0
    if index>0:  # flush the final partial chunk only, so a full chunk is not posted twice
        requests.post(serverurl, data=accumulator)
        vprint("Sent WRITE")
    data=auth+"\n"+uid+"\nCONCLUDE"
    requests.post(serverurl, data=data)
    vprint("Sent CONCLUDE")

# Initialize persistent counters on first run
if not storage.exists("lastUpdate"):
    vprint("Last database update record has been initialized")
    storage.save("lastUpdate",0)
if not storage.exists("updateTick"):
    vprint("Update tick record has been initialized")
    storage.save("updateTick",0)
if not storage.exists("lastCSVUpdate"):
    vprint("Last csv update record has been initialized")
    storage.save("lastCSVUpdate",0)

while True:
    vprint("Main loop entered")
    try:
        vprint("In try-catch wrapper")
        if storage.load("lastUpdate")<os.path.getmtime(path):
            vprint("The database file has been updated since the last recorded time")
            storage.save("lastUpdate",os.path.getmtime(path))
            vprint("Updating the last recorded time accordingly")
            i=storage.load("updateTick")
            vprint("Reading tick register: "+str(i))
            if i>significance:
                vprint("The counter has overflowed the significance threshold")
                storage.save("updateTick",0)
                vprint("Resetting the counter to 0 accordingly")
                vprint("Displaying update dialog")
                # Italian prompt: "The book catalogue has been updated.
                # Please click File > Esporta Archivio > Esporta > Sì"
                pymsgbox.alert('Il catalogo dei libri è stato aggiornato.\nSi prega di cliccare File > Esporta Archivio > Esporta > Sì', 'Catalogo aggiornato')
                vprint("Dialog displayed, waiting "+str(upwait)+"s")
                time.sleep(upwait)
                vprint("Wait expired")
                if storage.load("lastCSVUpdate")<os.path.getmtime(csvPath):
                    vprint("Csv has been updated, good librarian!")
                    storage.save("lastCSVUpdate",os.path.getmtime(csvPath))
                    vprint("Updating the last recorded time accordingly")
                    vprint("Beginning upload")
                    upload(csvPath)  # maybe check if export has been varied?

            else:
                storage.save("updateTick",i+1)
    except Exception as e:
        vprint("OOPS! :"+str(e))
        time.sleep(5)
    time.sleep(30)
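A minimal sketch of a compatible receiver, for reference only: up.php itself is not part of this commit, so everything below (ReceiverHandler, UPLOAD_DIR, the status codes) is an assumption based purely on the wire format upload() sends, i.e. each POST body is the auth code, a nonce uuid and a command (BEGIN, WRITE followed by CSV rows, or CONCLUDE), separated by newlines.

# Hypothetical receiver sketch; the real up.php endpoint is not in this commit.
from http.server import BaseHTTPRequestHandler, HTTPServer

AUTH = "xmeQCwqrQcSQ7TQX2Yyw"   # must match authCode in biblio.ini
UPLOAD_DIR = "."                # assumption: where partial uploads are written

class ReceiverHandler(BaseHTTPRequestHandler):
    def do_POST(self):
        body = self.rfile.read(int(self.headers.get("Content-Length", 0))).decode()
        auth, uid, rest = body.split("\n", 2)
        if auth != AUTH:
            self.send_response(403)
            self.end_headers()
            return
        if rest == "BEGIN":
            # Start a fresh upload keyed by the client's nonce
            open(UPLOAD_DIR + "/" + uid + ".part", "w").close()
        elif rest.startswith("WRITE\n"):
            # Append one chunk of CSV rows
            with open(UPLOAD_DIR + "/" + uid + ".part", "a") as f:
                f.write(rest[len("WRITE\n"):])
        elif rest == "CONCLUDE":
            # Upload finished; a real endpoint might rename or import the file here
            pass
        self.send_response(200)
        self.end_headers()

if __name__ == "__main__":
    HTTPServer(("0.0.0.0", 5555), ReceiverHandler).serve_forever()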
windowsAgent/source/storage.py (Normal file, +95)
@@ -0,0 +1,95 @@
from pathlib import Path
import json, os

# In-memory cache of records, keyed as loadCache[saveSpace][name]
loadCache = {}


class CorruptDataException(Exception):
    pass


def flushLoadCache():
    # Discard every cached record
    global loadCache
    loadCache = {}


class AbsentRecordException(Exception):
    pass


def exists(name, saveSpace="Default"):
    f = Path("data/storage/" + saveSpace + "/" + name + ".json")
    return f.is_file()


def delete(name, saveSpace="Default", silent=False):
    if exists(name, saveSpace):
        os.unlink("data/storage/" + saveSpace + "/" + name + ".json")
    else:
        if not silent:
            raise AbsentRecordException("Record " + name +
                                        " not found in savespace " + saveSpace)


def commitToDisk(name=False, Savespace=False):
    # Write cached records out to disk; optionally restrict to one record
    # name and/or one savespace
    for ISaveSpace in loadCache.keys():
        if Savespace != False:
            if ISaveSpace != Savespace:
                continue
        for Iname in loadCache[ISaveSpace].keys():
            if name != False:
                if Iname != name:
                    continue
            save(Iname, loadCache[ISaveSpace][Iname], ISaveSpace)


def refreshFromDisk(name=False, Savespace=False):
    # Drop cached copies and re-read them from disk; optionally restrict to
    # one record name and/or one savespace
    for ISaveSpace in list(loadCache.keys()):
        if Savespace != False:
            if ISaveSpace != Savespace:
                continue
        for Iname in list(loadCache[ISaveSpace].keys()):
            if name != False:
                if Iname != name:
                    continue
            del loadCache[ISaveSpace][Iname]  # ensure load() re-reads the file
            load(Iname, ISaveSpace, True)


def save(name, value, saveSpace="Default", useCacheOnly=False):
    # Update the cached copy if one exists
    if saveSpace in loadCache.keys():
        if name in loadCache[saveSpace].keys():
            loadCache[saveSpace][name] = value
    if useCacheOnly:
        # Keep the value in memory only; commitToDisk() persists it later
        if saveSpace not in loadCache.keys():
            loadCache[saveSpace] = {}
        if name not in loadCache[saveSpace].keys():
            loadCache[saveSpace][name] = value
    else:
        # Write the value to data/storage/<saveSpace>/<name>.json
        Path("data/storage/" + saveSpace).mkdir(parents=True, exist_ok=True)
        with open("data/storage/" + saveSpace + "/" + name + ".json",
                  "w+") as file:
            file.write(json.dumps(value))


def load(name, saveSpace="Default", cached=False):
    # Serve from the cache when allowed and present
    if cached and saveSpace in loadCache.keys():
        if name in loadCache[saveSpace].keys():
            return loadCache[saveSpace][name]
    try:
        with open("data/storage/" + saveSpace + "/" + name + ".json",
                  "r") as file:
            try:
                res = json.loads(file.read())
                if cached:
                    # Remember the value for subsequent cached loads
                    if saveSpace not in loadCache.keys():
                        loadCache[saveSpace] = {}
                    if name not in loadCache[saveSpace].keys():
                        loadCache[saveSpace][name] = res
                return res

            except json.decoder.JSONDecodeError as e:
                raise CorruptDataException("Data inside " + name +
                                           " (savespace " + saveSpace +
                                           ") is not valid Json")
    except FileNotFoundError as e:
        raise AbsentRecordException("Record " + name +
                                    " not found in savespace " + saveSpace)
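A quick usage sketch of storage.py's cache path, which do.py does not exercise: save(..., useCacheOnly=True) keeps a value in loadCache only, commitToDisk() writes cached entries out, and load(..., cached=True) serves repeat reads from memory. The record names below are illustrative.

import storage

storage.save("lastUpdate", 0)                   # written straight to data/storage/Default/lastUpdate.json
print(storage.load("lastUpdate"))               # -> 0

storage.save("counter", 41, useCacheOnly=True)  # memory only, nothing on disk yet
storage.save("counter", 42, useCacheOnly=True)  # updates the cached entry
storage.commitToDisk("counter")                 # now data/storage/Default/counter.json exists
print(storage.load("counter", cached=True))     # -> 42, served from loadCache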