2019-11-02 17:27:54 -05:00
|
|
|
from direct.directnotify import DirectNotifyGlobal
|
2022-12-16 18:40:57 -06:00
|
|
|
from panda3d.core import ConfigVariableBool
|
2019-11-02 17:27:54 -05:00
|
|
|
from direct.task import Task
|
2019-12-30 00:07:56 -06:00
|
|
|
import pickle
|
2019-11-02 17:27:54 -05:00
|
|
|
import os
|
|
|
|
import sys
|
2019-12-30 00:07:56 -06:00
|
|
|
import dbm
|
2019-11-02 17:27:54 -05:00
|
|
|
import time
|
|
|
|
|
|
|
|
class DataStore:
    """Periodically-synced persistent key/value store.

    Data is backed either by an anydbm/dbm database or by a pickle file,
    depending on the wantAnyDbm class attribute declared further down.
    """

    # Subclasses list their query-type strings here; the class body then
    # converts the list into a {typeString: index} mapping.
    QueryTypes = []
    QueryTypes = dict(zip(QueryTypes, range(len(QueryTypes))))

    @classmethod
    def addQueryTypes(cls, typeStrings):
        """Return a new {typeString: index} dict that extends this class's
        QueryTypes (kept in their existing index order) with the given
        list of new type strings appended at the end."""
        # Recover the inherited type strings sorted by their index values.
        indexed = sorted(zip(cls.QueryTypes.values(), cls.QueryTypes.keys()))
        combined = [name for _, name in indexed] + typeStrings
        return {name: index for index, name in enumerate(combined)}
|
|
|
|
|
|
|
|
# Logging category used for all DataStore diagnostics.
notify = DirectNotifyGlobal.directNotify.newCategory('DataStore')
# Config switch 'want-ds-anydbm' (default on): when true the store is
# backed by an anydbm/dbm database, otherwise by a pickle file.
wantAnyDbm = ConfigVariableBool('want-ds-anydbm', 1).getValue()
|
|
|
|
|
|
|
|
def __init__(self, filepath, writePeriod = 300, writeCountTrigger = 100):
    """Create the store and immediately open it.

    filepath: base path of the on-disk store (suffixed with '-anydbm'
        when the anydbm backend is in use).
    writePeriod: seconds between periodic sync checks in syncTask.
    writeCountTrigger: number of pending writes that forces a sync.
    """
    self.filepath = filepath
    self.writePeriod = writePeriod
    self.writeCountTrigger = writeCountTrigger
    # Count of modifications made since the last sync to disk.
    self.writeCount = 0
    # None means "closed"; set by readDataFromFile() via open().
    self.data = None
    # Used to build a unique per-subclass sync task name.
    self.className = self.__class__.__name__
    if self.wantAnyDbm:
        self.filepath += '-anydbm'
        # dbm._defaultmod is a private attribute that stays None until the
        # first dbm.open()/whichdb() call, so dereferencing .__name__ here
        # unconditionally would raise AttributeError before the store has
        # ever been opened. Guard the debug log.
        if dbm._defaultmod is not None:
            self.notify.debug('anydbm default module used: %s ' % dbm._defaultmod.__name__)
    self.open()
    return
|
|
|
|
|
|
|
|
def readDataFromFile(self):
    """Load self.data from disk.

    anydbm mode: opens (or creates) the database; self.data is the live
    dbm handle. pickle mode: prefers the '.bu' backup file (a leftover
    primary file then means the last sync died mid-write and is
    discarded), falls back to the primary file, and finally to an empty
    dict when neither exists.
    """
    if self.wantAnyDbm:
        try:
            if os.path.exists(self.filepath):
                self.data = dbm.open(self.filepath, 'w')
                self.notify.debug('Opening existing anydbm database at: %s.' % (self.filepath,))
            else:
                self.data = dbm.open(self.filepath, 'c')
                self.notify.debug('Creating new anydbm database at: %s.' % (self.filepath,))
        except dbm.error:
            self.notify.warning('Cannot open anydbm database at: %s.' % (self.filepath,))
    else:
        try:
            # Pickle data must be read in binary mode ('rb'); text mode
            # breaks pickle.load under Python 3.
            file = open(self.filepath + '.bu', 'rb')
            self.notify.debug('Opening backup pickle data file at %s.' % (self.filepath + '.bu',))
            # The backup is authoritative: a surviving primary file is a
            # partial write from an interrupted sync, so remove it.
            if os.path.exists(self.filepath):
                os.remove(self.filepath)
        except IOError:
            try:
                file = open(self.filepath, 'rb')
                self.notify.debug('Opening old pickle data file at %s..' % (self.filepath,))
            except IOError:
                file = None
                self.notify.debug('New pickle data file will be written to %s.' % (self.filepath,))
        if file:
            data = pickle.load(file)
            file.close()
            self.data = data
        else:
            self.data = {}
    return
|
|
|
|
|
|
|
|
def writeDataToFile(self):
    """Flush self.data to disk.

    anydbm mode: delegates to the dbm handle's sync(). pickle mode:
    renames the current file to a '.bu' backup, writes a fresh pickle,
    then removes the backup — so a crash mid-write always leaves at
    least one intact copy on disk. Logs a warning and aborts when the
    store is closed (self.data is None).
    """
    if self.data is not None:
        self.notify.debug('Data is now synced with disk at %s' % self.filepath)
        if self.wantAnyDbm:
            self.data.sync()
        else:
            try:
                backuppath = self.filepath + '.bu'
                # Preserve the previous file until the new write completes.
                if os.path.exists(self.filepath):
                    os.rename(self.filepath, backuppath)
                # Binary mode ('wb') is required by pickle.dump under
                # Python 3; the context manager guarantees the close.
                with open(self.filepath, 'wb') as outfile:
                    pickle.dump(self.data, outfile)
                if os.path.exists(backuppath):
                    os.remove(backuppath)
            except EnvironmentError:
                self.notify.warning(str(sys.exc_info()[1]))
    else:
        self.notify.warning('No data to write. Aborting sync.')
    return
|
|
|
|
|
|
|
|
def syncTask(self, task):
    """Recurring task: flush pending writes to disk.

    A sync happens when the write period has elapsed and there are
    pending writes, or immediately when the pending-write count exceeds
    writeCountTrigger. Always continues the task.
    """
    task.timeElapsed += globalClock.getDt()
    periodExpired = task.timeElapsed > self.writePeriod
    if periodExpired:
        if self.writeCount:
            self.writeDataToFile()
            self.resetWriteCount()
        task.timeElapsed = 0.0
    if self.writeCount > self.writeCountTrigger:
        self.writeDataToFile()
        self.resetWriteCount()
        task.timeElapsed = 0.0
    return Task.cont
|
|
|
|
|
|
|
|
def incrementWriteCount(self):
    """Record one more modification pending since the last disk sync."""
    self.writeCount = self.writeCount + 1
|
|
|
|
|
|
|
|
def resetWriteCount(self):
    """Mark all pending modifications as flushed to disk."""
    self.writeCount = 0
|
|
|
|
|
|
|
|
def close(self):
    """Flush the store to disk, stop the sync task, and mark it closed.

    Safe to call on an already-closed store (self.data is None).
    """
    if self.data is not None:
        self.writeDataToFile()
        if self.wantAnyDbm:
            # anydbm handles are live database objects and must be closed.
            self.data.close()
        # NOTE(review): indentation was lost in this copy of the file; the
        # upstream version keeps these two statements inside this branch —
        # confirm against the original before relying on it.
        taskMgr.remove('%s-syncTask' % (self.className,))
        # None marks the store as closed for the rest of the class.
        self.data = None
    return
|
|
|
|
|
|
|
|
def open(self):
    """(Re)open the store: close any previous state, load data from disk,
    clear the pending-write counter, and (re)schedule the periodic sync
    task with its elapsed-time accumulator zeroed."""
    self.close()
    self.readDataFromFile()
    self.resetWriteCount()
    taskName = '%s-syncTask' % (self.className,)
    taskMgr.remove(taskName)
    syncTask = taskMgr.add(self.syncTask, taskName)
    syncTask.timeElapsed = 0.0
|
|
|
|
|
|
|
|
def reset(self):
    """Wipe the on-disk store and reopen it empty."""
    self.destroy()
    self.open()
|
|
|
|
|
|
|
|
def destroy(self):
    """Close the store and remove its on-disk files.

    anydbm mode: the database file(s) are renamed to a timestamped
    'UDStoreBak...' backup rather than deleted; the outcome is reported
    via uber.air.writeServerEvent. pickle mode: the data file and its
    '.bu' backup are deleted outright.
    """
    self.close()
    if self.wantAnyDbm:
        # Build a filesystem-safe timestamp: map ':' and ' ' to '_'.
        # str.maketrans takes the from/to character strings as two
        # arguments; the previous single-string form raised TypeError.
        trans = str.maketrans(': ', '__')
        t = time.asctime(time.localtime()).translate(trans)
        head, tail = os.path.split(self.filepath)
        newFileName = 'UDStoreBak' + t
        if os.path.exists(self.filepath):
            try:
                # Rename within the store's own directory, not the cwd.
                os.rename(self.filepath, os.path.join(head, newFileName))
                uber.air.writeServerEvent('Uberdog data store Info', 0, 'Creating backup of file: %s saving as: %s' % (tail, newFileName))
            except Exception:
                # Best-effort backup: report the failure but keep going.
                uber.air.writeServerEvent('Uberdog data store Info', 0, 'Unable to create backup of file: %s ' % tail)
        else:
            # Some dbm backends split the database across several files
            # (e.g. '.dir'/'.pag'); back up each one, keeping its extension.
            for file in os.listdir(head):
                if file.find(tail) > -1:
                    ext = os.path.splitext(file)[1]
                    try:
                        os.rename(os.path.join(head, file), os.path.join(head, newFileName + ext))
                        uber.air.writeServerEvent('Uberdog data store Info', 0, 'Creating backup of file: %s saving as: %s' % (file, newFileName + ext))
                    except Exception:
                        # Parenthesize the name+extension: the old code's
                        # '%s ' % a + b appended ext after the formatting.
                        uber.air.writeServerEvent('Uberdog data store Info', 0, 'Unable to create backup of file: %s ' % (newFileName + ext))
    else:
        if os.path.exists(self.filepath + '.bu'):
            os.remove(self.filepath + '.bu')
        if os.path.exists(self.filepath):
            os.remove(self.filepath)
|
|
|
|
|
|
|
|
def query(self, query):
    """Answer one pickled query: unpickle it, delegate to handleQuery,
    and return the pickled results. A closed store (self.data is None)
    yields pickled None.

    NOTE(review): pickle.loads on externally-supplied bytes can execute
    arbitrary code if the sender is untrusted — confirm that callers of
    this method are trusted peers.
    """
    if self.data is None:
        results = None
    else:
        results = self.handleQuery(pickle.loads(query))
    return pickle.dumps(results)
|
|
|
|
|
|
|
|
def handleQuery(self, query):
    """Subclass hook: interpret an unpickled query and return its results.

    The base implementation handles nothing and always returns None.
    """
    return None
|