John Cote 2015-06-05 01:05:01 -04:00
parent 0628201fca
commit 0d198f9f5d
8 changed files with 107 additions and 129 deletions

View file

@@ -14,9 +14,9 @@ for line in prc.split('\n'):
    line = line.strip()

    if line:
        loadPrcFileData('nirai config', line)

del prc

# DC
__builtin__.dcStream = StringStream()
@@ -39,18 +39,18 @@ abort = False
for mf in mfs:
    filename = 'resources/default/phase_%s.mf' % mf

    if not os.path.isfile(filename):
        print 'Phase %s not found' % filename
        abort = True
        break

    mf = Multifile()
    mf.openRead(filename)

    if not vfs.mount(mf, '../resources', 0):
        print 'Unable to mount %s' % filename
        abort = True
        break

# Packs
pack = os.environ.get('TT_STRIDE_CONTENT_PACK')
if pack and pack != 'default':
@@ -63,14 +63,14 @@ if pack and pack != 'default':
            ext = os.path.splitext(name)[1]
            if ext not in ('.jpg', '.jpeg', '.ogg', '.rgb'):
                mf.removeSubfile(name)

        mf.flush()

        if not vfs.mount(mf, '../resources', 0):
            print 'Unable to mount %s' % filename
            abort = True
            break

if not abort:
    # Run
    import toontown.toonbase.ClientStart
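For orientation (not part of this commit): the `prc` string consumed at the top of this file comes from the frozen niraidata module built by the packer in the next file, whose generate_niraidata() stores CONFIG as a 128-byte RC4 key followed by the RC4-encrypted prc text. A minimal sketch of the decode step, assuming the frozen module and rc4 bindings are importable under these names:

import niraidata    # frozen module produced by generate_niraidata() (assumed name)
import rc4          # same RC4 bindings the build script uses (assumed import)

key, enc = niraidata.CONFIG[:128], niraidata.CONFIG[128:]
rc4.rc4_setkey(key)
prc = rc4.rc4(enc)  # plain prc text, consumed line by line by the loop above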

View file

@@ -11,75 +11,75 @@ parser.add_argument('--make-nri', '-n', action='store_true',
                    help='Generate stride NRI.')
args = parser.parse_args()

# BEGIN (STRIPPED AND MODIFIED) COPY FROM niraitools.py
class NiraiPackager:
    HEADER = 'NRI\n'

    def __init__(self, outfile):
        self.modules = {}
        self.outfile = outfile

    def __read_file(self, filename, mangler=None):
        with open(filename, 'rb') as f:
            data = f.read()

        base = filename.rsplit('.', 1)[0].replace('\\', '/').replace('/', '.')
        pkg = base.endswith('.__init__')
        moduleName = base.rsplit('.', 1)[0] if pkg else base

        name = moduleName
        if mangler is not None:
            name = mangler(name)

        if not name:
            return '', ('', 0)

        try:
            data = self.compile_module(name, data)
        except:
            print 'WARNING: Failed to compile', filename
            return '', ('', 0)

        size = len(data) * (-1 if pkg else 1)
        return name, (data, size)

    def compile_module(self, name, data):
        return marshal.dumps(compile(data, name, 'exec'))

    def add_module(self, moduleName, data, size=None, compile=False):
        if compile:
            data = self.compile_module(moduleName, data)

        if size is None:
            size = len(data)

        self.modules[moduleName] = (data, size)

    def add_file(self, filename, mangler=None):
        print 'Adding file', filename
        moduleName, (data, size) = self.__read_file(filename, mangler)
        if moduleName:
            moduleName = os.path.basename(filename).rsplit('.', 1)[0]
            self.add_module(moduleName, data, size)

    def add_directory(self, dir, mangler=None):
        print 'Adding directory', dir

        def _recurse_dir(dir):
            for f in os.listdir(dir):
                f = os.path.join(dir, f)

                if os.path.isdir(f):
                    _recurse_dir(f)
                elif f.endswith('py'):
                    moduleName, (data, size) = self.__read_file(f, mangler)
                    if moduleName:
                        self.add_module(moduleName, data, size)

        _recurse_dir(dir)

    def get_mangle_base(self, *path):
        return len(os.path.join(*path).rsplit('.', 1)[0].replace('\\', '/').replace('/', '.')) + 1
@@ -88,112 +88,112 @@ class NiraiPackager:
        f.write(self.HEADER)
        f.write(self.process_modules())
        f.close()

    def generate_key(self, size=256):
        return os.urandom(size)

    def dump_key(self, key):
        for k in key:
            print ord(k),

        print

    def process_modules(self):
        # Pure virtual
        raise NotImplementedError('process_datagram')

    def get_file_contents(self, filename, keysize=0):
        with open(filename, 'rb') as f:
            data = f.read()

        if keysize:
            key = self.generate_key(keysize)
            rc4.rc4_setkey(key)
            data = key + rc4.rc4(data)

        return data
# END COPY FROM niraitools.py

class StridePackager(NiraiPackager):
    HEADER = 'STRIDETT'
    BASEDIR = '..' + os.sep

    def __init__(self, outfile):
        NiraiPackager.__init__(self, outfile)
        self.__manglebase = self.get_mangle_base(self.BASEDIR)

    def add_source_dir(self, dir):
        self.add_directory(self.BASEDIR + dir, mangler=self.__mangler)

    def add_data_file(self, file):
        mb = self.get_mangle_base('data/')
        self.add_file('data/%s.py' % file, mangler=lambda x: x[mb:])

    def __mangler(self, name):
        if name.endswith('AI') or name.endswith('UD') or name in ('ToontownAIRepository', 'ToontownUberRepository',
                                                                  'ToontownInternalRepository'):
            if not 'NonRepeatableRandomSource' in name:
                return ''

        return name[self.__manglebase:].strip('.')

    def generate_niraidata(self):
        print 'Generating niraidata'

        config = self.get_file_contents('../dependencies/config/release/en.prc')
        config += '\n\n' + self.get_file_contents('../dependencies/config/general.prc')
        key = self.generate_key(128)
        rc4.rc4_setkey(key)
        config = key + rc4.rc4(config)

        niraidata = 'CONFIG = %r' % config
        niraidata += '\nDC = %r' % self.get_file_contents('../dependencies/astron/dclass/stride.dc', 128)
        self.add_module('niraidata', niraidata, compile=True)

    def process_modules(self):
        with open('base.dg', 'rb') as f:
            basesize, = struct.unpack('<I', f.read(4))
            data = f.read()

        dg = Datagram()
        dg.addUint32(len(self.modules) + basesize)
        dg.appendData(data)

        for moduleName in self.modules:
            data, size = self.modules[moduleName]

            dg.addString(moduleName)
            dg.addInt32(size)
            dg.appendData(data)

        data = dg.getMessage()
        compressed = compressString(data, 9)
        key = self.generate_key(100)
        fixed = ''.join(chr((i ^ (5 * i + 7)) % ((i + 6) * 10)) for i in xrange(28))
        rc4.rc4_setkey(key + fixed)
        data = rc4.rc4(compressed)
        return key + data

# 1. Make the NRI
if args.make_nri:
    pkg = StridePackager('built/stride.dist')

    pkg.add_source_dir('otp')
    pkg.add_source_dir('toontown')
    pkg.add_data_file('NiraiStart')

    pkg.generate_niraidata()
    pkg.write_out()

# 2. Compile CXX stuff
if args.compile_cxx:
    sys.path.append('../../../N2')
    from niraitools import NiraiCompiler

    compiler = NiraiCompiler('stride.exe', r'"C:\\Users\\Usuario\\workspace\\nirai-panda3d\\thirdparty\\win-libs-vc10"',
                             libs=set(glob.glob('libpandadna/libpandadna.dir/Release/*.obj')))
    compiler.add_nirai_files()
    compiler.add_source('src/stride.cxx')

    compiler.run()
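To make the container format explicit in one place (the C++ loader in the next file performs the same steps natively): a hedged Python sketch of reading back what StridePackager.write_out() emits. It assumes the project's rc4 module is plain RC4 and that compressString() produces a standard zlib stream; names and paths here are illustrative only.

import struct, zlib

def rc4_crypt(data, key):
    # standard RC4: key scheduling, then keystream XOR (assumed to match the rc4 module)
    S, j = range(256), 0
    for i in range(256):
        j = (j + S[i] + ord(key[i % len(key)])) % 256
        S[i], S[j] = S[j], S[i]
    out, i, j = [], 0, 0
    for ch in data:
        i = (i + 1) % 256
        j = (j + S[i]) % 256
        S[i], S[j] = S[j], S[i]
        out.append(chr(ord(ch) ^ S[(S[i] + S[j]) % 256]))
    return ''.join(out)

with open('built/stride.dist', 'rb') as f:
    assert f.read(8) == 'STRIDETT'   # StridePackager.HEADER
    key = f.read(100)                # generate_key(100) in process_modules()
    fixed = ''.join(chr((i ^ (5 * i + 7)) % ((i + 6) * 10)) for i in xrange(28))
    blob = zlib.decompress(rc4_crypt(f.read(), key + fixed))

count, = struct.unpack('<I', blob[:4])   # module count, base modules included
pos = 4
for _ in xrange(count):
    namelen, = struct.unpack('<H', blob[pos:pos + 2]); pos += 2
    name = blob[pos:pos + namelen]; pos += namelen
    size, = struct.unpack('<i', blob[pos:pos + 4]); pos += 4
    code = blob[pos:pos + abs(size)]; pos += abs(size)   # negative size marks a package
    print name, len(code)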

View file

@@ -18,55 +18,55 @@ int niraicall_onPreStart(int argc, char* argv[])
{
    return 0;
}

int niraicall_onLoadGameData()
{
    fstream gd;

    // Open the file
    gd.open("stride.dist", ios_base::in | ios_base::binary);
    if (!gd.is_open())
    {
-        std::cerr << "unable to open game file" << std::endl;
+        std::cerr << "Unable to open game file!" << std::endl;
        return 1;
    }

    // Check the header
    char* read_header = new char[header_size];
    gd.read(read_header, header_size);

    if (memcmp(header, read_header, header_size))
    {
-        std::cerr << "invalid header" << std::endl;
+        std::cerr << "Invalid header" << std::endl;
        return 1;
    }

    delete[] read_header;

    // Extract the key
    char* key = new char[keysize + fixedsize];
    char* fixed = new char[keysize];
    for (int i = 0; i < fixedsize; ++i)
        fixed[i] = (i ^ (5 * i + 7)) % ((i + 6) * 10);

    gd.read(key, keysize);
    memcpy(&key[keysize], fixed, fixedsize);

    std::stringstream ss;
    ss << gd.rdbuf();
    gd.close();

    // Decrypt
    std::string rawdata = ss.str();
    std::string decrypted_data = rc4(rawdata.c_str(), key, rawdata.size(),
                                     keysize + fixedsize);
    delete[] key;
    delete[] fixed;

    // Decompress and read
    std::string decompressed = decompress_string(decrypted_data);
    Datagram dg(decompressed);
    DatagramIterator dgi(dg);
@@ -80,34 +80,34 @@ int niraicall_onLoadGameData()
        module = dgi.get_string();
        size = dgi.get_int32();
        data = dgi.extract_bytes(abs(size));

        char* name = new char[module.size() + 1];
        memcpy(name, module.c_str(), module.size());
        memset(&name[module.size()], 0, 1);

        unsigned char* code = new unsigned char[data.size()];
        memcpy(code, data.c_str(), data.size());

        _frozen fz;
        fz.name = name;
        fz.code = code;
        fz.size = size;

        memcpy(&fzns[i], &fz, sizeof(_frozen));
    }

    nassertd(dgi.get_remaining_size() == 0)
    {
-        std::cerr << "corrupted data" << std::endl;
+        std::cerr << "Corrupted data!" << std::endl;
        return 1;
    }

    memset(&fzns[num_modules], 0, sizeof(_frozen));
    PyImport_FrozenModules = fzns;

    // libpandadna
    init_libpandadna();
    initlibpandadna();
    return 0;
}

View file

@@ -211,14 +211,14 @@ class OTPClientRepository(ClientRepositoryBase):
        def hasPlayToken():
            return self.playToken != None

    def readDCFile(self, dcFileNames=None):
        dcFile = self.getDcFile()
        dcFile.clear()
        self.dclassesByName = {}
        self.dclassesByNumber = {}
        self.hashVal = 0

        if isinstance(dcFileNames, types.StringTypes):
            # If we were given a single string, make it a list.
            dcFileNames = [dcFileNames]
@@ -229,13 +229,13 @@ class OTPClientRepository(ClientRepositoryBase):
                # For Nirai
                readResult = dcFile.read(dcStream, '__dc__')
                del __builtin__.dcStream
            except NameError:
                readResult = dcFile.readAll()

            if not readResult:
                self.notify.error("Could not read dc file.")
        else:
            searchPath = getModelPath().getValue()

            for dcFileName in dcFileNames:
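For context (not part of this commit): dcStream is created in NiraiStart (the first file of this diff) and is expected to hold the decrypted dclass file, which generate_niraidata() stored as a 128-byte RC4 key followed by the RC4-encrypted stride.dc. A minimal sketch of that fill step, with assumed names mirroring the build script:

import __builtin__
from panda3d.core import StringStream   # illustrative import path
import niraidata, rc4                    # assumed to be provided by the Nirai build

key, enc = niraidata.DC[:128], niraidata.DC[128:]
rc4.rc4_setkey(key)
__builtin__.dcStream = StringStream()
__builtin__.dcStream.setData(rc4.rc4(enc))   # what dcFile.read(dcStream, '__dc__') parses above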

View file

@@ -34,7 +34,7 @@ args = parser.parse_args()
for prc in args.config:
    loadPrcFile(prc)

if os.path.isfile('dependencies/config/local.prc'):
    loadPrcFile('dependencies/config/local.prc')

View file

@@ -31,7 +31,7 @@ if __debug__:
    loadPrcFile('dependencies/config/general.prc')
    loadPrcFile('dependencies/config/release/dev.prc')

    if os.path.isfile('dependencies/config/local.prc'):
        loadPrcFile('dependencies/config/local.prc')
@@ -106,28 +106,6 @@ loadPrcFileData('Settings: musicVol', 'audio-master-music-volume %s' % settings[
loadPrcFileData('Settings: sfxVol', 'audio-master-sfx-volume %s' % settings['sfxVol'])
loadPrcFileData('Settings: loadDisplay', 'load-display %s' % settings['loadDisplay'])

-import os
-from toontown.toonbase.ContentPacksManager import ContentPackError
-from toontown.toonbase.ContentPacksManager import ContentPacksManager
-contentPacksFilepath = ConfigVariableString(
-    'content-packs-filepath', 'user/contentpacks/').getValue()
-contentPacksSortFilename = ConfigVariableString(
-    'content-packs-sort-filename', 'sort.yaml').getValue()
-if not os.path.exists(contentPacksFilepath):
-    os.makedirs(contentPacksFilepath)
-__builtin__.ContentPackError = ContentPackError
-__builtin__.contentPacksMgr = ContentPacksManager(
-    filepath=contentPacksFilepath, sortFilename=contentPacksSortFilename)
-contentPacksMgr.applyAll()
-languagePack = settings['language'].lower() + '.mf'
-if contentPacksMgr.isApplicable(languagePack):
-    contentPacksMgr.applyMultifile(languagePack)

import time
import sys
import random

View file

@@ -29,7 +29,7 @@ def rejectConfig(issue, securityIssue=True, retarded=True):
        print '"Either down\'s or autism"\n - JohnnyDaPirate, 2015'
    print 'Go fix that!'
    exit()

def entropy(string):
    prob = [float(string.count(c)) / len(string) for c in dict.fromkeys(list(string))]
    entropy = -sum([p * math.log(p) / math.log(2.0) for p in prob])
@@ -45,10 +45,10 @@ accountServerHashAlgo = config.GetString('account-server-hash-algo', 'sha512')
if accountDBType == 'remote':
    if accountServerSecret == 'dev':
        rejectConfig('you have not changed the secret in config/local.prc')

    if len(accountServerSecret) < 16:
        rejectConfig('the secret is too small! Make it 16+ bytes', retarded=False)

    secretLength = len(accountServerSecret)
    ideal = entropyIdeal(secretLength) / 2
    entropy = entropy(accountServerSecret)
@@ -56,11 +56,11 @@ if accountDBType == 'remote':
        rejectConfig('the secret entropy is too low! For %d bytes,'
                     ' it should be %d. Currently it is %d' % (secretLength, ideal, entropy),
                     retarded=False)

    hashAlgo = getattr(hashlib, accountServerHashAlgo, None)
    if not hashAlgo:
        rejectConfig('%s is not a valid hash algo' % accountServerHashAlgo, securityIssue=False)

    hashSize = len(hashAlgo('').digest())

minAccessLevel = config.GetInt('min-access-level', 100)
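A worked example of the gate above, not from this commit: entropy() is restated from the hunk with its return made explicit, and entropyIdeal() is not visible in this diff, so it is assumed here to be the maximum per-character Shannon entropy for a string of the given length (log2 of its length).

import math

def entropy(string):
    prob = [float(string.count(c)) / len(string) for c in dict.fromkeys(list(string))]
    return -sum([p * math.log(p) / math.log(2.0) for p in prob])

def entropyIdeal(length):
    # assumption: every character distinct, i.e. log2(length)
    return math.log(length) / math.log(2.0)

print entropy('aaaaaaaaaaaaaaaa'), entropyIdeal(16) / 2   # ~0 vs 2.0 -> rejected
print entropy('Kq2!mZr7@Wc4pX9$'), entropyIdeal(16) / 2   # 4.0 vs 2.0 -> accepted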
@@ -147,16 +147,16 @@ class AccountDB:
    def lookup(self, data, callback):
        userId = data['userId']

        data['success'] = True
        data['accessLevel'] = max(data['accessLevel'], minAccessLevel)

        if str(userId) not in self.dbm:
            data['accountId'] = 0
        else:
            data['accountId'] = int(self.dbm[str(userId)])

        callback(data)
        return data
@@ -171,7 +171,7 @@ class AccountDB:
class DeveloperAccountDB(AccountDB):
    notify = directNotify.newCategory('DeveloperAccountDB')

    def lookup(self, userId, callback):
        return AccountDB.lookup(self, {'userId': userId,
                                       'accessLevel': 700,
@@ -200,33 +200,33 @@ class RemoteAccountDB(AccountDB):
        Token format:
        The token is obfuscated a bit, but nothing too hard to read.
        Most of the security is based on the hash.

        I. Data contained in a token:
        A json-encoded dict, which contains timestamp, userid and extra info

        II. Token format
        X = BASE64(ROT13(DATA)[::-1])
        H = HASH(X)[::-1]
        Token = BASE64(H + X)
        '''
        try:
            token = token.decode('base64')
            hash, token = token[:hashSize], token[hashSize:]
            correctHash = hashAlgo(token + accountServerSecret).digest()
            if len(hash) != len(correctHash):
-                raise ValueError('invalid hash')
+                raise ValueError('Invalid hash.')

            value = 0
            for x, y in zip(hash[::-1], correctHash):
                value |= ord(x) ^ ord(y)

            if value:
-                raise ValueError('invalid hash')
+                raise ValueError('Invalid hash.')

            token = json.loads(token.decode('base64')[::-1].decode('rot13'))
        except:
            resp = {'success': False}
            callback(resp)
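A hedged sketch (not part of this diff) of minting a token that the check above accepts, following the docstring's layout plus the secret that the hash actually covers. make_token and the dict keys are illustrative; hashAlgo and secret correspond to the accountServerHashAlgo and accountServerSecret settings validated earlier in this file.

import hashlib
import json
import time

def make_token(userId, secret, hashAlgo=hashlib.sha512, extra=None):
    data = dict(extra or {}, timestamp=int(time.time()), userid=userId)
    x = json.dumps(data).encode('rot13')[::-1].encode('base64')   # X = BASE64(ROT13(DATA)[::-1])
    h = hashAlgo(x + secret).digest()[::-1]                        # H = HASH(X + secret)[::-1]
    return (h + x).encode('base64')                                # Token = BASE64(H + X)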
@@ -293,12 +293,12 @@ class LoginAccountFSM(OperationFSM):
            return

        self.account = fields

        if self.notAfter:
            if self.account.get('LAST_LOGIN_TS', 0) > self.notAfter:
                self.notify.debug('Rejecting old token: %d, notAfter=%d' % (self.account.get('LAST_LOGIN_TS', 0), self.notAfter))
                return self.__handleLookup({'success': False})

        self.demand('SetAccount')

    def enterCreateAccount(self):
@@ -1001,7 +1001,7 @@ class ClientServicesManagerUD(DistributedObjectGlobalUD):
            self.accountDB = RemoteAccountDB(self)
        else:
            self.notify.error('Invalid accountdb-type: ' + accountDBType)

    def killConnection(self, connId, reason):
        datagram = PyDatagram()
        datagram.addServerHeader(

View file

@@ -35,7 +35,7 @@ for prc in args.config:
if os.path.isfile('dependencies/config/local.prc'):
    loadPrcFile('dependencies/config/local.prc')

localconfig = ''
if args.base_channel:
    localconfig += 'air-base-channel %s\n' % args.base_channel