Mirror of https://github.com/Sneed-Group/Poodletooth-iLand (synced 2024-10-31 00:37:54 +00:00)
Commit 9b5a92c449
21 changed files with 807 additions and 428 deletions
.gitignore (vendored): 6 changes

@@ -1,5 +1,6 @@
 # Python artifacts
 *.pyc
+*.pyo

 # Batch
 *.bat
@@ -14,4 +15,7 @@
 *.lnk

 # Git
 *.rej
+
+# Local config
+dependencies/config/local.prc
build/.gitignore (vendored, new file): 3 additions

@@ -0,0 +1,3 @@
+libpandadna/
+built/
+*.pdb
build/base.dg (new file): BIN, binary file not shown
build/data/NiraiStart.py (new file): 76 additions

from panda3d.core import *
import __builtin__, os
import rc4

import niraidata

# Config
prc = niraidata.CONFIG
key, prc = prc[:32], prc[32:]
rc4.rc4_setkey(key)
prc = rc4.rc4(prc)

for line in prc.split('\n'):
    line = line.strip()
    if line:
        loadPrcFileData('nirai config', line)

del prc

# DC
__builtin__.dcStream = StringStream()

dc = niraidata.DC
key, dc = dc[:32], dc[32:]
rc4.rc4_setkey(key)
dc = rc4.rc4(dc)

dcStream.setData(dc)
del dc
rc4.rc4_setkey('\0\0\0\0')

# Resources
# TO DO: sign and verify the phases to prevent edition

vfs = VirtualFileSystem.getGlobalPtr()
mfs = (3, 3.5, 4, 5, 5.5, 6, 7, 8, 9, 10, 11, 12, 13)
abort = False

for mf in mfs:
    filename = 'resources/default/phase_%s.mf' % mf
    if not os.path.isfile(filename):
        print 'Phase %s not found' % filename
        abort = True
        break

    mf = Multifile()
    mf.openRead(filename)

    if not vfs.mount(mf, '../resources', 0):
        print 'Unable to mount %s' % filename
        abort = True
        break

# Packs
pack = os.environ.get('TT_STRIDE_CONTENT_PACK')
if pack and pack != 'default':
    print 'Loading content pack', pack
    for file in glob.glob('resources/%s/*.mf' % pack):
        mf = Multifile()
        mf.openReadWrite(Filename(file))
        names = mf.getSubfileNames()
        for name in names:
            ext = os.path.splitext(name)[1]
            if ext not in ('.jpg', '.jpeg', '.ogg', '.rgb'):
                mf.removeSubfile(name)

        mf.flush()

        if not vfs.mount(mf, '../resources', 0):
            print 'Unable to mount %s' % filename
            abort = True
            break

if not abort:
    # Run
    import toontown.toonbase.ClientStart
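Both niraidata blobs above follow the same key-prefix convention: the first 32 bytes are a one-off RC4 key and the remainder is ciphertext. A minimal Python 2 sketch of that unwrapping step, with a plain pure-Python RC4 standing in for the bundled rc4.pyd (a binary not shown in this commit); the 32-byte key size comes from the code above, everything else is illustrative:

def rc4_crypt(data, key):
    # Standard RC4 (KSA then PRGA); stands in for the rc4.pyd extension module.
    s = range(256)
    j = 0
    for i in range(256):
        j = (j + s[i] + ord(key[i % len(key)])) % 256
        s[i], s[j] = s[j], s[i]
    i = j = 0
    out = []
    for ch in data:
        i = (i + 1) % 256
        j = (j + s[i]) % 256
        s[i], s[j] = s[j], s[i]
        out.append(chr(ord(ch) ^ s[(s[i] + s[j]) % 256]))
    return ''.join(out)

def unwrap(blob, keysize=32):
    # First `keysize` bytes are the per-build key, the rest is the RC4 ciphertext.
    key, payload = blob[:keysize], blob[keysize:]
    return rc4_crypt(payload, key)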
build/make.py (new file): 199 additions

from panda3d.core import *

import argparse, marshal, struct
import glob, sys, os
import rc4

parser = argparse.ArgumentParser()
parser.add_argument('--compile-cxx', '-c', action='store_true',
                    help='Compile the CXX codes and generate Nirai.exe into built.')
parser.add_argument('--make-nri', '-n', action='store_true',
                    help='Generate stride NRI.')
args = parser.parse_args()

# BEGIN (STRIPPED AND MODIFIED) COPY FROM niraitools.py
class NiraiPackager:
    HEADER = 'NRI\n'

    def __init__(self, outfile):
        self.modules = {}
        self.outfile = outfile

    def __read_file(self, filename, mangler=None):
        with open(filename, 'rb') as f:
            data = f.read()

        base = filename.rsplit('.', 1)[0].replace('\\', '/').replace('/', '.')
        pkg = base.endswith('.__init__')
        moduleName = base.rsplit('.', 1)[0] if pkg else base

        name = moduleName
        if mangler is not None:
            name = mangler(name)

        if not name:
            return '', ('', 0)

        try:
            data = self.compile_module(name, data)

        except:
            print 'WARNING: Failed to compile', filename
            return '', ('', 0)

        size = len(data) * (-1 if pkg else 1)
        return name, (data, size)

    def compile_module(self, name, data):
        return marshal.dumps(compile(data, name, 'exec'))

    def add_module(self, moduleName, data, size=None, compile=False):
        if compile:
            data = self.compile_module(moduleName, data)

        if size is None:
            size = len(data)

        self.modules[moduleName] = (data, size)

    def add_file(self, filename, mangler=None):
        print 'Adding file', filename
        moduleName, (data, size) = self.__read_file(filename, mangler)
        if moduleName:
            moduleName = os.path.basename(filename).rsplit('.', 1)[0]
            self.add_module(moduleName, data, size)

    def add_directory(self, dir, mangler=None):
        print 'Adding directory', dir

        def _recurse_dir(dir):
            for f in os.listdir(dir):
                f = os.path.join(dir, f)

                if os.path.isdir(f):
                    _recurse_dir(f)

                elif f.endswith('py'):
                    moduleName, (data, size) = self.__read_file(f, mangler)
                    if moduleName:
                        self.add_module(moduleName, data, size)

        _recurse_dir(dir)

    def get_mangle_base(self, *path):
        return len(os.path.join(*path).rsplit('.', 1)[0].replace('\\', '/').replace('/', '.')) + 1

    def write_out(self):
        f = open(self.outfile, 'wb')
        f.write(self.HEADER)
        f.write(self.process_modules())
        f.close()

    def generate_key(self, size=256):
        return os.urandom(size)

    def dump_key(self, key):
        for k in key:
            print ord(k),

        print

    def process_modules(self):
        # Pure virtual
        raise NotImplementedError('process_datagram')

    def get_file_contents(self, filename, keysize=0):
        with open(filename, 'rb') as f:
            data = f.read()

        if keysize:
            key = self.generate_key(keysize)
            rc4.rc4_setkey(key)
            data = key + rc4.rc4(data)

        return data
# END COPY FROM niraitools.py

class StridePackager(NiraiPackager):
    HEADER = 'STRIDETT'
    BASEDIR = '..' + os.sep

    def __init__(self, outfile):
        NiraiPackager.__init__(self, outfile)
        self.__manglebase = self.get_mangle_base(self.BASEDIR)

    def add_source_dir(self, dir):
        self.add_directory(self.BASEDIR + dir, mangler=self.__mangler)

    def add_data_file(self, file):
        mb = self.get_mangle_base('data/')
        self.add_file('data/%s.py' % file, mangler=lambda x: x[mb:])

    def __mangler(self, name):
        if name.endswith('AI') or name.endswith('UD') or name in ('ToontownAIRepository', 'ToontownUberRepository',
                                                                  'ToontownInternalRepository'):
            if not 'NonRepeatableRandomSource' in name:
                return ''

        return name[self.__manglebase:].strip('.')

    def generate_niraidata(self):
        print 'Generating niraidata'

        config = self.get_file_contents('../dependencies/config/release/en.prc')
        config += '\n\n' + self.get_file_contents('../dependencies/config/general.prc')
        key = self.generate_key(128)
        rc4.rc4_setkey(key)
        config = key + rc4.rc4(config)

        niraidata = 'CONFIG = %r' % config
        niraidata += '\nDC = %r' % self.get_file_contents('../dependencies/astron/dclass/stride.dc', 128)
        self.add_module('niraidata', niraidata, compile=True)

    def process_modules(self):
        with open('base.dg', 'rb') as f:
            basesize, = struct.unpack('<I', f.read(4))
            data = f.read()

        dg = Datagram()
        dg.addUint32(len(self.modules) + basesize)
        dg.appendData(data)

        for moduleName in self.modules:
            data, size = self.modules[moduleName]

            dg.addString(moduleName)
            dg.addInt32(size)
            dg.appendData(data)

        data = dg.getMessage()
        compressed = compressString(data, 9)
        key = self.generate_key(100)
        fixed = ''.join(chr((i ^ (5 * i + 7)) % ((i + 6) * 10)) for i in xrange(28))
        rc4.rc4_setkey(key + fixed)
        data = rc4.rc4(compressed)
        return key + data

# 1. Make the NRI
if args.make_nri:
    pkg = StridePackager('built/stride.dist')

    pkg.add_source_dir('otp')
    pkg.add_source_dir('toontown')

    pkg.add_data_file('NiraiStart')

    pkg.generate_niraidata()
    pkg.write_out()

# 2. Compile CXX stuff
if args.compile_cxx:
    sys.path.append('../../../N2')
    from niraitools import NiraiCompiler

    compiler = NiraiCompiler('stride.exe', r'"C:\\Users\\Usuario\\workspace\\nirai-panda3d\\thirdparty\\win-libs-vc10"',
                             libs=set(glob.glob('libpandadna/libpandadna.dir/Release/*.obj')))
    compiler.add_nirai_files()
    compiler.add_source('src/stride.cxx')

    compiler.run()
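process_modules() above packs the module table with Panda's Datagram before compressing and RC4-encrypting it. A rough standalone Python 2 sketch of the same record layout using plain struct and zlib; it assumes Datagram.addString writes a 16-bit little-endian length prefix, that integers are little-endian, and that compressString wraps zlib, so treat it as an approximation of the on-disk format rather than the exact one:

import marshal, struct, zlib

def pack_modules(modules, basesize=0):
    # Mirrors StridePackager.process_modules before compression/encryption:
    # uint32 module count, then per module: length-prefixed name, int32 size, raw code.
    payload = struct.pack('<I', len(modules) + basesize)
    for name, (code, size) in modules.items():
        payload += struct.pack('<H', len(name)) + name
        payload += struct.pack('<i', size)
        payload += code
    return payload

# Hypothetical single-module table; a negative size would mark a package (__init__).
code = marshal.dumps(compile('print "hello"', 'HelloWorld', 'exec'))
blob = zlib.compress(pack_modules({'HelloWorld': (code, len(code))}), 9)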
build/rc4.pyd (new file): BIN, binary file not shown
build/src/stride.cxx (new file): 113 additions

#include "nirai.h"
#include <datagram.h>
#include <datagramIterator.h>
#include <compress_string.h>

string rc4(const char* data, const char* key, int ds, int ks);

extern "C" __declspec(dllexport) void initlibpandadna();
void init_libpandadna();

const char* header = "STRIDETT";
const int header_size = 8;

const int keysize = 100;
const int fixedsize = 28;

int niraicall_onPreStart(int argc, char* argv[])
{
    return 0;
}

int niraicall_onLoadGameData()
{
    fstream gd;

    // Open the file
    gd.open("stride.dist", ios_base::in | ios_base::binary);
    if (!gd.is_open())
    {
        std::cerr << "Unable to open game file!" << std::endl;
        return 1;
    }

    // Check the header
    char* read_header = new char[header_size];
    gd.read(read_header, header_size);

    if (memcmp(header, read_header, header_size))
    {
        std::cerr << "Invalid header" << std::endl;
        return 1;
    }

    delete[] read_header;

    // Extract the key
    char* key = new char[keysize + fixedsize];
    char* fixed = new char[keysize];

    for (int i = 0; i < fixedsize; ++i)
        fixed[i] = (i ^ (5 * i + 7)) % ((i + 6) * 10);

    gd.read(key, keysize);
    memcpy(&key[keysize], fixed, fixedsize);

    std::stringstream ss;
    ss << gd.rdbuf();
    gd.close();

    // Decrypt
    std::string rawdata = ss.str();
    std::string decrypted_data = rc4(rawdata.c_str(), key, rawdata.size(),
                                     keysize + fixedsize);
    delete[] key;
    delete[] fixed;

    // Decompress and read
    std::string decompressed = decompress_string(decrypted_data);

    Datagram dg(decompressed);
    DatagramIterator dgi(dg);

    unsigned int num_modules = dgi.get_uint32();
    _frozen* fzns = new _frozen[num_modules + 1];
    std::string module, data;
    int size;

    for (unsigned int i = 0; i < num_modules; ++i)
    {
        module = dgi.get_string();
        size = dgi.get_int32();
        data = dgi.extract_bytes(abs(size));

        char* name = new char[module.size() + 1];
        memcpy(name, module.c_str(), module.size());
        memset(&name[module.size()], 0, 1);

        unsigned char* code = new unsigned char[data.size()];
        memcpy(code, data.c_str(), data.size());

        _frozen fz;
        fz.name = name;
        fz.code = code;
        fz.size = size;

        memcpy(&fzns[i], &fz, sizeof(_frozen));
    }

    nassertd(dgi.get_remaining_size() == 0)
    {
        std::cerr << "Corrupted data!" << std::endl;
        return 1;
    }

    memset(&fzns[num_modules], 0, sizeof(_frozen));
    PyImport_FrozenModules = fzns;

    // libpandadna
    init_libpandadna();
    initlibpandadna();

    return 0;
}
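Only the first 100 key bytes travel inside stride.dist; the loader regenerates the 28-byte tail from the same index formula used in make.py. A one-line Python 2 parity check mirroring the C++ loop above:

fixed = ''.join(chr((i ^ (5 * i + 7)) % ((i + 6) * 10)) for i in xrange(28))
print repr(fixed)  # same bytes as the fixed[] array the C++ loader fills in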
dependencies/astron/dclass/stride.dc (vendored): 1 addition

@@ -34,6 +34,7 @@ dclass Account {
   string LAST_LOGIN db;
   string ACCOUNT_ID db;
   uint16 ACCESS_LEVEL db;
+  uint64 LAST_LOGIN_TS db;
 };

 struct BarrierData {
Deleted file: 5 deletions

cd ../../../

"dependencies/panda/python/ppython.exe" "dev/tools/dcimports/parse_dcimports.py" -o "otp/distributed/DCClassImports.py" "dependencies/astron/dclass/stride.dc"

pause
Deleted file: 50 deletions

import argparse

from pandac.PandaModules import *


parser = argparse.ArgumentParser()
parser.add_argument('--output', '-o', default='DCClassImports.py',
                    help='The filename of the generated Python module.')
parser.add_argument('filenames', nargs='+', default=['otp.dc', 'toon.dc'],
                    help='The DC class files to be included in the generated Python module.')
args = parser.parse_args()

dcFile = DCFile()
for filename in args.filenames:
    dcFile.read(Filename.fromOsSpecific(filename))

dcImports = {}
for n in xrange(dcFile.getNumImportModules()):
    moduleName = dcFile.getImportModule(n)[:].split('/', 1)[0]
    if moduleName not in dcImports:
        dcImports[moduleName] = []
    importSymbols = []
    for i in xrange(dcFile.getNumImportSymbols(n)):
        symbolName = dcFile.getImportSymbol(n, i).split('/', 1)[0]
        importSymbols.append(symbolName)
    dcImports[moduleName].extend(importSymbols)

data = '''\
# This file was generated by the parse_dclass.py utility.
from pandac.PandaModules import *


hashVal = %r


''' % dcFile.getHash()

for moduleName, importSymbols in dcImports.items():
    data += 'from %s import %s\n' % (moduleName, ', '.join(importSymbols))

data += '''

dcImports = locals().copy()
'''

print 'Writing %s...' % args.output
with open(args.output, 'w') as f:
    f.write(data)

print 'Done writing %s.' % args.output
@@ -7,6 +7,7 @@ echo.
 echo #1 - Localhost
 echo #2 - Dev Server
 echo #3 - Custom
+echo #4 - Local RemoteDB
 echo.

 :selection
@@ -18,9 +19,11 @@ if %INPUT%==1 (
     set TTS_GAMESERVER=127.0.0.1
 ) else if %INPUT%==2 (
     set TTS_GAMESERVER=167.114.220.172
+) else if %INPUT%==4 (
+    set TTS_GAMESERVER=127.0.0.1
 ) else if %INPUT%==3 (
     echo.
     set /P TTS_GAMESERVER=Gameserver:
 ) else (
     goto selection
 )
@@ -30,6 +33,9 @@ echo.
 if %INPUT%==2 (
     set /P ttsUsername="Username: "
     set /P ttsPassword="Password: "
+) else if %INPUT%==4 (
+    set /P ttsUsername="Username: "
+    set /P ttsPassword="Password: "
 ) else (
     set /P TTS_PLAYCOOKIE=Username:
 )
@@ -53,6 +59,8 @@ cd ../../
 if %INPUT%==2 (
     "dependencies/panda/python/ppython.exe" -m toontown.toonbase.ClientStartRemoteDB
+) else if %INPUT%==4 (
+    "dependencies/panda/python/ppython.exe" -m toontown.toonbase.ClientStartRemoteDB
 ) else (
     "dependencies/panda/python/ppython.exe" -m toontown.toonbase.ClientStart
 )

Deleted file: 48 deletions

# This file was generated by the parse_dclass.py utility.
from pandac.PandaModules import *
hashVal = 3796631281L
from toontown.coghq import DistributedCashbotBossSafe, DistributedCashbotBossCrane, DistributedBattleFactory, DistributedCashbotBossTreasure, DistributedCogHQDoor, DistributedCogHQExteriorDoor, DistributedSellbotHQDoor, DistributedFactoryElevatorExt, DistributedMintElevatorExt, DistributedLawOfficeElevatorExt, DistributedLawOfficeElevatorInt, LobbyManager, DistributedMegaCorp, DistributedFactory, DistributedLawOffice, DistributedLawOfficeFloor, DistributedLift, DistributedDoorEntity, DistributedSwitch, DistributedButton, DistributedTrigger, DistributedCrushableEntity, DistributedCrusherEntity, DistributedStomper, DistributedStomperPair, DistributedLaserField, DistributedGolfGreenGame, DistributedSecurityCamera, DistributedMover, DistributedElevatorMarker, DistributedBarrelBase, DistributedGagBarrel, DistributedBeanBarrel, DistributedHealBarrel, DistributedGrid, ActiveCell, DirectionalCell, CrusherCell, DistributedCrate, DistributedSinkingPlatform, BattleBlocker, DistributedMint, DistributedMintRoom, DistributedMintBattle, DistributedStage, DistributedStageRoom, DistributedStageBattle, DistributedLawbotBossGavel, DistributedLawbotCannon, DistributedLawbotChair, DistributedCogKart, DistributedCountryClub, DistributedCountryClubRoom, DistributedMoleField, DistributedCountryClubBattle, DistributedMaze, DistributedFoodBelt, DistributedBanquetTable, DistributedGolfSpot
from toontown.golf import DistributedPhysicsWorld, DistributedGolfHole, DistributedGolfCourse
from toontown.building import DistributedAnimatedProp, DistributedTrophyMgr, DistributedBuilding, DistributedBuildingQueryMgr, DistributedToonInterior, DistributedToonHallInterior, DistributedSuitInterior, DistributedHQInterior, DistributedGagshopInterior, DistributedPetshopInterior, DistributedKartShopInterior, DistributedDoor, DistributedKnockKnockDoor, DistributedElevator, DistributedElevatorFSM, DistributedElevatorExt, DistributedElevatorInt, DistributedElevatorFloor, DistributedBossElevator, DistributedVPElevator, DistributedCFOElevator, DistributedCJElevator, DistributedBBElevator, DistributedBoardingParty, DistributedTutorialInterior, DistributedClubElevator
from toontown.uberdog.DistributedPartyManager import DistributedPartyManager
from otp.friends import FriendManager
from otp.level import DistributedLevel, DistributedEntity, DistributedInteractiveEntity
from toontown.shtiker import DeleteManager, PurchaseManager, NewbiePurchaseManager
from toontown.groups import GroupManager
from toontown.uberdog.ClientServicesManager import ClientServicesManager
from toontown.ai import WelcomeValleyManager, NewsManager, DistributedAprilToonsMgr, DistributedBlackCatMgr, DistributedReportMgr, DistributedPolarPlaceEffectMgr, DistributedGreenToonEffectMgr, DistributedResistanceEmoteMgr, DistributedScavengerHuntTarget, DistributedTrickOrTreatTarget, DistributedWinterCarolingTarget, DistributedJorElCam
from otp.chat import ChatAgent
from toontown.parties.GlobalPartyManager import GlobalPartyManager
from toontown.racing.DistributedStartingBlock import DistributedViewingBlock
from toontown.suit import DistributedSuitPlanner, DistributedSuitBase, DistributedSuit, DistributedTutorialSuit, DistributedFactorySuit, DistributedMintSuit, DistributedStageSuit, DistributedSellbotBoss, DistributedCashbotBoss, DistributedCashbotBossGoon, DistributedGoon, DistributedGridGoon, DistributedLawbotBoss, DistributedLawbotBossSuit, DistributedBossbotBoss
from toontown.distributed import ToontownDistrict, ToontownDistrictStats, DistributedTimer
from toontown.effects import DistributedFireworkShow
from toontown.safezone import DistributedTrolley, DistributedPillow, DistributedPartyGate, DistributedBoat, DistributedButterfly, DistributedMMPiano, DistributedDGFlower, DistributedFishingSpot, SafeZoneManager, DistributedTreasure, DistributedGolfKart, DistributedPicnicBasket, DistributedPicnicTable, DistributedChineseCheckers, DistributedCheckers, DistributedFindFour
from toontown.fishing import DistributedFishingPond, DistributedFishingTarget, DistributedPondBingoManager
from toontown.minigame import DistributedMinigame, DistributedMinigameTemplate, DistributedRaceGame, DistributedCannonGame, DistributedPatternGame, DistributedRingGame, DistributedTagGame, DistributedMazeGame, DistributedTugOfWarGame, DistributedCatchGame, DistributedDivingGame, DistributedTargetGame, DistributedVineGame, DistributedIceGame, DistributedCogThiefGame, DistributedTwoDGame
from toontown.racing import DistributedVehicle, DistributedStartingBlock, DistributedRace, DistributedKartPad, DistributedRacePad, DistributedViewPad, DistributedStartingBlock, DistributedLeaderBoard, DistributedGag, DistributedProjectile
from toontown.catalog import CatalogManager, AccountDate
from toontown.parties import DistributedParty, DistributedPartyActivity, DistributedPartyTeamActivity, DistributedPartyCannon, DistributedPartyCannonActivity, DistributedPartyCatchActivity, DistributedPartyWinterCatchActivity, DistributedPartyCogActivity, DistributedPartyWinterCogActivity, DistributedPartyFireworksActivity, DistributedPartyDanceActivityBase, DistributedPartyDanceActivity, DistributedPartyDance20Activity, DistributedPartyValentineDanceActivity, DistributedPartyValentineDance20Activity, DistributedPartyTrampolineActivity, DistributedPartyValentineTrampolineActivity, DistributedPartyVictoryTrampolineActivity, DistributedPartyWinterTrampolineActivity, DistributedPartyTugOfWarActivity, DistributedPartyJukeboxActivityBase, DistributedPartyJukeboxActivity, DistributedPartyJukebox40Activity, DistributedPartyValentineJukeboxActivity, DistributedPartyValentineJukebox40Activity
from toontown.pets.DistributedPet import *
from toontown.friends import TTSFriendsManager
from toontown.cogdominium import DistributedCogdoInterior, DistributedCogdoBattleBldg, DistributedCogdoElevatorExt, DistributedCogdoElevatorInt, DistributedCogdoBarrel, DistCogdoGame, DistCogdoLevelGame, DistCogdoBoardroomGame, DistCogdoCraneGame, DistCogdoMazeGame, DistCogdoFlyingGame, DistCogdoCrane, DistCogdoCraneMoneyBag, DistCogdoCraneCog
from toontown.uberdog.GlobalLobbyManager import GlobalLobbyManager
from toontown.uberdog.ARGManager import ARGManager
from otp.distributed import Account, DistributedDistrict, DistributedDirectory
from toontown.estate import DistributedCannon, DistributedTarget, EstateManager, DistributedEstate, DistributedHouse, DistributedHouseInterior, DistributedGarden, DistributedHouseDoor, DistributedMailbox, DistributedFurnitureManager, DistributedFurnitureItem, DistributedBank, DistributedCloset, DistributedTrunk, DistributedPhone, DistributedFireworksCannon, DistributedLawnDecor, DistributedGardenPlot, DistributedGardenBox, DistributedFlower, DistributedGagTree, DistributedStatuary, DistributedToonStatuary, DistributedChangingStatuary, DistributedAnimatedStatuary, DistributedPlantBase, DistributedLawnDecor
from toontown.uberdog.DistributedLobbyManager import DistributedLobbyManager
from toontown.toon import DistributedToon, DistributedNPCToonBase, DistributedNPCToon, DistributedSmartNPC, DistributedNPCSpecialQuestGiver, DistributedNPCFlippyInToonHall, DistributedNPCScientist, DistributedNPCClerk, DistributedNPCTailor, DistributedNPCBlocker, DistributedNPCFisherman, DistributedNPCPartyPerson, DistributedNPCPetclerk, DistributedNPCKartClerk, DistributedNPCGlove, DistributedNPCLaffRestock
from toontown.tutorial import DistributedBattleTutorial, TutorialManager
from toontown.pets import DistributedPetProxy
from toontown.coderedemption.TTCodeRedemptionMgr import TTCodeRedemptionMgr
from direct.distributed import DistributedObject, DistributedNode, DistributedSmoothNode, DistributedCartesianGrid, DistributedCamera, DistributedObjectGlobal
from otp.ai import TimeManager, MagicWordManager
from otp.avatar import DistributedAvatar, DistributedPlayer, AvatarHandle
from toontown.battle import DistributedBattleBase, DistributedBattle, DistributedBattleBldg, DistributedBattleFinal, DistributedBattleWaiters, DistributedBattleDiners
dcImports = locals().copy()
@@ -11,7 +11,7 @@ from direct.task import Task
 from pandac.PandaModules import *
 from otp.avatar import Avatar, DistributedAvatar
 from otp.avatar.DistributedPlayer import DistributedPlayer
-from otp.distributed import DCClassImports, OtpDoGlobals
+from otp.distributed import OtpDoGlobals
 from otp.distributed.OtpDoGlobals import *
 from otp.distributed.TelemetryLimiter import TelemetryLimiter
 from otp.otpbase import OTPGlobals, OTPLocalizer
@@ -211,57 +211,161 @@ class OTPClientRepository(ClientRepositoryBase):

         def hasPlayToken():
             return self.playToken != None

     def readDCFile(self, dcFileNames=None):
         dcFile = self.getDcFile()
         dcFile.clear()
         self.dclassesByName = {}
         self.dclassesByNumber = {}
         self.hashVal = 0
-        try:
-            dcStream
-        except:
-            pass
-        else:
-            self.notify.info('Detected DC file stream, reading it...')
-            dcFileNames = [dcStream]
-
-        if isinstance(dcFileNames, str):
+        if isinstance(dcFileNames, types.StringTypes):
+            # If we were given a single string, make it a list.
             dcFileNames = [dcFileNames]

-        if dcFileNames is not None:
-            for dcFileName in dcFileNames:
-                if isinstance(dcFileName, StringStream):
-                    readResult = dcFile.read(dcFileName, 'DC stream')
-                else:
-                    readResult = dcFile.read(dcFileName)
-                if not readResult:
-                    self.notify.error('Could not read DC file.')
-        else:
-            dcFile.readAll()
-
-        self.hashVal = DCClassImports.hashVal
-        for i in xrange(dcFile.getNumClasses()):
+        dcImports = {}
+        if dcFileNames == None:
+            try:
+                # For Nirai
+                readResult = dcFile.read(dcStream, '__dc__')
+                del __builtin__.dcStream
+            except NameError:
+                readResult = dcFile.readAll()
+
+            if not readResult:
+                self.notify.error("Could not read dc file.")
+
+        else:
+            searchPath = getModelPath().getValue()
+            for dcFileName in dcFileNames:
+                pathname = Filename(dcFileName)
+                vfs.resolveFilename(pathname, searchPath)
+                readResult = dcFile.read(pathname)
+                if not readResult:
+                    self.notify.error("Could not read dc file: %s" % (pathname))
+
+        self.hashVal = dcFile.getHash()
+
+        # Now import all of the modules required by the DC file.
+        for n in range(dcFile.getNumImportModules()):
+            moduleName = dcFile.getImportModule(n)[:]
+
+            # Maybe the module name is represented as "moduleName/AI".
+            suffix = moduleName.split('/')
+            moduleName = suffix[0]
+            suffix=suffix[1:]
+            if self.dcSuffix in suffix:
+                moduleName += self.dcSuffix
+            elif self.dcSuffix == 'UD' and 'AI' in suffix: #HACK:
+                moduleName += 'AI'
+
+            importSymbols = []
+            for i in range(dcFile.getNumImportSymbols(n)):
+                symbolName = dcFile.getImportSymbol(n, i)
+
+                # Maybe the symbol name is represented as "symbolName/AI".
+                suffix = symbolName.split('/')
+                symbolName = suffix[0]
+                suffix=suffix[1:]
+                if self.dcSuffix in suffix:
+                    symbolName += self.dcSuffix
+                elif self.dcSuffix == 'UD' and 'AI' in suffix: #HACK:
+                    symbolName += 'AI'
+
+                importSymbols.append(symbolName)
+
+            self.importModule(dcImports, moduleName, importSymbols)
+
+        # Now get the class definition for the classes named in the DC
+        # file.
+        for i in range(dcFile.getNumClasses()):
             dclass = dcFile.getClass(i)
             number = dclass.getNumber()
-            className = dclass.getName()
-            classDef = DCClassImports.dcImports.get(className)
+            className = dclass.getName() + self.dcSuffix
+
+            # Does the class have a definition defined in the newly
+            # imported namespace?
+            classDef = dcImports.get(className)
+            if classDef is None and self.dcSuffix == 'UD': #HACK:
+                className = dclass.getName() + 'AI'
+                classDef = dcImports.get(className)
+
+            # Also try it without the dcSuffix.
+            if classDef == None:
+                className = dclass.getName()
+                classDef = dcImports.get(className)
             if classDef is None:
-                self.notify.debug('No class definition for %s.' % className)
+                self.notify.debug("No class definition for %s." % (className))
             else:
                 if type(classDef) == types.ModuleType:
                     if not hasattr(classDef, className):
-                        self.notify.warning('Module %s does not define class %s.' % (className, className))
+                        self.notify.warning("Module %s does not define class %s." % (className, className))
                         continue
                     classDef = getattr(classDef, className)
-                if (type(classDef) != types.ClassType) and (type(classDef) != types.TypeType):
-                    self.notify.error('Symbol %s is not a class name.' % className)
+                if type(classDef) != types.ClassType and type(classDef) != types.TypeType:
+                    self.notify.error("Symbol %s is not a class name." % (className))
                 else:
                     dclass.setClassDef(classDef)

                 self.dclassesByName[className] = dclass
                 if number >= 0:
                     self.dclassesByNumber[number] = dclass

+        # Owner Views
+        if self.hasOwnerView():
+            ownerDcSuffix = self.dcSuffix + 'OV'
+            # dict of class names (without 'OV') that have owner views
+            ownerImportSymbols = {}
+
+            # Now import all of the modules required by the DC file.
+            for n in range(dcFile.getNumImportModules()):
+                moduleName = dcFile.getImportModule(n)
+
+                # Maybe the module name is represented as "moduleName/AI".
+                suffix = moduleName.split('/')
+                moduleName = suffix[0]
+                suffix=suffix[1:]
+                if ownerDcSuffix in suffix:
+                    moduleName = moduleName + ownerDcSuffix
+
+                importSymbols = []
+                for i in range(dcFile.getNumImportSymbols(n)):
+                    symbolName = dcFile.getImportSymbol(n, i)
+
+                    # Check for the OV suffix
+                    suffix = symbolName.split('/')
+                    symbolName = suffix[0]
+                    suffix=suffix[1:]
+                    if ownerDcSuffix in suffix:
+                        symbolName += ownerDcSuffix
+                    importSymbols.append(symbolName)
+                    ownerImportSymbols[symbolName] = None
+
+                self.importModule(dcImports, moduleName, importSymbols)
+
+            # Now get the class definition for the owner classes named
+            # in the DC file.
+            for i in range(dcFile.getNumClasses()):
+                dclass = dcFile.getClass(i)
+                if ((dclass.getName()+ownerDcSuffix) in ownerImportSymbols):
+                    number = dclass.getNumber()
+                    className = dclass.getName() + ownerDcSuffix
+
+                    # Does the class have a definition defined in the newly
+                    # imported namespace?
+                    classDef = dcImports.get(className)
+                    if classDef is None:
+                        self.notify.error("No class definition for %s." % className)
+                    else:
+                        if type(classDef) == types.ModuleType:
+                            if not hasattr(classDef, className):
+                                self.notify.error("Module %s does not define class %s." % (className, className))
+                            classDef = getattr(classDef, className)
+                        dclass.setOwnerClassDef(classDef)
+                    self.dclassesByName[className] = dclass

     def getGameDoId(self):
         return self.GameGlobalsId

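The rewritten readDCFile resolves suffixed DC import entries (for example "DistributedToon/AI/OV") at runtime instead of relying on the pregenerated DCClassImports module that this commit deletes. A small isolated Python 2 sketch of that suffix handling; the sample entry and suffix values are made up:

def resolve_symbol(entry, dcSuffix):
    # Mirrors the split('/') handling above: the first part is the bare name,
    # the remaining parts are the views the symbol is declared for.
    parts = entry.split('/')
    name, views = parts[0], parts[1:]
    if dcSuffix in views:
        return name + dcSuffix
    elif dcSuffix == 'UD' and 'AI' in views:  # same fallback HACK as above
        return name + 'AI'
    return name

print resolve_symbol('DistributedToon/AI/OV', '')    # client: DistributedToon
print resolve_symbol('DistributedToon/AI/OV', 'AI')  # AI: DistributedToonAI
print resolve_symbol('DistributedToon/AI/OV', 'UD')  # UD falls back to DistributedToonAI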
@@ -35,6 +35,9 @@ args = parser.parse_args()
 for prc in args.config:
     loadPrcFile(prc)

+if os.path.isfile('dependencies/config/local.prc'):
+    loadPrcFile('dependencies/config/local.prc')
+
 localconfig = ''
 if args.base_channel: localconfig += 'air-base-channel %s\n' % args.base_channel
 if args.max_channels: localconfig += 'air-channel-allocation %s\n' % args.max_channels
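Together with the new .gitignore entry and the identical hunk in the injector further down, this gives each machine a git-ignored override file. A hypothetical dependencies/config/local.prc showing the kind of options the uberdog code in this commit reads; the values are examples only:

# dependencies/config/local.prc (example, not part of the commit)
accountdb-type remote
account-server-secret some-long-random-secret-string
account-server-hash-algo sha512
min-access-level 100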
@@ -407,34 +407,22 @@ MonthlySchedule = ((7,
                     1,
                     9,
                     30,
-                    (
-                    CatalogGardenItem(135, 1)
-                    )
-                    ),
+                    (CatalogGardenItem(135, 1),)),
                    (1,
                     1,
                     1,
                     31,
-                    (
-                    CatalogGardenItem(135, 1)
-                    )
-                    ),
+                    (CatalogGardenItem(135, 1),)),
                    (4,
                     1,
                     4,
                     30,
-                    (
-                    CatalogGardenItem(135, 1)
-                    )
-                    ),
+                    (CatalogGardenItem(135, 1),)),
                    (6,
                     1,
                     6,
                     30,
-                    (
-                    CatalogGardenItem(135, 1)
-                    )
-                    ),
+                    (CatalogGardenItem(135, 1),)),
                    (6,
                     26,
                     7,
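The only change in this hunk is collapsing each garden item entry into a proper one-element tuple; in Python the trailing comma, not the parentheses, is what makes the tuple:

item = 'CatalogGardenItem(135, 1)'   # stand-in value for illustration
print type((item))    # <type 'str'>   parentheses alone do not make a tuple
print type((item,))   # <type 'tuple'> the trailing comma does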
@@ -681,47 +681,51 @@ def validatePlantAttributes(notify):
         notify.debug(' %s, beans = %s, color=%s' % (flower[0], flower[1], flower[2]))

     notify.debug('plant attributes are ok')

-plots0 = [(0, 0, 0.0, FLOWER_TYPE), (1, 0, 0.0, FLOWER_TYPE), (2, 0, 0.0, FLOWER_TYPE), (2, 1, 0.0, FLOWER_TYPE), (2, 2, 0.0, FLOWER_TYPE), (3, 0, 0.0, FLOWER_TYPE), (3, 1, 0.0, FLOWER_TYPE), (3, 2, 0.0, FLOWER_TYPE), (4, 0, 0.0, FLOWER_TYPE), (4, 1, 0.0, FLOWER_TYPE),
+#PLOT 0 = Red house DONE
+#PLOT 1 = Green house DONE
+#PLOT 2 = Blue house DONE
+#PLOT 3 = Light-Blue house DONE
+#PLOT 4 = Purple house DONE
+#PLOT 5 = Brown house DONE
+plots0 = [(-62.575, -52.4983, 5.144, FLOWER_TYPE), (-52.071, -52.130, 5.144, FLOWER_TYPE), (-49.018, -46.6321, 5.178, FLOWER_TYPE), (-48.875, -43.2925, 5.178, FLOWER_TYPE), (-48.572, -40.1868, 5.178, FLOWER_TYPE), (-64.631, -38.778, 5.158, FLOWER_TYPE), (-64.454, -41.9345, 5.158, FLOWER_TYPE), (-64.382, -45.0799, 5.158, FLOWER_TYPE), (-55.192, -32.723, 5.158, FLOWER_TYPE), (-58.720, -32.9295, 5.158, FLOWER_TYPE),
           (-54, -13.5,
@@ -767,28 +771,28 @@ plots1 = [(85.0,
           -72, 0.0, FLOWER_TYPE),
-          (89.865, -71.7725, 0.0, FLOWER_TYPE),
+          (89.578, -71., 0.0, FLOWER_TYPE),
           (91.0, -74.0, 0.0, FLOWER_TYPE),
-          (92.135, -76.2275, 0.0, FLOWER_TYPE),
-          (75.865, -78.7725, 0.0, FLOWER_TYPE),
+          (92.622, -77.2275, 0.0, FLOWER_TYPE),
+          (75.2671, -78.02, 0.0, FLOWER_TYPE),
           (77, -81, 0.0, FLOWER_TYPE),
-          (78.135, -83.2275, 0.0, FLOWER_TYPE),
+          (78.504, -83.7906, 0.0, FLOWER_TYPE),
           (86.6518,
@@ -835,44 +839,44 @@ plots1 = [(85.0,
           (-82, -30.0, STATUARY_TYPE)]
-plots2 = [(0, 0, 0.0, FLOWER_TYPE), (1, 0, 0.0, FLOWER_TYPE), (2, 0, 0.0, FLOWER_TYPE), (2, 1, 0.0, FLOWER_TYPE), (2, 2, 0.0, FLOWER_TYPE), (3, 0, 0.0, FLOWER_TYPE), (3, 1, 0.0, FLOWER_TYPE), (3, 2, 0.0, FLOWER_TYPE), (4, 0, 0.0, FLOWER_TYPE), (4, 1, 0.0, FLOWER_TYPE),
+plots2 = [(-61.956, -111.738, 0.0, FLOWER_TYPE), (-71.942, -109.921, 0.0, FLOWER_TYPE), (-75.6116, -114.521, 0.0, FLOWER_TYPE), (-76.150, -117.799, 0.0, FLOWER_TYPE), (-76.772, -120.659, 0.0, FLOWER_TYPE), (-73.072, -128.744, 0.0, FLOWER_TYPE), (-69.446, -129.419, 0.0, FLOWER_TYPE), (-62.743, -125.292, 0.0, FLOWER_TYPE), (-61.758, -121.99, 0.0, FLOWER_TYPE), (-61.403, -119.349, 0.0, FLOWER_TYPE),
           (-40,
@@ -911,44 +915,44 @@ plots2 = [(0,
           (-114, -60.0, STATUARY_TYPE)]
-plots3 = [(0, 0, 0.0, FLOWER_TYPE), (1, 0, 0.0, FLOWER_TYPE), (2, 0, 0.0, FLOWER_TYPE), (2, 1, 0.0, FLOWER_TYPE), (2, 2, 0.0, FLOWER_TYPE), (3, 0, 0.0, FLOWER_TYPE), (3, 1, 0.0, FLOWER_TYPE), (3, 2, 0.0, FLOWER_TYPE), (4, 0, 0.0, FLOWER_TYPE), (4, 1, 0.0, FLOWER_TYPE),
+plots3 = [(72.67, 16.13, 0.0, FLOWER_TYPE), (71.93, 4.97, 0.0, FLOWER_TYPE), (67.75, 18.9, 0.0, FLOWER_TYPE), (64.09, 19.001, 0.0, FLOWER_TYPE), (60.53, 19.355, 0.0, FLOWER_TYPE), (59.44, 2.807, 0.0, FLOWER_TYPE), (62.90, 2.83, 0.0, FLOWER_TYPE), (65.85, 2.753, 0.0, FLOWER_TYPE), (53.97, 13.995, 0.0, FLOWER_TYPE), (53.99, 9.85, 0.0, FLOWER_TYPE),
           (59,
@@ -987,44 +991,44 @@ plots3 = [(0,
           (27, 102.0, STATUARY_TYPE)]
-plots4 = [(0, 0, 0.0, FLOWER_TYPE), (1, 0, 0.0, FLOWER_TYPE), (2, 0, 0.0, FLOWER_TYPE), (2, 1, 0.0, FLOWER_TYPE), (2, 2, 0.0, FLOWER_TYPE), (3, 0, 0.0, FLOWER_TYPE), (3, 1, 0.0, FLOWER_TYPE), (3, 2, 0.0, FLOWER_TYPE), (4, 0, 0.0, FLOWER_TYPE), (4, 1, 0.0, FLOWER_TYPE),
+plots4 = [(35.4771, 69.97, 0.0, FLOWER_TYPE), (46.039, 65.9762, 0.0, FLOWER_TYPE), (50.118, 70.88, 0.0, FLOWER_TYPE), (51.4956, 73.9103, 0.0, FLOWER_TYPE), (52.6171, 76.6618, 0.0, FLOWER_TYPE), (48.955, 85.4584, 0.0, FLOWER_TYPE), (45.402, 86.652, 0.0, FLOWER_TYPE), (37.499, 82.5899, 0.0, FLOWER_TYPE), (36.394, 79.6288, 0.0, FLOWER_TYPE), (35.254, 76.778, 0.0, FLOWER_TYPE),
           (37,
@@ -1063,44 +1067,44 @@ plots4 = [(0,
           (87, -140.0, STATUARY_TYPE)]
-plots5 = [(0, 0, 0.0, FLOWER_TYPE), (1, 0, 0.0, FLOWER_TYPE), (2, 0, 0.0, FLOWER_TYPE), (2, 1, 0.0, FLOWER_TYPE), (2, 2, 0.0, FLOWER_TYPE), (3, 0, 0.0, FLOWER_TYPE), (3, 1, 0.0, FLOWER_TYPE), (3, 2, 0.0, FLOWER_TYPE), (4, 0, 0.0, FLOWER_TYPE), (4, 1, 0.0, FLOWER_TYPE),
+plots5 = [(-26.276, 37.5757, 0.0, FLOWER_TYPE), (-33.058, 45.9437, 0.0, FLOWER_TYPE), (-39.405, 44.1741, 0.0, FLOWER_TYPE), (-42.145, 42.1331, 0.0, FLOWER_TYPE), (-44.400, 40.326, 0.0, FLOWER_TYPE), (-46.176, 32.4361, 0.0, FLOWER_TYPE), (-44.039, 29.378, 0.0, FLOWER_TYPE), (-34.272, 27.9571, 0.0, FLOWER_TYPE), (-32.010, 29.8193, 0.0, FLOWER_TYPE), (-29.712, 31.6731, 0.0, FLOWER_TYPE),
           (-26,
@@ -1,21 +1,23 @@
 from pandac.PandaModules import *

 def makeCard(book=False):
-    cardMaker = CardMaker('laughing-man-cm')
+    cardMaker = CardMaker('king-jake-cm')
     cardMaker.setHasUvs(1)
     cardMaker.setFrame(-0.5, 0.5, -0.5, 0.5)

-    nodePath = NodePath('laughing-man')
+    nodePath = NodePath('king-jake')
     nodePath.setBillboardPointEye()

     lmBase = nodePath.attachNewNode(cardMaker.generate())
-    lmBase.setTexture(loader.loadTexture('phase_3/maps/lm_base.rgba'))
+    lmBase.setTexture(loader.loadTexture('phase_3/maps/kj_base.png'))
     lmBase.setY(-0.3)
+    lmBase.setScale(0.845)
     lmBase.setTransparency(True)

     lmText = nodePath.attachNewNode(cardMaker.generate())
-    lmText.setTexture(loader.loadTexture('phase_3/maps/lm_text.rgba'))
+    lmText.setTexture(loader.loadTexture('phase_3/maps/kj_text.png'))
     lmText.setY(-0.301)
+    lmText.setScale(0.845)
     lmText.setTransparency(True)
     lmText.hprInterval(10, (0, 0, -360)).loop()
@@ -38,4 +40,9 @@ def addHeadEffect(head, book=False):

 def addToonEffect(toon):
     for lod in toon.getLODNames():
         addHeadEffect(toon.getPart('head', lod))
+
+"""
+from toontown.toon import LaughingManGlobals
+LaughingManGlobals.addToonEffect(base.localAvatar)
+"""

@@ -32,6 +32,9 @@ if __debug__:
 loadPrcFile('dependencies/config/general.prc')
 loadPrcFile('dependencies/config/release/dev.prc')

+if os.path.isfile('dependencies/config/local.prc'):
+    loadPrcFile('dependencies/config/local.prc')
+
 defaultText = ""

 def __inject_wx(_):
@@ -24,6 +24,6 @@ else:
     print response['reason']
 else:
     os.environ['TTS_PLAYCOOKIE'] = response['token']

 # Start the game:
 import toontown.toonbase.ClientStart
@@ -1,45 +1,74 @@
-import anydbm
-import base64
 from direct.directnotify.DirectNotifyGlobal import directNotify
 from direct.distributed.DistributedObjectGlobalUD import DistributedObjectGlobalUD
 from direct.distributed.PyDatagram import *
 from direct.fsm.FSM import FSM
-import hashlib
-import hmac
-import json
-from pandac.PandaModules import *
-import time
-import urllib2
-import os
 from otp.ai.MagicWordGlobal import *
 from otp.distributed import OtpDoGlobals

 from toontown.makeatoon.NameGenerator import NameGenerator
 from toontown.toon.ToonDNA import ToonDNA
 from toontown.toonbase import TTLocalizer
 from toontown.uberdog import NameJudgeBlacklist

+from pandac.PandaModules import *
+
+import hashlib, hmac, json
+import anydbm, math, os
+import urllib2, time
+
+def rejectConfig(issue, securityIssue=True, retarded=True):
+    print
+    print
+    print 'Lemme get this straight....'
+    print 'You are trying to use remote account database type...'
+    print 'However,', issue + '!!!!'
+    if securityIssue:
+        print 'Do you want this server to get hacked?'
+    if retarded:
+        print '"Either down\'s or autism"\n - JohnnyDaPirate, 2015'
+    print 'Go fix that!'
+    exit()
+
+def entropy(string):
+    prob = [float(string.count(c)) / len(string) for c in dict.fromkeys(list(string))]
+    entropy = -sum([p * math.log(p) / math.log(2.0) for p in prob])
+    return entropy
+
+def entropyIdeal(length):
+    prob = 1.0 / length
+    return -length * prob * math.log(prob) / math.log(2.0)
+
-# Import from PyCrypto only if we are using a database that requires it. This
-# allows local hosted and developer builds of the game to run without it:
-accountDBType = simbase.config.GetString('accountdb-type', 'developer')
+accountDBType = config.GetString('accountdb-type', 'developer')
+accountServerSecret = config.GetString('account-server-secret', '9sj6816aj1hs795j') # Changed in server distribution.
+accountServerHashAlgo = config.GetString('account-server-hash-algo', 'sha512')
 if accountDBType == 'remote':
-    from Crypto.Cipher import AES
-
-# Sometimes we'll want to force a specific access level, such as on the
-# developer server:
-minAccessLevel = simbase.config.GetInt('min-access-level', 100)
-
-accountServerEndpoint = simbase.config.GetString(
-    'account-server-endpoint', 'http://tigercat1.me/tmpremote/api/')
-accountServerSecret = simbase.config.GetString(
-    'account-server-secret', '9sj6816aj1hs795j')
-
-http = HTTPClient()
-http.setVerifySsl(0)
+    if accountServerSecret == 'dev':
+        rejectConfig('you have not changed the secret in config/local.prc')
+
+    if len(accountServerSecret) < 16:
+        rejectConfig('the secret is too small! Make it 16+ bytes', retarded=False)
+
+    secretLength = len(accountServerSecret)
+    ideal = entropyIdeal(secretLength) / 2
+    entropy = entropy(accountServerSecret)
+    if entropy < ideal:
+        rejectConfig('the secret entropy is too low! For %d bytes,'
+                     ' it should be %d. Currently it is %d' % (secretLength, ideal, entropy),
+                     retarded=False)
+
+    hashAlgo = getattr(hashlib, accountServerHashAlgo, None)
+    if not hashAlgo:
+        rejectConfig('%s is not a valid hash algo' % accountServerHashAlgo, securityIssue=False)
+
+    hashSize = len(hashAlgo('').digest())
+
+minAccessLevel = config.GetInt('min-access-level', 100)

 def executeHttpRequest(url, **extras):
+    # TO DO: THIS IS QUITE DISGUSTING
+    # INSTEAD OF USING THE SAME SECRET, WE SHOULD HAVE AN API KEY EXCLUSIVE TO THAT
+    # MOVE THIS TO ToontownInternalRepository (this might be interesting for AI)
     request = urllib2.Request('http://tigercat1.me/tmpremote/api/' + url)
     timestamp = str(int(time.time()))
     signature = hashlib.sha256(timestamp + accountServerSecret + "h*^ahJGHA017JI&A&*uyhU07")
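The entropy gate above compares the secret's measured Shannon entropy (bits per character) against half the ideal value for its length. A worked Python 2 check using the same formulas as the two helpers added in this hunk; the sample secrets are invented:

import math

def entropy(string):
    prob = [float(string.count(c)) / len(string) for c in dict.fromkeys(list(string))]
    return -sum([p * math.log(p) / math.log(2.0) for p in prob])

def entropyIdeal(length):
    prob = 1.0 / length
    return -length * prob * math.log(prob) / math.log(2.0)

weak = 'aaaaaaaaaaaaaaaa'        # 16 repeated bytes: entropy 0.0 < ideal/2 of 2.0, rejected
strong = 'k3J9w!pQz7Lx2#Vb'      # 16 distinct bytes: entropy 4.0, accepted
print entropy(weak), entropyIdeal(len(weak)) / 2
print entropy(strong), entropyIdeal(len(strong)) / 2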
@@ -53,10 +82,10 @@ def executeHttpRequest(url, **extras):
     except:
         return None


 notify = directNotify.newCategory('ClientServicesManagerUD')

 def executeHttpRequestAndLog(url, **extras):
+    # SEE ABOVE
     response = executeHttpRequest(url, extras)

     if response is None:
@@ -93,21 +122,18 @@ def judgeName(name):

 # --- ACCOUNT DATABASES ---
 # These classes make up the available account databases for Toontown Stride.
-# Databases with login tokens use the PyCrypto module for decrypting them.
 # DeveloperAccountDB is a special database that accepts a username, and assigns
 # each user with 700 access automatically upon login.


 class AccountDB:
     notify = directNotify.newCategory('AccountDB')

     def __init__(self, csm):
         self.csm = csm

-        filename = simbase.config.GetString(
-            'account-bridge-filename', 'account-bridge.db')
-        filename = os.path.join("dependencies", filename)
+        filename = config.GetString('account-bridge-filename', 'account-bridge.db')
+        filename = os.path.join('dependencies', filename)

         self.dbm = anydbm.open(filename, 'c')

     def addNameRequest(self, avId, name):
@@ -119,8 +145,20 @@ class AccountDB:
     def removeNameRequest(self, avId):
         pass

-    def lookup(self, username, callback):
-        pass # Inheritors should override this.
+    def lookup(self, data, callback):
+        userId = data['userId']
+
+        data['success'] = True
+        data['accessLevel'] = max(data['accessLevel'], minAccessLevel)
+
+        if str(userId) not in self.dbm:
+            data['accountId'] = 0
+
+        else:
+            data['accountId'] = int(self.dbm[str(userId)])
+
+        callback(data)
+        return data

     def storeAccountID(self, userId, accountId, callback):
         self.dbm[str(userId)] = str(accountId)  # anydbm only allows strings.
@@ -131,33 +169,20 @@ class AccountDB:
         self.notify.warning('Unable to associate user %s with account %d!' % (userId, accountId))
         callback(False)


 class DeveloperAccountDB(AccountDB):
     notify = directNotify.newCategory('DeveloperAccountDB')

-    def lookup(self, username, callback):
-        # Let's check if this user's ID is in your account database bridge:
-        if str(username) not in self.dbm:
-            # Nope. Let's associate them with a brand new Account object! We
-            # will assign them with 700 access just because they are a
-            # developer:
-            response = {
-                'success': True,
-                'userId': username,
-                'accountId': 0,
-                'accessLevel': max(700, minAccessLevel)
-            }
-        else:
-            # We have an account already, let's return what we've got:
-            response = {
-                'success': True,
-                'userId': username,
-                'accountId': int(self.dbm[str(username)]),
-            }
-        callback(response)
-        return response
+    def lookup(self, userId, callback):
+        return AccountDB.lookup(self, {'userId': userId,
+                                       'accessLevel': 700,
+                                       'notAfter': 0},
+                                callback)


 class RemoteAccountDB(AccountDB):
+    # TO DO FOR NAMES:
+    # CURRENTLY IT MAKES n REQUESTS FOR EACH AVATAR
+    # IN THE FUTURE, MAKE ONLY 1 REQUEST
+    # WHICH RETURNS ALL PENDING AVS
     notify = directNotify.newCategory('RemoteAccountDB')

     def addNameRequest(self, avId, name):
@ -171,105 +196,43 @@ class RemoteAccountDB(AccountDB):
|
||||||
return executeHttpRequest('names/remove', ID=str(avId))
|
return executeHttpRequest('names/remove', ID=str(avId))
|
||||||
|
|
||||||
def lookup(self, token, callback):
|
def lookup(self, token, callback):
|
||||||
# First, base64 decode the token:
|
'''
|
||||||
|
Token format:
|
||||||
|
The token is obfuscated a bit, but nothing too hard to read.
|
||||||
|
Most of the security is based on the hash.
|
||||||
|
|
||||||
|
I. Data contained in a token:
|
||||||
|
A json-encoded dict, which contains timestamp, userid and extra info
|
||||||
|
|
||||||
|
II. Token format
|
||||||
|
X = BASE64(ROT13(DATA)[::-1])
|
||||||
|
H = HASH(X)[::-1]
|
||||||
|
Token = BASE64(H + X)
|
||||||
|
'''
|
||||||
|
|
||||||
try:
|
try:
|
||||||
token = base64.b64decode(token)
|
token = token.decode('base64')
|
||||||
except TypeError:
|
hash, token = token[:hashSize], token[hashSize:]
|
||||||
self.notify.warning('Could not decode the provided token!')
|
|
||||||
response = {
|
|
||||||
'success': False,
|
|
||||||
'reason': "Can't decode this token."
|
|
||||||
}
|
|
||||||
callback(response)
|
|
||||||
return response
|
|
||||||
|
|
||||||
# Ensure this token is a valid size:
|
correctHash = hashAlgo(token + accountServerSecret).digest()
|
||||||
if (not token) or ((len(token) % 16) != 0):
|
if len(hash) != len(correctHash):
|
||||||
self.notify.warning('Invalid token length!')
|
raise ValueError('Invalid hash.')
|
||||||
response = {
|
|
||||||
'success': False,
|
|
||||||
'reason': 'Invalid token length.'
|
|
||||||
}
|
|
||||||
callback(response)
|
|
||||||
return response
|
|
||||||
|
|
||||||
# Next, decrypt the token using AES-128 in CBC mode:
|
value = 0
|
||||||
accountServerSecret = simbase.config.GetString(
|
for x, y in zip(hash[::-1], correctHash):
|
||||||
'account-server-secret', '9sj6816aj1hs795j')
|
value |= ord(x) ^ ord(y)
|
||||||
|
|
||||||
# Ensure that our secret is the correct size:
|
if value:
|
||||||
if len(accountServerSecret) > AES.block_size:
|
raise ValueError('Invalid hash.')
|
||||||
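The XOR accumulation above is a constant-time digest comparison: it inspects every byte instead of returning at the first mismatch, so timing does not reveal how much of the hash was correct. On Python 2.7.7 and later the same check could be written with the standard library, shown here purely for comparison:

import hmac
if not hmac.compare_digest(hash[::-1], correctHash):
    raise ValueError('Invalid hash.')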
self.notify.warning('account-server-secret is too big!')
|
|
||||||
accountServerSecret = accountServerSecret[:AES.block_size]
|
|
||||||
elif len(accountServerSecret) < AES.block_size:
|
|
||||||
self.notify.warning('account-server-secret is too small!')
|
|
||||||
accountServerSecret += '\x80'
|
|
||||||
while len(accountServerSecret) < AES.block_size:
|
|
||||||
accountServerSecret += '\x00'
|
|
||||||
|
|
||||||
# Take the initialization vector off the front of the token:
|
token = json.loads(token.decode('base64')[::-1].decode('rot13'))
|
||||||
iv = token[:AES.block_size]
|
|
||||||
|
|
||||||
# Truncate the token to get our cipher text:
|
except:
|
||||||
cipherText = token[AES.block_size:]
|
resp = {'success': False}
|
||||||
|
callback(resp)
|
||||||
# Decrypt!
|
return resp
|
||||||
cipher = AES.new(accountServerSecret, mode=AES.MODE_CBC, IV=iv)
|
|
||||||
try:
|
|
||||||
token = json.loads(cipher.decrypt(cipherText).replace('\x00', ''))
|
|
||||||
if ('timestamp' not in token) or (not isinstance(token['timestamp'], int)):
|
|
||||||
raise ValueError
|
|
||||||
if ('userid' not in token) or (not isinstance(token['userid'], int)):
|
|
||||||
raise ValueError
|
|
||||||
if ('accesslevel' not in token) or (not isinstance(token['accesslevel'], int)):
|
|
||||||
raise ValueError
|
|
||||||
except ValueError, e:
|
|
||||||
print e
|
|
||||||
self.notify.warning('Invalid token.')
|
|
||||||
response = {
|
|
||||||
'success': False,
|
|
||||||
'reason': 'Invalid token.'
|
|
||||||
}
|
|
||||||
callback(response)
|
|
||||||
return response
|
|
||||||
|
|
||||||
# Next, check if this token has expired:
|
|
||||||
expiration = simbase.config.GetInt('account-token-expiration', 1800)
|
|
||||||
tokenDelta = int(time.time()) - token['timestamp']
|
|
||||||
if tokenDelta > expiration:
|
|
||||||
response = {
|
|
||||||
'success': False,
|
|
||||||
'reason': 'This token has expired.'
|
|
||||||
}
|
|
||||||
callback(response)
|
|
||||||
return response
|
|
||||||
|
|
||||||
# This token is valid. That's all we need to know. Next, let's check if
|
|
||||||
# this user's ID is in your account database bridge:
|
|
||||||
if str(token['userid']) not in self.dbm:
|
|
||||||
|
|
||||||
# Nope. Let's associate them with a brand new Account object!
|
|
||||||
response = {
|
|
||||||
'success': True,
|
|
||||||
'userId': token['userid'],
|
|
||||||
'accountId': 0,
|
|
||||||
'accessLevel': max(int(token['accesslevel']), minAccessLevel)
|
|
||||||
}
|
|
||||||
callback(response)
|
|
||||||
return response
|
|
||||||
|
|
||||||
else:
|
|
||||||
|
|
||||||
# Yep. Let's return their account ID and access level!
|
|
||||||
response = {
|
|
||||||
'success': True,
|
|
||||||
'userId': token['userid'],
|
|
||||||
'accountId': int(self.dbm[str(token['userid'])]),
|
|
||||||
'accessLevel': max(int(token['accesslevel']), minAccessLevel)
|
|
||||||
}
|
|
||||||
callback(response)
|
|
||||||
return response
|
|
||||||
|
|
||||||
|
return AccountDB.lookup(self, token, callback)
|
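The docstring above describes the layout loosely; the verification code pins it down: DATA is a JSON dict carrying the keys the new AccountDB.lookup() expects (userId, accessLevel, notAfter); X = BASE64(ROT13(DATA)[::-1]); H is the digest of X plus the shared account-server secret, reversed; Token = BASE64(H + X). A sketch of the matching generator, assuming hashAlgo is SHA-256 (so hashSize would be 32) and taking the secret as a parameter; the real account server may include additional fields:

import json
from hashlib import sha256   # assumption: hashAlgo is defined elsewhere in this module

def makeToken(userId, accessLevel, secret, notAfter=0):
    data = json.dumps({'userId': userId,
                       'accessLevel': accessLevel,
                       'notAfter': notAfter})
    x = data.encode('rot13')[::-1].encode('base64')   # X = BASE64(ROT13(DATA)[::-1])
    h = sha256(x + secret).digest()[::-1]             # H = HASH(X + secret)[::-1]
    return (h + x).encode('base64')                   # Token = BASE64(H + X)

# token = makeToken(42, 100, 'placeholder-secret')    # hypothetical values

On the receiving side, lookup() splits off the first hashSize bytes, recomputes the digest over X plus accountServerSecret, and only unwraps the base64/ROT13 layers once the hash matches, so the JSON is never parsed for a token with a bad hash.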
||||||
|
|
||||||
# --- FSMs ---
|
# --- FSMs ---
|
||||||
class OperationFSM(FSM):
|
class OperationFSM(FSM):
|
||||||
|
@ -294,7 +257,6 @@ class OperationFSM(FSM):
|
||||||
else:
|
else:
|
||||||
del self.csm.account2fsm[self.target]
|
del self.csm.account2fsm[self.target]
|
||||||
|
|
||||||
|
|
||||||
class LoginAccountFSM(OperationFSM):
|
class LoginAccountFSM(OperationFSM):
|
||||||
notify = directNotify.newCategory('LoginAccountFSM')
|
notify = directNotify.newCategory('LoginAccountFSM')
|
||||||
TARGET_CONNECTION = True
|
TARGET_CONNECTION = True
|
||||||
|
@ -315,6 +277,7 @@ class LoginAccountFSM(OperationFSM):
|
||||||
self.userId = result.get('userId', 0)
|
self.userId = result.get('userId', 0)
|
||||||
self.accountId = result.get('accountId', 0)
|
self.accountId = result.get('accountId', 0)
|
||||||
self.accessLevel = result.get('accessLevel', 0)
|
self.accessLevel = result.get('accessLevel', 0)
|
||||||
|
self.notAfter = result.get('notAfter', 0)
|
||||||
if self.accountId:
|
if self.accountId:
|
||||||
self.demand('RetrieveAccount')
|
self.demand('RetrieveAccount')
|
||||||
else:
|
else:
|
||||||
|
@ -330,6 +293,12 @@ class LoginAccountFSM(OperationFSM):
|
||||||
return
|
return
|
||||||
|
|
||||||
self.account = fields
|
self.account = fields
|
||||||
|
|
||||||
|
if self.notAfter:
|
||||||
|
if self.account.get('LAST_LOGIN_TS', 0) > self.notAfter:
|
||||||
|
self.notify.debug('Rejecting old token: %d, notAfter=%d' % (self.account.get('LAST_LOGIN_TS', 0), self.notAfter))
|
||||||
|
return self.__handleLookup({'success': False})
|
||||||
|
|
||||||
self.demand('SetAccount')
|
self.demand('SetAccount')
|
||||||
|
|
||||||
def enterCreateAccount(self):
|
def enterCreateAccount(self):
|
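The new notAfter handling works as a replay guard: a token is refused once the account's stored LAST_LOGIN_TS exceeds the token's notAfter, so, for tokens carrying a nonzero notAfter, older tokens stop working after a newer successful login. A tiny illustration with made-up timestamps:

notAfter = 1428000000       # carried inside the login token
lastLoginTS = 1428000600    # LAST_LOGIN_TS stored on the Account object
if lastLoginTS > notAfter:
    # A newer login has already happened, so this token is rejected,
    # exactly as the check added above does.
    pass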
||||||
|
@ -339,6 +308,7 @@ class LoginAccountFSM(OperationFSM):
|
||||||
'ACCOUNT_AV_SET_DEL': [],
|
'ACCOUNT_AV_SET_DEL': [],
|
||||||
'CREATED': time.ctime(),
|
'CREATED': time.ctime(),
|
||||||
'LAST_LOGIN': time.ctime(),
|
'LAST_LOGIN': time.ctime(),
|
||||||
|
'LAST_LOGIN_TS': time.time(),
|
||||||
'ACCOUNT_ID': str(self.userId),
|
'ACCOUNT_ID': str(self.userId),
|
||||||
'ACCESS_LEVEL': self.accessLevel
|
'ACCESS_LEVEL': self.accessLevel
|
||||||
}
|
}
|
||||||
|
@ -403,7 +373,7 @@ class LoginAccountFSM(OperationFSM):
|
||||||
datagram.addChannel(self.csm.GetAccountConnectionChannel(self.accountId))
|
datagram.addChannel(self.csm.GetAccountConnectionChannel(self.accountId))
|
||||||
self.csm.air.send(datagram)
|
self.csm.air.send(datagram)
|
||||||
|
|
||||||
# Subscribe to any "staff" channels that the account has access to.
|
# Subscribe to any "staff" channels that the account has access to.
|
||||||
access = self.account.get('ADMIN_ACCESS', 0)
|
access = self.account.get('ADMIN_ACCESS', 0)
|
||||||
if access >= 200:
|
if access >= 200:
|
||||||
# Subscribe to the moderator channel.
|
# Subscribe to the moderator channel.
|
||||||
|
@ -449,6 +419,7 @@ class LoginAccountFSM(OperationFSM):
|
||||||
self.accountId,
|
self.accountId,
|
||||||
self.csm.air.dclassesByName['AccountUD'],
|
self.csm.air.dclassesByName['AccountUD'],
|
||||||
{'LAST_LOGIN': time.ctime(),
|
{'LAST_LOGIN': time.ctime(),
|
||||||
|
'LAST_LOGIN_TS': time.time(),
|
||||||
'ACCOUNT_ID': str(self.userId)})
|
'ACCOUNT_ID': str(self.userId)})
|
||||||
|
|
||||||
# We're done.
|
# We're done.
|
||||||
|
@ -547,7 +518,6 @@ class CreateAvatarFSM(OperationFSM):
|
||||||
self.csm.sendUpdateToAccountId(self.target, 'createAvatarResp', [self.avId])
|
self.csm.sendUpdateToAccountId(self.target, 'createAvatarResp', [self.avId])
|
||||||
self.demand('Off')
|
self.demand('Off')
|
||||||
|
|
||||||
|
|
||||||
class AvatarOperationFSM(OperationFSM):
|
class AvatarOperationFSM(OperationFSM):
|
||||||
POST_ACCOUNT_STATE = 'Off' # This needs to be overridden.
|
POST_ACCOUNT_STATE = 'Off' # This needs to be overridden.
|
||||||
|
|
||||||
|
@ -570,7 +540,6 @@ class AvatarOperationFSM(OperationFSM):
|
||||||
|
|
||||||
self.demand(self.POST_ACCOUNT_STATE)
|
self.demand(self.POST_ACCOUNT_STATE)
|
||||||
|
|
||||||
|
|
||||||
class GetAvatarsFSM(AvatarOperationFSM):
|
class GetAvatarsFSM(AvatarOperationFSM):
|
||||||
notify = directNotify.newCategory('GetAvatarsFSM')
|
notify = directNotify.newCategory('GetAvatarsFSM')
|
||||||
POST_ACCOUNT_STATE = 'QueryAvatars'
|
POST_ACCOUNT_STATE = 'QueryAvatars'
|
||||||
|
@ -807,7 +776,6 @@ class SetNamePatternFSM(AvatarOperationFSM):
|
||||||
self.csm.sendUpdateToAccountId(self.target, 'setNamePatternResp', [self.avId, 1])
|
self.csm.sendUpdateToAccountId(self.target, 'setNamePatternResp', [self.avId, 1])
|
||||||
self.demand('Off')
|
self.demand('Off')
|
||||||
|
|
||||||
|
|
||||||
class AcknowledgeNameFSM(AvatarOperationFSM):
|
class AcknowledgeNameFSM(AvatarOperationFSM):
|
||||||
notify = directNotify.newCategory('AcknowledgeNameFSM')
|
notify = directNotify.newCategory('AcknowledgeNameFSM')
|
||||||
POST_ACCOUNT_STATE = 'GetTargetAvatar'
|
POST_ACCOUNT_STATE = 'GetTargetAvatar'
|
||||||
|
@ -863,7 +831,6 @@ class AcknowledgeNameFSM(AvatarOperationFSM):
|
||||||
self.csm.sendUpdateToAccountId(self.target, 'acknowledgeAvatarNameResp', [])
|
self.csm.sendUpdateToAccountId(self.target, 'acknowledgeAvatarNameResp', [])
|
||||||
self.demand('Off')
|
self.demand('Off')
|
||||||
|
|
||||||
|
|
||||||
class LoadAvatarFSM(AvatarOperationFSM):
|
class LoadAvatarFSM(AvatarOperationFSM):
|
||||||
notify = directNotify.newCategory('LoadAvatarFSM')
|
notify = directNotify.newCategory('LoadAvatarFSM')
|
||||||
POST_ACCOUNT_STATE = 'GetTargetAvatar'
|
POST_ACCOUNT_STATE = 'GetTargetAvatar'
|
||||||
|
@ -1007,7 +974,6 @@ class UnloadAvatarFSM(OperationFSM):
|
||||||
self.csm.air.writeServerEvent('avatarUnload', self.avId)
|
self.csm.air.writeServerEvent('avatarUnload', self.avId)
|
||||||
self.demand('Off')
|
self.demand('Off')
|
||||||
|
|
||||||
|
|
||||||
# --- CLIENT SERVICES MANAGER UBERDOG ---
|
# --- CLIENT SERVICES MANAGER UBERDOG ---
|
||||||
class ClientServicesManagerUD(DistributedObjectGlobalUD):
|
class ClientServicesManagerUD(DistributedObjectGlobalUD):
|
||||||
notify = directNotify.newCategory('ClientServicesManagerUD')
|
notify = directNotify.newCategory('ClientServicesManagerUD')
|
||||||
|
|
|
@ -33,6 +33,9 @@ args = parser.parse_args()
|
||||||
for prc in args.config:
|
for prc in args.config:
|
||||||
loadPrcFile(prc)
|
loadPrcFile(prc)
|
||||||
|
|
||||||
|
if os.path.isfile('dependencies/config/local.prc'):
|
||||||
|
loadPrcFile('dependencies/config/local.prc')
|
||||||
|
|
||||||
localconfig = ''
|
localconfig = ''
|
||||||
if args.base_channel:
|
if args.base_channel:
|
||||||
localconfig += 'air-base-channel %s\n' % args.base_channel
|
localconfig += 'air-base-channel %s\n' % args.base_channel
|
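The added lines let startup pick up an optional, uncommitted dependencies/config/local.prc with developer-local overrides. For illustration, the same effect could be achieved programmatically; the option value below is hypothetical, and the single line could instead simply be placed in local.prc:

from panda3d.core import loadPrcFileData

# Equivalent, for illustration, to a local.prc containing the line
# "account-bridge-filename dev-bridge.db" (a made-up filename).
loadPrcFileData('local dev overrides', 'account-bridge-filename dev-bridge.db')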
||||||
|
|