mirror of
https://github.com/imayushsaini/Bombsquad-Ballistica-Modded-Server.git
synced 2025-10-20 00:00:39 +00:00
Private server
This commit is contained in:
commit
be7c837e33
668 changed files with 151282 additions and 0 deletions
18
README.txt
Normal file
18
README.txt
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
To run this, simply cd into this directory and run ./ballisticacore_server
|
||||
(on mac or linux) or launch_ballisticacore_server.bat (on windows).
|
||||
You'll need to open a UDP port (43210 by default) so that the world can
|
||||
communicate with your server.
|
||||
You can configure your server by editing the config.yaml file.
|
||||
(if you only see config_template.yaml, you can copy/rename that to config.yaml)
|
||||
|
||||
-Add your account-id in dist/ba_root/mods/privateserver.py -> admin[]
|
||||
-Restart server twice
|
||||
-Add players' account-ids (pb-id) to whitelist.json manually, or use the chat command while the whitelist is off.
|
||||
-Use "/whitelist" to turn on/off whitelist.
|
||||
-Use "/spectators" to turn on/off lobby kick.
|
||||
-Use "/add <client-id>" to whitelist player (turn off whitelist or spectators mode first).
|
||||
-In config.yaml set party type to PUBLIC ; party will be PRIVATE automatically by smoothy haxx
|
||||
-Increased Kickvote cooldown
|
||||
-Kickvote activity will be logged in the terminal (who is kicking whom).
|
||||
-The 'player joined the party' / 'player left the party' messages are removed.
|
||||
|
||||
847
ballisticacore_server
Normal file
847
ballisticacore_server
Normal file
|
|
@ -0,0 +1,847 @@
|
|||
#!/usr/bin/env python3.8
|
||||
# Released under the MIT License. See LICENSE for details.
|
||||
#
|
||||
"""BallisticaCore server manager."""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import signal
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
from pathlib import Path
|
||||
from threading import Lock, Thread, current_thread
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
# We make use of the bacommon and efro packages as well as site-packages
|
||||
# included with our bundled Ballistica dist, so we need to add those paths
|
||||
# before we import them.
|
||||
sys.path += [
|
||||
str(Path(Path(__file__).parent, 'dist', 'ba_data', 'python')),
|
||||
str(Path(Path(__file__).parent, 'dist', 'ba_data', 'python-site-packages'))
|
||||
]
|
||||
|
||||
from bacommon.servermanager import ServerConfig, StartServerModeCommand
|
||||
from efro.dataclasses import dataclass_from_dict, dataclass_validate
|
||||
from efro.error import CleanError
|
||||
from efro.terminal import Clr
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Optional, List, Dict, Union, Tuple
|
||||
from types import FrameType
|
||||
from bacommon.servermanager import ServerCommand
|
||||
|
||||
VERSION_STR = '1.2'
|
||||
|
||||
# Version history:
|
||||
# 1.2:
|
||||
# Added optional --help arg
|
||||
# Added --config arg for setting config path and --root for ba_root path
|
||||
# Added noninteractive mode and --interactive/--noninteractive args to
|
||||
# explicitly specify
|
||||
# Added explicit control for auto-restart: --no-auto-restart
|
||||
# Config file is now reloaded each time server binary is restarted; no more
|
||||
# need to bring down server wrapper to pick up changes
|
||||
# Now automatically restarts server binary when config file is modified
|
||||
# (use --no-config-auto-restart to disable that behavior)
|
||||
# 1.1.1:
|
||||
# Switched config reading to use efro.dataclasses.dataclass_from_dict()
|
||||
# 1.1.0:
|
||||
# Added shutdown command
|
||||
# Changed restart to default to immediate=True
|
||||
# Added clean_exit_minutes, unclean_exit_minutes, and idle_exit_minutes
|
||||
# 1.0.0:
|
||||
# Initial release
|
||||
|
||||
|
||||
class ServerManagerApp:
|
||||
"""An app which manages BallisticaCore server execution.
|
||||
|
||||
Handles configuring, launching, re-launching, and otherwise
|
||||
managing BallisticaCore operating in server mode.
|
||||
"""
|
||||
|
||||
# How many seconds we wait after asking our subprocess to do an immediate
|
||||
# shutdown before bringing down the hammer.
|
||||
IMMEDIATE_SHUTDOWN_TIME_LIMIT = 5.0
|
||||
|
||||
def __init__(self) -> None:
    """Set up default state; may raise CleanError on bad args/config."""
    # Where we look for the server config (overridable via --config).
    self._config_path = 'config.yaml'
    # True once the user explicitly passes --config; affects how leniently
    # a missing config file is treated when loading.
    self._user_provided_config_path = False
    self._config = ServerConfig()
    self._ba_root_path = os.path.abspath('dist/ba_root')
    # Default to interactive mode only when attached to a terminal.
    self._interactive = sys.stdin.isatty()
    self._wrapper_shutdown_desired = False
    self._done = False
    # Commands queued for the subprocess; guarded by the lock below since
    # they are appended from arbitrary threads and drained by the bg thread.
    self._subprocess_commands: List[Union[str, ServerCommand]] = []
    self._subprocess_commands_lock = Lock()
    # When set, the bg thread force-kills the subprocess after this time.
    self._subprocess_force_kill_time: Optional[float] = None
    self._auto_restart = True
    self._config_auto_restart = True
    # Config file mod-time bookkeeping for change-triggered restarts.
    self._config_mtime: Optional[float] = None
    self._last_config_mtime_check_time: Optional[float] = None
    self._should_report_subprocess_error = False
    self._running = False
    self._interpreter_start_time: Optional[float] = None
    self._subprocess: Optional[subprocess.Popen[bytes]] = None
    self._subprocess_launch_time: Optional[float] = None
    # One-shot flags so each exit/restart request is sent only once.
    self._subprocess_sent_config_auto_restart = False
    self._subprocess_sent_clean_exit = False
    self._subprocess_sent_unclean_exit = False
    self._subprocess_thread: Optional[Thread] = None
    self._subprocess_exited_cleanly: Optional[bool] = None

    # This may override the above defaults.
    self._parse_command_line_args()

    # Do an initial config-load. If the config is invalid at this point
    # we can cleanly die (we're more lenient later on reloads).
    self.load_config(strict=True, print_confirmation=False)
@property
def config(self) -> ServerConfig:
    """The current config for the app."""
    return self._config

@config.setter
def config(self, value: ServerConfig) -> None:
    # Validate up-front so an invalid config fails loudly at assignment
    # time rather than later when it is sent to the subprocess.
    dataclass_validate(value)
    self._config = value
def _prerun(self) -> None:
    """Common code at the start of any run.

    Installs a SIGTERM handler, normalizes cwd, and starts the
    background thread that manages the server binary. Raises
    RuntimeError if called more than once.
    """

    # Make sure we don't call run multiple times.
    if self._running:
        raise RuntimeError('Already running.')
    self._running = True

    dbgstr = 'debug' if __debug__ else 'opt'
    print(
        f'{Clr.CYN}{Clr.BLD}BallisticaCore server manager {VERSION_STR}'
        f' starting up ({dbgstr} mode)...{Clr.RST}',
        flush=True)

    # Python will handle SIGINT for us (as KeyboardInterrupt) but we
    # need to register a SIGTERM handler so we have a chance to clean
    # up our subprocess when someone tells us to die. (and avoid
    # zombie processes)
    signal.signal(signal.SIGTERM, self._handle_term_signal)

    # During a run, we make the assumption that cwd is the dir
    # containing this script, so make that so. Up until now that may
    # not be the case (we support being called from any location).
    os.chdir(os.path.abspath(os.path.dirname(__file__)))

    # Fire off a background thread to wrangle our server binaries.
    self._subprocess_thread = Thread(target=self._bg_thread_main)
    self._subprocess_thread.start()
def _postrun(self) -> None:
    """Common code at the end of any run.

    Flags the bg thread to stop, joins it, and raises CleanError if
    the subprocess exited uncleanly and that should be reported.
    """
    print(f'{Clr.CYN}Server manager shutting down...{Clr.RST}', flush=True)

    assert self._subprocess_thread is not None
    if self._subprocess_thread.is_alive():
        print(f'{Clr.CYN}Waiting for subprocess exit...{Clr.RST}',
              flush=True)

    # Mark ourselves as shutting down and wait for the process to wrap up.
    self._done = True
    self._subprocess_thread.join()

    # If there's a server error we should care about, exit the
    # entire wrapper uncleanly.
    if self._should_report_subprocess_error:
        raise CleanError('Server subprocess exited uncleanly.')
def run(self) -> None:
    """Do the thing."""
    # Dispatch to whichever main-loop flavor was selected during
    # init/arg-parsing.
    runner = (self._run_interactive
              if self._interactive else self._run_noninteractive)
    runner()
def _run_noninteractive(self) -> None:
    """Run the app loop to completion noninteractively."""
    self._prerun()
    try:
        # Nothing for the main thread to do here; just idle while the
        # bg thread runs the show.
        while True:
            time.sleep(1.234)
    except (KeyboardInterrupt, SystemExit):
        # KeyboardInterrupt: gracefully bow out if killed via keyboard.
        # SystemExit: comes from the builtin quit(), our signal handler,
        # etc. We catch both so we can clean up; otherwise we'd be left
        # in limbo with our process thread still running.
        pass
    self._postrun()
def _run_interactive(self) -> None:
    """Run the app loop to completion interactively.

    Hosts a Python interpreter on stdin with this manager exposed as
    'mgr'; returns once the interpreter session ends.
    """
    import code
    self._prerun()

    # Print basic usage info for interactive mode.
    print(
        f"{Clr.CYN}Interactive mode enabled; use the 'mgr' object"
        f' to interact with the server.\n'
        f"Type 'help(mgr)' for more information.{Clr.RST}",
        flush=True)

    context = {'__name__': '__console__', '__doc__': None, 'mgr': self}

    # Enable tab-completion if possible.
    self._enable_tab_completion(context)

    # Now just sit in an interpreter.
    # TODO: make it possible to use IPython if the user has it available.
    try:
        # Recorded so the bg thread can tell whether the interpreter is
        # fully up before relying on SystemExit to break it out.
        self._interpreter_start_time = time.time()
        code.interact(local=context, banner='', exitmsg='')
    except SystemExit:
        # We get this from the builtin quit(), our signal handler, etc.
        # Need to catch this so we can clean up, otherwise we'll be
        # left in limbo with our process thread still running.
        pass
    except BaseException as exc:
        print(
            f'{Clr.SRED}Unexpected interpreter exception:'
            f' {exc} ({type(exc)}){Clr.RST}',
            flush=True)

    self._postrun()
def cmd(self, statement: str) -> None:
    """Exec a Python command on the current running server subprocess.

    Note that commands are executed asynchronously and no status or
    return value is accessible from this manager app.
    """
    if isinstance(statement, str):
        with self._subprocess_commands_lock:
            self._subprocess_commands.append(statement)
        self._block_for_command_completion()
    else:
        raise TypeError(f'Expected a string arg; got {type(statement)}')
def _block_for_command_completion(self) -> None:
|
||||
# Ideally we'd block here until the command was run so our prompt would
|
||||
# print after it's results. We currently don't get any response from
|
||||
# the app so the best we can do is block until our bg thread has sent
|
||||
# it. In the future we can perhaps add a proper 'command port'
|
||||
# interface for proper blocking two way communication.
|
||||
while True:
|
||||
with self._subprocess_commands_lock:
|
||||
if not self._subprocess_commands:
|
||||
break
|
||||
time.sleep(0.1)
|
||||
|
||||
# One last short delay so if we come out *just* as the command is sent
|
||||
# we'll hopefully still give it enough time to process/print.
|
||||
time.sleep(0.1)
|
||||
|
||||
def screenmessage(self,
                  message: str,
                  color: Optional[Tuple[float, float, float]] = None,
                  clients: Optional[List[int]] = None) -> None:
    """Display a screen-message.

    This will have no name attached and not show up in chat history.
    They will show up in replays, however (unless clients is passed).
    """
    from bacommon.servermanager import ScreenMessageCommand
    command = ScreenMessageCommand(message=message,
                                  color=color,
                                  clients=clients)
    self._enqueue_server_command(command)
def chatmessage(self,
                message: str,
                clients: Optional[List[int]] = None) -> None:
    """Send a chat message from the server.

    This will have the server's name attached and will be logged
    in client chat windows, just like other chat messages.
    """
    from bacommon.servermanager import ChatMessageCommand
    command = ChatMessageCommand(message=message, clients=clients)
    self._enqueue_server_command(command)
def clientlist(self) -> None:
    """Print a list of connected clients."""
    from bacommon.servermanager import ClientListCommand
    self._enqueue_server_command(ClientListCommand())
    # Wait until the bg thread has passed the command along so its
    # output (hopefully) prints before our next prompt.
    self._block_for_command_completion()
def kick(self, client_id: int, ban_time: Optional[int] = None) -> None:
    """Kick the client with the provided id.

    If ban_time is provided, the client will be banned for that
    length of time in seconds. If it is None, ban duration will
    be determined automatically. Pass 0 or a negative number for no
    ban time.
    """
    from bacommon.servermanager import KickCommand
    command = KickCommand(client_id=client_id, ban_time=ban_time)
    self._enqueue_server_command(command)
def restart(self, immediate: bool = True) -> None:
    """Restart the server subprocess.

    By default, the current server process will exit immediately.
    If 'immediate' is passed as False, however, it will instead exit at
    the next clean transition point (the end of a series, etc).
    """
    from bacommon.servermanager import ShutdownCommand, ShutdownReason
    self._enqueue_server_command(
        ShutdownCommand(reason=ShutdownReason.RESTARTING,
                        immediate=immediate))

    # An immediate restart that doesn't land within the grace period
    # gets the hammer brought down on it.
    if immediate:
        deadline = time.time() + self.IMMEDIATE_SHUTDOWN_TIME_LIMIT
        self._subprocess_force_kill_time = deadline
def shutdown(self, immediate: bool = True) -> None:
    """Shut down the server subprocess and exit the wrapper.

    By default, the current server process will exit immediately.
    If 'immediate' is passed as False, however, it will instead exit at
    the next clean transition point (the end of a series, etc).
    """
    from bacommon.servermanager import ShutdownCommand, ShutdownReason
    self._enqueue_server_command(
        ShutdownCommand(reason=ShutdownReason.NONE, immediate=immediate))

    # An explicit shutdown means we know to bail completely once this
    # subprocess completes.
    self._wrapper_shutdown_desired = True

    # An immediate shutdown that doesn't land within the grace period
    # gets the hammer brought down on it.
    if immediate:
        deadline = time.time() + self.IMMEDIATE_SHUTDOWN_TIME_LIMIT
        self._subprocess_force_kill_time = deadline
def _parse_command_line_args(self) -> None:
    """Parse command line args.

    Updates instance state in place; raises CleanError on malformed
    or unrecognized args, and exits the process entirely for --help.
    """
    # pylint: disable=too-many-branches

    # Manual argv walk; i advances by 1 or 2 depending on whether the
    # flag consumes a value.
    i = 1
    argc = len(sys.argv)
    did_set_interactive = False
    while i < argc:
        arg = sys.argv[i]
        if arg == '--help':
            self.print_help()
            sys.exit(0)
        elif arg == '--config':
            if i + 1 >= argc:
                raise CleanError('Expected a config path as next arg.')
            path = sys.argv[i + 1]
            if not os.path.exists(path):
                raise CleanError(
                    f"Supplied path does not exist: '{path}'.")
            # We need an abs path because we may be in a different
            # cwd currently than we will be during the run.
            self._config_path = os.path.abspath(path)
            self._user_provided_config_path = True
            i += 2
        elif arg == '--root':
            if i + 1 >= argc:
                raise CleanError('Expected a path as next arg.')
            path = sys.argv[i + 1]
            # Unlike config_path, this one doesn't have to exist now.
            # We do however need an abs path because we may be in a
            # different cwd currently than we will be during the run.
            self._ba_root_path = os.path.abspath(path)
            i += 2
        elif arg == '--interactive':
            if did_set_interactive:
                raise CleanError('interactive/noninteractive can only'
                                 ' be specified once.')
            self._interactive = True
            did_set_interactive = True
            i += 1
        elif arg == '--noninteractive':
            if did_set_interactive:
                raise CleanError('interactive/noninteractive can only'
                                 ' be specified once.')
            self._interactive = False
            did_set_interactive = True
            i += 1
        elif arg == '--no-auto-restart':
            self._auto_restart = False
            i += 1
        elif arg == '--no-config-auto-restart':
            self._config_auto_restart = False
            i += 1
        else:
            raise CleanError(f"Invalid arg: '{arg}'.")
@classmethod
|
||||
def _par(cls, txt: str) -> str:
|
||||
"""Spit out a pretty paragraph for our help text."""
|
||||
import textwrap
|
||||
ind = ' ' * 2
|
||||
out = textwrap.fill(txt, 80, initial_indent=ind, subsequent_indent=ind)
|
||||
return f'{out}\n'
|
||||
|
||||
@classmethod
def print_help(cls) -> None:
    """Print app help.

    Describes all command-line options supported by the wrapper.
    """
    # Fix: 'filename' was computed but never used; the usage header
    # previously contained a literal placeholder instead of the
    # actual script name.
    filename = os.path.basename(__file__)
    out = (
        f'{Clr.BLD}{filename} usage:{Clr.RST}\n' + cls._par(
            'This script handles configuring, launching, re-launching,'
            ' and otherwise managing BallisticaCore operating'
            ' in server mode. It can be run with no arguments, but'
            ' accepts the following optional ones:') + f'\n'
        f'{Clr.BLD}--help:{Clr.RST}\n'
        f'  Show this help.\n'
        f'\n'
        f'{Clr.BLD}--config [path]{Clr.RST}\n' + cls._par(
            'Set the config file read by the server script. The config'
            ' file contains most options for what kind of game to host.'
            ' It should be in yaml format. Note that yaml is backwards'
            ' compatible with json so you can just write json if you'
            ' want to. If not specified, the script will look for a'
            ' file named \'config.yaml\' in the same directory as the'
            ' script.') + '\n'
        f'{Clr.BLD}--root [path]{Clr.RST}\n' + cls._par(
            'Set the ballistica root directory. This is where the server'
            ' binary will read and write its caches, state files,'
            ' downloaded assets, etc. It needs to be a writable'
            ' directory. If not specified, the script will use the'
            ' \'dist/ba_root\' directory relative to itself.') + '\n'
        f'{Clr.BLD}--interactive{Clr.RST}\n'
        f'{Clr.BLD}--noninteractive{Clr.RST}\n' + cls._par(
            'Specify whether the script should run interactively.'
            ' In interactive mode, the script creates a Python interpreter'
            ' and reads commands from stdin, allowing for live interaction'
            ' with the server. The server script will then exit when '
            'end-of-file is reached in stdin. Noninteractive mode creates'
            ' no interpreter and is more suited to being run in automated'
            ' scenarios. By default, interactive mode will be used if'
            ' a terminal is detected and noninteractive mode otherwise.') +
        '\n'
        f'{Clr.BLD}--no-auto-restart{Clr.RST}\n' +
        cls._par('Auto-restart is enabled by default, which means the'
                 ' server manager will restart the server binary whenever'
                 ' it exits (even when uncleanly). Disabling auto-restart'
                 ' will cause the server manager to instead exit after a'
                 ' single run and also to return error codes if the'
                 ' server binary did so.') + '\n'
        f'{Clr.BLD}--no-config-auto-restart{Clr.RST}\n' + cls._par(
            'By default, when auto-restart is enabled, the server binary'
            ' will be automatically restarted if changes to the server'
            ' config file are detected. This disables that behavior.'))
    print(out)
def load_config(self, strict: bool, print_confirmation: bool) -> None:
    """Load the config.

    If strict is True, errors will propagate upward.
    Otherwise, warnings will be printed and repeated attempts will be
    made to load the config. Eventually the function will give up
    and leave the existing config as-is.
    """
    # Non-strict mode retries with a short pause so the user can fix
    # the file while we wait.
    retry_seconds = 3
    maxtries = 11
    for trynum in range(maxtries):
        try:
            self._config = self._load_config_from_file(
                print_confirmation=print_confirmation)
            return
        except Exception as exc:
            if strict:
                raise CleanError(
                    f'Error loading config file:\n{exc}') from exc
            print(f'{Clr.RED}Error loading config file:\n{exc}.{Clr.RST}',
                  flush=True)
            if trynum == maxtries - 1:
                print(
                    f'{Clr.RED}Max-tries reached; giving up.'
                    f' Existing config values will be used.{Clr.RST}',
                    flush=True)
                break
            print(
                f'{Clr.CYN}Please correct the error.'
                f' Will re-attempt load in {retry_seconds}'
                f' seconds. (attempt {trynum+1} of'
                f' {maxtries-1}).{Clr.RST}',
                flush=True)

            # Sleep in 1s slices so we can bail quickly on shutdown.
            for _j in range(retry_seconds):
                # If the app is trying to die, drop what we're doing.
                if self._done:
                    return
                time.sleep(1)
def _load_config_from_file(self, print_confirmation: bool) -> ServerConfig:
    """Read the yaml config file and return it as a ServerConfig.

    Falls back to defaults when the (non-user-specified) file is
    missing; also records the file's mtime so config-change
    auto-restarts can be detected later.
    """

    out: Optional[ServerConfig] = None

    if not os.path.exists(self._config_path):

        # Special case:
        # If the user didn't specify a particular config file, allow
        # gracefully falling back to defaults if the default one is
        # missing.
        if not self._user_provided_config_path:
            if print_confirmation:
                print(
                    f'{Clr.YLW}Default config file not found'
                    f' (\'{self._config_path}\'); using default'
                    f' settings.{Clr.RST}',
                    flush=True)
            self._config_mtime = None
            self._last_config_mtime_check_time = time.time()
            return ServerConfig()

        # Don't be so lenient if the user pointed us at one though.
        raise RuntimeError(
            f"Config file not found: '{self._config_path}'.")

    import yaml
    with open(self._config_path) as infile:
        user_config_raw = yaml.safe_load(infile.read())

    # An empty config file will yield None, and that's ok.
    if user_config_raw is not None:
        out = dataclass_from_dict(ServerConfig, user_config_raw)

    # Update our known mod-time since we know it exists.
    self._config_mtime = Path(self._config_path).stat().st_mtime
    self._last_config_mtime_check_time = time.time()

    # Go with defaults if we weren't able to load anything.
    if out is None:
        out = ServerConfig()

    if print_confirmation:
        print(f'{Clr.CYN}Valid server config file loaded.{Clr.RST}',
              flush=True)
    return out
def _enable_tab_completion(self, locs: Dict) -> None:
|
||||
"""Enable tab-completion on platforms where available (linux/mac)."""
|
||||
try:
|
||||
import readline
|
||||
import rlcompleter
|
||||
readline.set_completer(rlcompleter.Completer(locs).complete)
|
||||
readline.parse_and_bind('tab:complete')
|
||||
except ImportError:
|
||||
# This is expected (readline doesn't exist under windows).
|
||||
pass
|
||||
|
||||
def _bg_thread_main(self) -> None:
    """Top level method run by our bg thread."""
    # Keep running server cycles until the app flags shutdown.
    while True:
        if self._done:
            break
        self._run_server_cycle()
def _handle_term_signal(self, sig: int, frame: FrameType) -> None:
|
||||
"""Handle signals (will always run in the main thread)."""
|
||||
del sig, frame # Unused.
|
||||
sys.exit(1 if self._should_report_subprocess_error else 0)
|
||||
|
||||
def _run_server_cycle(self) -> None:
    """Spin up the server subprocess and run it until exit.

    One full binary lifecycle: reload config, prep environment,
    launch, pump commands until exit, then clean up and decide
    whether the wrapper itself should keep going.
    """

    # Reload our config, and update our overall behavior based on it.
    # We do non-strict this time to give the user repeated attempts
    # if they mess up while modifying the config on the fly.
    self.load_config(strict=False, print_confirmation=True)

    self._prep_subprocess_environment()

    # Launch the binary and grab its stdin;
    # we'll use this to feed it commands.
    self._subprocess_launch_time = time.time()

    # Set an environment var so the server process knows its being
    # run under us. This causes it to ignore ctrl-c presses and other
    # slight behavior tweaks. Hmm; should this be an argument instead?
    os.environ['BA_SERVER_WRAPPER_MANAGED'] = '1'

    print(f'{Clr.CYN}Launching server subprocess...{Clr.RST}', flush=True)
    binary_name = ('ballisticacore_headless.exe'
                   if os.name == 'nt' else './ballisticacore_headless')
    assert self._ba_root_path is not None
    self._subprocess = None

    # Launch!
    try:
        self._subprocess = subprocess.Popen(
            [binary_name, '-cfgdir', self._ba_root_path],
            stdin=subprocess.PIPE,
            cwd='dist')
    except Exception as exc:
        # A failed launch counts as an unclean exit; the run below
        # will be a no-op since self._subprocess stays None.
        self._subprocess_exited_cleanly = False
        print(
            f'{Clr.RED}Error launching server subprocess: {exc}{Clr.RST}',
            flush=True)

    # Do the thing.
    try:
        self._run_subprocess_until_exit()
    except Exception as exc:
        print(f'{Clr.RED}Error running server subprocess: {exc}{Clr.RST}',
              flush=True)

    self._kill_subprocess()

    assert self._subprocess_exited_cleanly is not None

    # EW: it seems that if we die before the main thread has fully started
    # up the interpreter, its possible that it will not break out of its
    # loop via the usual SystemExit that gets sent when we die.
    if self._interactive:
        while (self._interpreter_start_time is None
               or time.time() - self._interpreter_start_time < 0.5):
            time.sleep(0.1)

    # Avoid super fast death loops.
    if (not self._subprocess_exited_cleanly and self._auto_restart
            and not self._done):
        time.sleep(5.0)

    # If they don't want auto-restart, we'll exit the whole wrapper.
    # (and with an error code if things ended badly).
    if not self._auto_restart:
        self._wrapper_shutdown_desired = True
        if not self._subprocess_exited_cleanly:
            self._should_report_subprocess_error = True

    self._reset_subprocess_vars()

    # If we want to die completely after this subprocess has ended,
    # tell the main thread to die.
    if self._wrapper_shutdown_desired:

        # Only do this if the main thread is not already waiting for
        # us to die; otherwise it can lead to deadlock.
        # (we hang in os.kill while main thread is blocked in Thread.join)
        if not self._done:
            self._done = True

            # This should break the main thread out of its blocking
            # interpreter call.
            os.kill(os.getpid(), signal.SIGTERM)
def _prep_subprocess_environment(self) -> None:
|
||||
"""Write files that must exist at process launch."""
|
||||
|
||||
assert self._ba_root_path is not None
|
||||
os.makedirs(self._ba_root_path, exist_ok=True)
|
||||
cfgpath = os.path.join(self._ba_root_path, 'config.json')
|
||||
if os.path.exists(cfgpath):
|
||||
with open(cfgpath) as infile:
|
||||
bincfg = json.loads(infile.read())
|
||||
else:
|
||||
bincfg = {}
|
||||
|
||||
# Some of our config values translate directly into the
|
||||
# ballisticacore config file; the rest we pass at runtime.
|
||||
bincfg['Port'] = self._config.port
|
||||
bincfg['Auto Balance Teams'] = self._config.auto_balance_teams
|
||||
bincfg['Show Tutorial'] = False
|
||||
bincfg['Idle Exit Minutes'] = self._config.idle_exit_minutes
|
||||
with open(cfgpath, 'w') as outfile:
|
||||
outfile.write(json.dumps(bincfg))
|
||||
|
||||
def _enqueue_server_command(self, command: ServerCommand) -> None:
|
||||
"""Enqueue a command to be sent to the server.
|
||||
|
||||
Can be called from any thread.
|
||||
"""
|
||||
with self._subprocess_commands_lock:
|
||||
self._subprocess_commands.append(command)
|
||||
|
||||
def _send_server_command(self, command: ServerCommand) -> None:
    """Send a command to the server.

    Must be called from the server process thread.
    """
    import pickle
    assert current_thread() is self._subprocess_thread
    assert self._subprocess is not None
    assert self._subprocess.stdin is not None
    # The command is pickled and embedded in a single line of Python
    # which the server binary reads from its stdin; the newline check
    # guarantees it stays one line.
    val = repr(pickle.dumps(command))
    assert '\n' not in val
    execcode = (f'import ba._servermode;'
                f' ba._servermode._cmd({val})\n').encode()
    self._subprocess.stdin.write(execcode)
    self._subprocess.stdin.flush()
def _run_subprocess_until_exit(self) -> None:
    """Pump commands to the running subprocess until it exits.

    Runs on the bg thread; returns when the subprocess exits, the
    app is shutting down, or a force-kill deadline passes.
    """
    if self._subprocess is None:
        return

    assert current_thread() is self._subprocess_thread
    assert self._subprocess.stdin is not None

    # Send the initial server config which should kick things off.
    # (but make sure its values are still valid first)
    dataclass_validate(self._config)
    self._send_server_command(StartServerModeCommand(self._config))

    while True:

        # If the app is trying to shut down, nope out immediately.
        if self._done:
            break

        # Pass along any commands to our process.
        with self._subprocess_commands_lock:
            for incmd in self._subprocess_commands:
                # If we're passing a raw string to exec, no need to wrap it
                # in any proper structure.
                if isinstance(incmd, str):
                    self._subprocess.stdin.write((incmd + '\n').encode())
                    self._subprocess.stdin.flush()
                else:
                    self._send_server_command(incmd)
            self._subprocess_commands = []

        # Request restarts/shut-downs for various reasons.
        self._request_shutdowns_or_restarts()

        # If they want to force-kill our subprocess, simply exit this
        # loop; the cleanup code will kill the process if its still
        # alive.
        if (self._subprocess_force_kill_time is not None
                and time.time() > self._subprocess_force_kill_time):
            print(f'{Clr.CYN}Force-killing subprocess...{Clr.RST}',
                  flush=True)
            break

        # Watch for the server process exiting..
        code: Optional[int] = self._subprocess.poll()
        if code is not None:

            clr = Clr.CYN if code == 0 else Clr.RED
            print(
                f'{clr}Server subprocess exited'
                f' with code {code}.{Clr.RST}',
                flush=True)
            self._subprocess_exited_cleanly = (code == 0)
            break

        time.sleep(0.25)
def _request_shutdowns_or_restarts(self) -> None:
    """Request subprocess restarts/shutdowns when their time has come.

    Runs on the subprocess thread. Handles three triggers:
    config-file changes (when auto-restart-on-config-change is on),
    the soft clean-exit timer, and the hard unclean-exit timer.
    Each trigger fires at most once per subprocess run.
    """
    assert current_thread() is self._subprocess_thread
    assert self._subprocess_launch_time is not None
    now = time.time()
    minutes_since_launch = (now - self._subprocess_launch_time) / 60.0

    def _request_exit(varname: str, minutes: float, *,
                      immediate: bool) -> None:
        # Announce and issue either a restart or a shutdown depending
        # on our auto-restart setting.
        opname = 'restart' if self._auto_restart else 'shutdown'
        style = 'immediate' if immediate else 'soft'
        print(
            f'{Clr.CYN}{varname}'
            f' ({minutes})'
            f' elapsed; requesting {style}'
            f' {opname}.{Clr.RST}',
            flush=True)
        if self._auto_restart:
            self.restart(immediate=immediate)
        else:
            self.shutdown(immediate=immediate)

    # If we're doing auto-restart with config changes, handle that.
    if (self._auto_restart and self._config_auto_restart
            and not self._subprocess_sent_config_auto_restart):
        # Throttle mtime checks; a slightly-off interval avoids
        # aliasing with the 0.25s poll loop.
        if (self._last_config_mtime_check_time is None
                or (now - self._last_config_mtime_check_time) > 3.123):
            self._last_config_mtime_check_time = now
            mtime: Optional[float]
            if os.path.isfile(self._config_path):
                mtime = Path(self._config_path).stat().st_mtime
            else:
                mtime = None
            if mtime != self._config_mtime:
                print(
                    f'{Clr.CYN}Config-file change detected;'
                    f' requesting immediate restart.{Clr.RST}',
                    flush=True)
                self.restart(immediate=True)
                self._subprocess_sent_config_auto_restart = True

    # Attempt clean exit if our clean-exit-time passes.
    # (and enforce a 6 hour max if not provided)
    # Note: clean_exit_minutes always ends up a float here, so no
    # None-check is needed before comparing (the previous version
    # carried a dead 'is not None' guard).
    clean_exit_minutes = 360.0
    if self._config.clean_exit_minutes is not None:
        clean_exit_minutes = min(clean_exit_minutes,
                                 self._config.clean_exit_minutes)
    if (minutes_since_launch > clean_exit_minutes
            and not self._subprocess_sent_clean_exit):
        _request_exit('clean_exit_minutes', clean_exit_minutes,
                      immediate=False)
        self._subprocess_sent_clean_exit = True

    # Attempt unclean exit if our unclean-exit-time passes.
    # (and enforce a 7 hour max if not provided)
    unclean_exit_minutes = 420.0
    if self._config.unclean_exit_minutes is not None:
        unclean_exit_minutes = min(unclean_exit_minutes,
                                   self._config.unclean_exit_minutes)
    if (minutes_since_launch > unclean_exit_minutes
            and not self._subprocess_sent_unclean_exit):
        _request_exit('unclean_exit_minutes', unclean_exit_minutes,
                      immediate=True)
        self._subprocess_sent_unclean_exit = True
|
||||
|
||||
def _reset_subprocess_vars(self) -> None:
    """Return all per-subprocess bookkeeping state to pristine values."""
    # Handles/timestamps revert to 'unset'.
    self._subprocess = None
    self._subprocess_launch_time = None
    self._subprocess_force_kill_time = None
    self._subprocess_exited_cleanly = None
    # One-shot 'already requested this exit' flags revert to False.
    for flagname in ('_subprocess_sent_config_auto_restart',
                     '_subprocess_sent_clean_exit',
                     '_subprocess_sent_unclean_exit'):
        setattr(self, flagname, False)
|
||||
|
||||
def _kill_subprocess(self) -> None:
    """End the server subprocess if it still exists."""
    assert current_thread() is self._subprocess_thread
    if self._subprocess is None:
        return

    print(f'{Clr.CYN}Stopping subprocess...{Clr.RST}', flush=True)

    # Politely ask the process to die and give it a moment to comply;
    # if it doesn't, bring down the hammer.
    self._subprocess.terminate()
    try:
        self._subprocess.wait(timeout=10)
    except subprocess.TimeoutExpired:
        self._subprocess_exited_cleanly = False
        self._subprocess.kill()
    else:
        self._subprocess_exited_cleanly = (
            self._subprocess.returncode == 0)
    print(f'{Clr.CYN}Subprocess stopped.{Clr.RST}', flush=True)
|
||||
|
||||
|
||||
def main() -> None:
    """Run the BallisticaCore server manager."""
    try:
        app = ServerManagerApp()
        app.run()
    except CleanError as exc:
        # Clean errors get a simple pretty-print and a nonzero exit;
        # anything else bubbles up with the usual traceback mess.
        exc.pretty_print()
        sys.exit(1)
|
||||
|
||||
|
||||
# Standard script entry point; do nothing when imported as a module.
if __name__ == '__main__':
    main()
|
||||
104
config.yaml
Normal file
104
config.yaml
Normal file
|
|
@ -0,0 +1,104 @@
|
|||
# To configure your server, create a config.yaml file in the same directory
|
||||
# as the ballisticacore_server script. The config_template.yaml file can be
|
||||
# copied or renamed as a convenient starting point.
|
||||
|
||||
# Uncomment any of these values to override defaults.
|
||||
|
||||
# Name of our server in the public parties list.
|
||||
party_name: Smoothy PRIVATE PARTY
|
||||
|
||||
|
||||
# Keep this set to true. Don't worry: the party will still be made private
# automatically; it just needs to stay true here for now.
|
||||
party_is_public: true
|
||||
|
||||
# If true, all connecting clients will be authenticated through the master
|
||||
# server to screen for fake account info. Generally this should always
|
||||
# be enabled unless you are hosting on a LAN with no internet connection.
|
||||
authenticate_clients: true
|
||||
|
||||
# IDs of server admins. Server admins are not kickable through the default
|
||||
# kick vote system and they are able to kick players without a vote. To get
|
||||
# your account id, enter 'getaccountid' in settings->advanced->enter-code.
|
||||
admins:
|
||||
- pb-yOuRAccOuNtIdHErE
|
||||
- pb-aNdMayBeAnotherHeRE
|
||||
|
||||
# Whether the default kick-voting system is enabled.
|
||||
#enable_default_kick_voting: true
|
||||
|
||||
# UDP port to host on. Change this to work around firewalls or run multiple
|
||||
# servers on one machine.
|
||||
# 43210 is the default and the only port that will show up in the LAN
|
||||
# browser tab.
|
||||
port: 43210
|
||||
|
||||
# Max devices in the party. Note that this does *NOT* mean max players.
|
||||
# Any device in the party can have more than one player on it if they have
|
||||
# multiple controllers. Also, this number currently includes the server so
|
||||
# generally make it 1 bigger than you need. Max-players is not currently
|
||||
# exposed but I'll try to add that soon.
|
||||
max_party_size: 8
|
||||
|
||||
# Options here are 'ffa' (free-for-all) and 'teams'
|
||||
# This value is only used if you do not supply a playlist_code (see below).
|
||||
# In that case the default teams or free-for-all playlist gets used.
|
||||
#session_type: ffa
|
||||
|
||||
# To host your own custom playlists, use the 'share' functionality in the
|
||||
# playlist editor in the regular version of the game.
|
||||
# This will give you a numeric code you can enter here to host that
|
||||
# playlist.
|
||||
#playlist_code: 12345
|
||||
|
||||
# Whether to shuffle the playlist or play its games in designated order.
|
||||
#playlist_shuffle: true
|
||||
|
||||
# If true, keeps team sizes equal by disallowing joining the largest team
|
||||
# (teams mode only).
|
||||
#auto_balance_teams: true
|
||||
|
||||
# Whether to enable telnet access.
|
||||
# IMPORTANT: This option is no longer available, as it was being used
|
||||
# for exploits. Live access to the running server is still possible through
|
||||
# the mgr.cmd() function in the server script. Run your server through
|
||||
# tools such as 'screen' or 'tmux' and you can reconnect to it remotely
|
||||
# over a secure ssh connection.
|
||||
#enable_telnet: false
|
||||
|
||||
# Series length in teams mode (7 == 'best-of-7' series; a team must
|
||||
# get 4 wins)
|
||||
#teams_series_length: 7
|
||||
|
||||
# Points to win in free-for-all mode (Points are awarded per game based on
|
||||
# performance)
|
||||
#ffa_series_length: 24
|
||||
|
||||
# If you provide a custom stats webpage for your server, you can use
|
||||
# this to provide a convenient in-game link to it in the server-browser
|
||||
# beside the server name.
|
||||
# if ${ACCOUNT} is present in the string, it will be replaced by the
|
||||
# currently-signed-in account's id. To fetch info about an account,
|
||||
# your backend server can use the following url:
|
||||
# http://bombsquadgame.com/accountquery?id=ACCOUNT_ID_HERE
|
||||
#stats_url: https://mystatssite.com/showstats?player=${ACCOUNT}
|
||||
|
||||
# If present, the server subprocess will attempt to gracefully exit after
|
||||
# this amount of time. A graceful exit can occur at the end of a series
|
||||
# or other opportune time. Server-managers set to auto-restart (the
|
||||
# default) will then spin up a fresh subprocess. This mechanism can be
|
||||
# useful to clear out any memory leaks or other accumulated bad state
|
||||
# in the server subprocess.
|
||||
#clean_exit_minutes: 60
|
||||
|
||||
# If present, the server subprocess will shut down immediately after this
|
||||
# amount of time. This can be useful as a fallback for clean_exit_minutes.
|
||||
# The server manager will then spin up a fresh server subprocess if
|
||||
# auto-restart is enabled (the default).
|
||||
#unclean_exit_minutes: 90
|
||||
|
||||
# If present, the server subprocess will shut down immediately if this
|
||||
# amount of time passes with no activity from any players. The server
|
||||
# manager will then spin up a fresh server subprocess if
|
||||
# auto-restart is enabled (the default).
|
||||
#idle_exit_minutes: 20
|
||||
|
||||
1471
dist/ba_data/data/langdata.json
vendored
Normal file
1471
dist/ba_data/data/langdata.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1848
dist/ba_data/data/languages/arabic.json
vendored
Normal file
1848
dist/ba_data/data/languages/arabic.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1840
dist/ba_data/data/languages/belarussian.json
vendored
Normal file
1840
dist/ba_data/data/languages/belarussian.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1869
dist/ba_data/data/languages/chinese.json
vendored
Normal file
1869
dist/ba_data/data/languages/chinese.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1844
dist/ba_data/data/languages/chinesetraditional.json
vendored
Normal file
1844
dist/ba_data/data/languages/chinesetraditional.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1838
dist/ba_data/data/languages/croatian.json
vendored
Normal file
1838
dist/ba_data/data/languages/croatian.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1869
dist/ba_data/data/languages/czech.json
vendored
Normal file
1869
dist/ba_data/data/languages/czech.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1626
dist/ba_data/data/languages/danish.json
vendored
Normal file
1626
dist/ba_data/data/languages/danish.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1922
dist/ba_data/data/languages/dutch.json
vendored
Normal file
1922
dist/ba_data/data/languages/dutch.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1857
dist/ba_data/data/languages/english.json
vendored
Normal file
1857
dist/ba_data/data/languages/english.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1527
dist/ba_data/data/languages/esperanto.json
vendored
Normal file
1527
dist/ba_data/data/languages/esperanto.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1955
dist/ba_data/data/languages/french.json
vendored
Normal file
1955
dist/ba_data/data/languages/french.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1968
dist/ba_data/data/languages/german.json
vendored
Normal file
1968
dist/ba_data/data/languages/german.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1999
dist/ba_data/data/languages/gibberish.json
vendored
Normal file
1999
dist/ba_data/data/languages/gibberish.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1828
dist/ba_data/data/languages/greek.json
vendored
Normal file
1828
dist/ba_data/data/languages/greek.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1832
dist/ba_data/data/languages/hindi.json
vendored
Normal file
1832
dist/ba_data/data/languages/hindi.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1834
dist/ba_data/data/languages/hungarian.json
vendored
Normal file
1834
dist/ba_data/data/languages/hungarian.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1854
dist/ba_data/data/languages/indonesian.json
vendored
Normal file
1854
dist/ba_data/data/languages/indonesian.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1952
dist/ba_data/data/languages/italian.json
vendored
Normal file
1952
dist/ba_data/data/languages/italian.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1789
dist/ba_data/data/languages/korean.json
vendored
Normal file
1789
dist/ba_data/data/languages/korean.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1853
dist/ba_data/data/languages/persian.json
vendored
Normal file
1853
dist/ba_data/data/languages/persian.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1942
dist/ba_data/data/languages/polish.json
vendored
Normal file
1942
dist/ba_data/data/languages/polish.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1978
dist/ba_data/data/languages/portuguese.json
vendored
Normal file
1978
dist/ba_data/data/languages/portuguese.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1683
dist/ba_data/data/languages/romanian.json
vendored
Normal file
1683
dist/ba_data/data/languages/romanian.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1946
dist/ba_data/data/languages/russian.json
vendored
Normal file
1946
dist/ba_data/data/languages/russian.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1861
dist/ba_data/data/languages/serbian.json
vendored
Normal file
1861
dist/ba_data/data/languages/serbian.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1809
dist/ba_data/data/languages/slovak.json
vendored
Normal file
1809
dist/ba_data/data/languages/slovak.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1969
dist/ba_data/data/languages/spanish.json
vendored
Normal file
1969
dist/ba_data/data/languages/spanish.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1733
dist/ba_data/data/languages/swedish.json
vendored
Normal file
1733
dist/ba_data/data/languages/swedish.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1849
dist/ba_data/data/languages/turkish.json
vendored
Normal file
1849
dist/ba_data/data/languages/turkish.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1850
dist/ba_data/data/languages/ukrainian.json
vendored
Normal file
1850
dist/ba_data/data/languages/ukrainian.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1819
dist/ba_data/data/languages/venetian.json
vendored
Normal file
1819
dist/ba_data/data/languages/venetian.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
1820
dist/ba_data/data/languages/vietnamese.json
vendored
Normal file
1820
dist/ba_data/data/languages/vietnamese.json
vendored
Normal file
File diff suppressed because it is too large
Load diff
85
dist/ba_data/data/maps/big_g.json
vendored
Normal file
85
dist/ba_data/data/maps/big_g.json
vendored
Normal file
|
|
@ -0,0 +1,85 @@
|
|||
{
|
||||
"format": ["stdmap", 1],
|
||||
"locations": {
|
||||
"area_of_interest_bounds": [
|
||||
{"center": [-0.4, 2.33, -0.54], "size": [19.12, 10.2, 23.5]}
|
||||
],
|
||||
"ffa_spawn": [
|
||||
{"center": [3.14, 1.17, 6.17], "size": [4.74, 1.0, 1.03]},
|
||||
{"center": [5.42, 1.18, -0.17], "size": [2.95, 0.62, 0.5]},
|
||||
{"center": [-0.37, 2.89, -6.91], "size": [7.58, 0.62, 0.5]},
|
||||
{"center": [-2.39, 1.12, -3.42], "size": [2.93, 0.62, 0.98]},
|
||||
{"center": [-7.46, 2.86, 4.94], "size": [0.87, 0.62, 2.23]}
|
||||
],
|
||||
"flag": [
|
||||
{"center": [7.56, 2.89, -7.21]},
|
||||
{"center": [7.7, 1.1, 6.1]},
|
||||
{"center": [-8.12, 2.84, 6.1]},
|
||||
{"center": [-8.02, 2.84, -6.2]}
|
||||
],
|
||||
"flag_default": [
|
||||
{"center": [-7.56, 2.85, 0.09]}
|
||||
],
|
||||
"map_bounds": [
|
||||
{"center": [-0.19, 8.76, 0.2], "size": [27.42, 18.47, 22.17]}
|
||||
],
|
||||
"powerup_spawn": [
|
||||
{"center": [7.83, 2.12, -0.05]},
|
||||
{"center": [-5.19, 1.48, -3.8]},
|
||||
{"center": [-8.54, 3.76, -7.28]},
|
||||
{"center": [7.37, 3.76, -3.09]},
|
||||
{"center": [-8.69, 3.69, 6.63]}
|
||||
],
|
||||
"race_mine": [
|
||||
{"center": [-0.06, 1.12, 4.97]},
|
||||
{"center": [-0.06, 1.12, 7.0]},
|
||||
{"center": [-0.73, 1.12, -2.83]},
|
||||
{"center": [-3.29, 1.12, 0.85]},
|
||||
{"center": [5.08, 2.85, -5.25]},
|
||||
{"center": [6.29, 2.85, -5.25]},
|
||||
{"center": [0.97, 2.85, -7.89]},
|
||||
{"center": [-2.98, 2.85, -6.24]},
|
||||
{"center": [-6.96, 2.85, -2.12]},
|
||||
{"center": [-6.87, 2.85, 2.72]}
|
||||
],
|
||||
"race_point": [
|
||||
{"center": [2.28, 1.17, 6.02], "size": [0.71, 4.67, 1.32]},
|
||||
{"center": [4.85, 1.17, 6.04], "size": [0.39, 4.58, 1.35]},
|
||||
{"center": [6.91, 1.17, 1.14], "size": [1.61, 3.52, 0.11]},
|
||||
{"center": [2.68, 1.17, 0.77], "size": [0.65, 3.6, 0.11]},
|
||||
{"center": [-0.38, 1.23, 1.92], "size": [0.11, 4.25, 0.59]},
|
||||
{"center": [-4.37, 1.17, -0.36], "size": [1.63, 4.55, 0.11]},
|
||||
{"center": [0.41, 1.17, -3.39], "size": [0.11, 4.95, 1.31]},
|
||||
{"center": [4.27, 2.2, -3.34], "size": [0.11, 4.39, 1.2]},
|
||||
{"center": [2.55, 2.88, -7.12], "size": [0.11, 5.51, 1.0]},
|
||||
{"center": [-4.2, 2.88, -7.11], "size": [0.11, 5.5, 1.03]},
|
||||
{"center": [-7.63, 2.88, -3.62], "size": [1.44, 5.16, 0.06]},
|
||||
{"center": [-7.54, 2.88, 3.29], "size": [1.67, 5.52, 0.06]}
|
||||
],
|
||||
"shadow_lower_bottom": [
|
||||
{"center": [-0.22, 0.29, 2.68]}
|
||||
],
|
||||
"shadow_lower_top": [
|
||||
{"center": [-0.22, 0.88, 2.68]}
|
||||
],
|
||||
"shadow_upper_bottom": [
|
||||
{"center": [-0.22, 6.31, 2.68]}
|
||||
],
|
||||
"shadow_upper_top": [
|
||||
{"center": [-0.22, 9.47, 2.68]}
|
||||
],
|
||||
"spawn": [
|
||||
{"center": [7.18, 2.86, -4.41], "size": [0.76, 1.0, 1.82]},
|
||||
{"center": [5.88, 1.14, 6.17], "size": [1.82, 1.0, 0.77]}
|
||||
],
|
||||
"spawn_by_flag": [
|
||||
{"center": [7.18, 2.86, -4.41], "size": [0.76, 1.0, 1.82]},
|
||||
{"center": [5.88, 1.14, 6.17], "size": [1.82, 1.0, 0.77]},
|
||||
{"center": [-6.67, 3.55, 5.82], "size": [1.1, 1.0, 1.29]},
|
||||
{"center": [-6.84, 3.55, -6.17], "size": [0.82, 1.0, 1.29]}
|
||||
],
|
||||
"tnt": [
|
||||
{"center": [-3.4, 2.07, -1.9]}
|
||||
]
|
||||
}
|
||||
}
|
||||
95
dist/ba_data/data/maps/bridgit.json
vendored
Normal file
95
dist/ba_data/data/maps/bridgit.json
vendored
Normal file
|
|
@ -0,0 +1,95 @@
|
|||
{
|
||||
"format": ["stdmap", 1],
|
||||
"globals": {
|
||||
"ambient_color": [1.1, 1.2, 1.3],
|
||||
"tint": [1.1, 1.2, 1.3],
|
||||
"vignette_inner": [0.9, 0.9, 0.93],
|
||||
"vignette_outer": [0.65, 0.6, 0.55]
|
||||
},
|
||||
"locations": {
|
||||
"area_of_interest_bounds": [
|
||||
{"center": [-0.25, 3.83, -1.53], "size": [19.15, 7.31, 8.44]}
|
||||
],
|
||||
"ffa_spawn": [
|
||||
{"center": [-5.87, 3.72, -1.62], "size": [0.94, 1.0, 1.82]},
|
||||
{"center": [5.16, 3.76, -1.44], "size": [0.77, 1.0, 1.82]},
|
||||
{"center": [-0.43, 3.76, -1.56], "size": [4.03, 1.0, 0.27]}
|
||||
],
|
||||
"flag": [
|
||||
{"center": [-7.35, 3.77, -1.62]},
|
||||
{"center": [6.89, 3.77, -1.44]}
|
||||
],
|
||||
"flag_default": [
|
||||
{"center": [-0.22, 3.8, -1.56]}
|
||||
],
|
||||
"map_bounds": [
|
||||
{"center": [-0.19, 7.48, -1.31], "size": [27.42, 18.47, 19.52]}
|
||||
],
|
||||
"powerup_spawn": [
|
||||
{"center": [6.83, 4.66, 0.19]},
|
||||
{"center": [-7.25, 4.73, 0.25]},
|
||||
{"center": [6.83, 4.66, -3.46]},
|
||||
{"center": [-7.25, 4.73, -3.4]}
|
||||
],
|
||||
"shadow_lower_bottom": [
|
||||
{"center": [-0.22, 2.83, 2.68]}
|
||||
],
|
||||
"shadow_lower_top": [
|
||||
{"center": [-0.22, 3.5, 2.68]}
|
||||
],
|
||||
"shadow_upper_bottom": [
|
||||
{"center": [-0.22, 6.31, 2.68]}
|
||||
],
|
||||
"shadow_upper_top": [
|
||||
{"center": [-0.22, 9.47, 2.68]}
|
||||
],
|
||||
"spawn": [
|
||||
{"center": [-5.87, 3.72, -1.62], "size": [0.94, 1.0, 1.82]},
|
||||
{"center": [5.16, 3.76, -1.44], "size": [0.77, 1.0, 1.82]}
|
||||
]
|
||||
},
|
||||
"name": "Bridgit",
|
||||
"play_types": ["melee", "team_flag", "keep_away"],
|
||||
"preview_texture": "bridgitPreview",
|
||||
"terrain_nodes": [
|
||||
{
|
||||
"collide_model": "bridgitLevelBottom",
|
||||
"color_texture": "bridgitLevelColor",
|
||||
"comment": "Top portion of bridge.",
|
||||
"materials": ["footing"],
|
||||
"model": "bridgitLevelTop"
|
||||
},
|
||||
{
|
||||
"color_texture": "bridgitLevelColor",
|
||||
"comment": "Bottom portion of bridge with no lighting effects.",
|
||||
"lighting": false,
|
||||
"model": "bridgitLevelBottom"
|
||||
},
|
||||
{
|
||||
"background": true,
|
||||
"color_texture": "natureBackgroundColor",
|
||||
"comment": "Visible background.",
|
||||
"lighting": false,
|
||||
"model": "natureBackground"
|
||||
},
|
||||
{
|
||||
"background": true,
|
||||
"color_texture": "model_bg_tex",
|
||||
"comment": "360 degree bg for vr.",
|
||||
"lighting": false,
|
||||
"model": "bg_vr_fill_model",
|
||||
"vr_only": true
|
||||
},
|
||||
{
|
||||
"bumper": true,
|
||||
"collide_model": "railing_collide_model",
|
||||
"comment": "Invisible railing to help prevent falls.",
|
||||
"materials": ["railing"]
|
||||
},
|
||||
{
|
||||
"collide_model": "collide_bg",
|
||||
"comment": "Collision shape for bg",
|
||||
"materials": ["footing", "friction@10", "death"]
|
||||
}
|
||||
]
|
||||
}
|
||||
130
dist/ba_data/data/maps/courtyard.json
vendored
Normal file
130
dist/ba_data/data/maps/courtyard.json
vendored
Normal file
|
|
@ -0,0 +1,130 @@
|
|||
{
|
||||
"format": ["stdmap", 1],
|
||||
"locations": {
|
||||
"area_of_interest_bounds": [
|
||||
{"center": [0.35, 3.96, -2.18], "size": [16.38, 7.76, 13.39]}
|
||||
],
|
||||
"bot_spawn_bottom": [
|
||||
{"center": [-0.06, 2.81, 1.95]}
|
||||
],
|
||||
"bot_spawn_bottom_half_left": [
|
||||
{"center": [-2.05, 2.81, 1.95]}
|
||||
],
|
||||
"bot_spawn_bottom_half_right": [
|
||||
{"center": [1.86, 2.81, 1.95]}
|
||||
],
|
||||
"bot_spawn_bottom_left": [
|
||||
{"center": [-3.68, 2.81, 1.95]}
|
||||
],
|
||||
"bot_spawn_bottom_right": [
|
||||
{"center": [3.59, 2.81, 1.95]}
|
||||
],
|
||||
"bot_spawn_left": [
|
||||
{"center": [-6.45, 2.81, -2.32]}
|
||||
],
|
||||
"bot_spawn_left_lower": [
|
||||
{"center": [-6.45, 2.81, -1.51]}
|
||||
],
|
||||
"bot_spawn_left_lower_more": [
|
||||
{"center": [-6.45, 2.81, -0.48]}
|
||||
],
|
||||
"bot_spawn_left_upper": [
|
||||
{"center": [-6.45, 2.81, -3.18]}
|
||||
],
|
||||
"bot_spawn_left_upper_more": [
|
||||
{"center": [-6.45, 2.81, -4.01]}
|
||||
],
|
||||
"bot_spawn_right": [
|
||||
{"center": [6.54, 2.81, -2.32]}
|
||||
],
|
||||
"bot_spawn_right_lower": [
|
||||
{"center": [6.54, 2.81, -1.4]}
|
||||
],
|
||||
"bot_spawn_right_lower_more": [
|
||||
{"center": [6.54, 2.81, -0.36]}
|
||||
],
|
||||
"bot_spawn_right_upper": [
|
||||
{"center": [6.54, 2.81, -3.13]}
|
||||
],
|
||||
"bot_spawn_right_upper_more": [
|
||||
{"center": [6.54, 2.81, -3.98]}
|
||||
],
|
||||
"bot_spawn_top": [
|
||||
{"center": [-0.06, 2.81, -5.83]}
|
||||
],
|
||||
"bot_spawn_top_half_left": [
|
||||
{"center": [-1.49, 2.81, -5.83]}
|
||||
],
|
||||
"bot_spawn_top_half_right": [
|
||||
{"center": [1.6, 2.81, -5.83]}
|
||||
],
|
||||
"bot_spawn_top_left": [
|
||||
{"center": [-3.12, 2.81, -5.95]}
|
||||
],
|
||||
"bot_spawn_top_right": [
|
||||
{"center": [3.4, 2.81, -5.95]}
|
||||
],
|
||||
"bot_spawn_turret_bottom_left": [
|
||||
{"center": [-6.13, 3.33, 1.91]}
|
||||
],
|
||||
"bot_spawn_turret_bottom_right": [
|
||||
{"center": [6.37, 3.33, 1.8]}
|
||||
],
|
||||
"bot_spawn_turret_top_left": [
|
||||
{"center": [-6.13, 3.33, -6.57]}
|
||||
],
|
||||
"bot_spawn_turret_top_middle": [
|
||||
{"center": [0.08, 4.27, -8.52]}
|
||||
],
|
||||
"bot_spawn_turret_top_middle_left": [
|
||||
{"center": [-1.27, 4.27, -8.52]}
|
||||
],
|
||||
"bot_spawn_turret_top_middle_right": [
|
||||
{"center": [1.13, 4.27, -8.52]}
|
||||
],
|
||||
"bot_spawn_turret_top_right": [
|
||||
{"center": [6.37, 3.33, -6.6]}
|
||||
],
|
||||
"edge_box": [
|
||||
{"center": [0.0, 1.04, -2.14], "size": [12.02, 11.41, 7.81]}
|
||||
],
|
||||
"ffa_spawn": [
|
||||
{"center": [-6.23, 3.77, -5.16], "size": [1.48, 1.0, 0.07]},
|
||||
{"center": [6.29, 3.77, -4.92], "size": [1.42, 1.0, 0.07]},
|
||||
{"center": [-0.02, 4.4, -6.96], "size": [1.51, 1.0, 0.25]},
|
||||
{"center": [-0.02, 3.79, 3.45], "size": [4.99, 1.0, 0.15]}
|
||||
],
|
||||
"flag": [
|
||||
{"center": [-5.97, 2.82, -2.43]},
|
||||
{"center": [5.91, 2.8, -2.22]}
|
||||
],
|
||||
"flag_default": [
|
||||
{"center": [0.25, 2.78, -2.64]}
|
||||
],
|
||||
"map_bounds": [
|
||||
{"center": [0.26, 4.9, -3.54], "size": [29.24, 14.2, 29.93]}
|
||||
],
|
||||
"powerup_spawn": [
|
||||
{"center": [-3.56, 3.17, 0.37]},
|
||||
{"center": [3.63, 3.17, 0.41]},
|
||||
{"center": [3.63, 3.17, -4.99]},
|
||||
{"center": [-3.56, 3.17, -5.02]}
|
||||
],
|
||||
"shadow_lower_bottom": [
|
||||
{"center": [0.52, 0.02, 5.34]}
|
||||
],
|
||||
"shadow_lower_top": [
|
||||
{"center": [0.52, 1.21, 5.34]}
|
||||
],
|
||||
"shadow_upper_bottom": [
|
||||
{"center": [0.52, 6.36, 5.34]}
|
||||
],
|
||||
"shadow_upper_top": [
|
||||
{"center": [0.52, 10.12, 5.34]}
|
||||
],
|
||||
"spawn": [
|
||||
{"center": [-7.51, 3.8, -2.1], "size": [0.09, 1.0, 2.2]},
|
||||
{"center": [7.46, 3.77, -1.84], "size": [0.03, 1.0, 2.22]}
|
||||
]
|
||||
}
|
||||
}
|
||||
48
dist/ba_data/data/maps/crag_castle.json
vendored
Normal file
48
dist/ba_data/data/maps/crag_castle.json
vendored
Normal file
|
|
@ -0,0 +1,48 @@
|
|||
{
|
||||
"format": ["stdmap", 1],
|
||||
"locations": {
|
||||
"area_of_interest_bounds": [
|
||||
{"center": [0.7, 6.56, -3.15], "size": [16.74, 14.95, 11.6]}
|
||||
],
|
||||
"ffa_spawn": [
|
||||
{"center": [-4.04, 7.55, -3.54], "size": [2.47, 1.16, 0.18]},
|
||||
{"center": [5.43, 7.58, -3.5], "size": [2.42, 1.13, 0.18]},
|
||||
{"center": [4.86, 9.31, -6.01], "size": [1.62, 1.13, 0.18]},
|
||||
{"center": [-3.63, 9.31, -6.01], "size": [1.62, 1.13, 0.18]},
|
||||
{"center": [-2.41, 5.93, 0.03], "size": [1.62, 1.13, 0.18]},
|
||||
{"center": [3.52, 5.93, 0.03], "size": [1.62, 1.13, 0.18]}
|
||||
],
|
||||
"flag": [
|
||||
{"center": [-1.9, 9.36, -6.44]},
|
||||
{"center": [3.24, 9.32, -6.39]},
|
||||
{"center": [-6.88, 7.48, 0.21]},
|
||||
{"center": [8.19, 7.48, 0.15]}
|
||||
],
|
||||
"flag_default": [
|
||||
{"center": [0.63, 6.22, -0.04]}
|
||||
],
|
||||
"map_bounds": [
|
||||
{"center": [0.48, 9.09, -3.27], "size": [22.96, 9.91, 14.18]}
|
||||
],
|
||||
"powerup_spawn": [
|
||||
{"center": [7.92, 7.84, -5.99]},
|
||||
{"center": [-0.7, 7.88, -6.07]},
|
||||
{"center": [1.86, 7.89, -6.08]},
|
||||
{"center": [-6.67, 7.99, -6.12]}
|
||||
],
|
||||
"spawn": [
|
||||
{"center": [-5.17, 7.55, -3.54], "size": [1.06, 1.16, 0.18]},
|
||||
{"center": [6.2, 7.58, -3.5], "size": [1.01, 1.13, 0.18]}
|
||||
],
|
||||
"spawn_by_flag": [
|
||||
{"center": [-2.87, 9.36, -6.04]},
|
||||
{"center": [4.31, 9.36, -6.04]},
|
||||
{"center": [-6.63, 7.51, -0.59]},
|
||||
{"center": [7.87, 7.51, -0.59]}
|
||||
],
|
||||
"tnt": [
|
||||
{"center": [-5.04, 10.01, -6.16]},
|
||||
{"center": [6.2, 10.01, -6.16]}
|
||||
]
|
||||
}
|
||||
}
|
||||
46
dist/ba_data/data/maps/doom_shroom.json
vendored
Normal file
46
dist/ba_data/data/maps/doom_shroom.json
vendored
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
{
|
||||
"format": ["stdmap", 1],
|
||||
"locations": {
|
||||
"area_of_interest_bounds": [
|
||||
{"center": [0.47, 2.32, -3.22], "size": [21.35, 10.26, 14.67]}
|
||||
],
|
||||
"ffa_spawn": [
|
||||
{"center": [-5.83, 2.3, -3.45], "size": [1.0, 1.0, 2.68]},
|
||||
{"center": [6.5, 2.4, -3.57], "size": [1.0, 1.0, 2.68]},
|
||||
{"center": [0.88, 2.31, -0.36], "size": [4.46, 1.0, 0.27]},
|
||||
{"center": [0.88, 2.31, -7.12], "size": [4.46, 1.0, 0.27]}
|
||||
],
|
||||
"flag": [
|
||||
{"center": [-7.15, 2.25, -3.43]},
|
||||
{"center": [8.1, 2.32, -3.55]}
|
||||
],
|
||||
"flag_default": [
|
||||
{"center": [0.6, 2.37, -4.24]}
|
||||
],
|
||||
"map_bounds": [
|
||||
{"center": [0.46, 1.33, -3.81], "size": [27.75, 14.45, 22.99]}
|
||||
],
|
||||
"powerup_spawn": [
|
||||
{"center": [5.18, 4.28, -7.28]},
|
||||
{"center": [-3.24, 4.16, -0.32]},
|
||||
{"center": [5.08, 4.16, -0.32]},
|
||||
{"center": [-3.4, 4.28, -7.43]}
|
||||
],
|
||||
"shadow_lower_bottom": [
|
||||
{"center": [0.6, -0.23, 3.37]}
|
||||
],
|
||||
"shadow_lower_top": [
|
||||
{"center": [0.6, 0.7, 3.37]}
|
||||
],
|
||||
"shadow_upper_bottom": [
|
||||
{"center": [0.6, 5.41, 3.37]}
|
||||
],
|
||||
"shadow_upper_top": [
|
||||
{"center": [0.6, 7.89, 3.37]}
|
||||
],
|
||||
"spawn": [
|
||||
{"center": [-5.83, 2.3, -3.45], "size": [1.0, 1.0, 2.68]},
|
||||
{"center": [6.5, 2.4, -3.57], "size": [1.0, 1.0, 2.68]}
|
||||
]
|
||||
}
|
||||
}
|
||||
42
dist/ba_data/data/maps/football_stadium.json
vendored
Normal file
42
dist/ba_data/data/maps/football_stadium.json
vendored
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
{
|
||||
"format": ["stdmap", 1],
|
||||
"locations": {
|
||||
"area_of_interest_bounds": [
|
||||
{"center": [0.0, 1.19, 0.43], "size": [29.82, 11.57, 18.89]}
|
||||
],
|
||||
"edge_box": [
|
||||
{"center": [-0.1, 0.41, 0.43], "size": [22.48, 1.29, 8.99]}
|
||||
],
|
||||
"ffa_spawn": [
|
||||
{"center": [-0.08, 0.02, -4.37], "size": [8.9, 1.0, 0.44]},
|
||||
{"center": [-0.08, 0.02, 4.08], "size": [8.9, 1.0, 0.44]}
|
||||
],
|
||||
"flag": [
|
||||
{"center": [-10.99, 0.06, 0.11]},
|
||||
{"center": [11.01, 0.04, 0.11]}
|
||||
],
|
||||
"flag_default": [
|
||||
{"center": [-0.1, 0.04, 0.11]}
|
||||
],
|
||||
"goal": [
|
||||
{"center": [12.22, 1.0, 0.11], "size": [2.0, 2.0, 12.97]},
|
||||
{"center": [-12.16, 1.0, 0.11], "size": [2.0, 2.0, 13.12]}
|
||||
],
|
||||
"map_bounds": [
|
||||
{"center": [0.0, 1.19, 0.43], "size": [42.1, 22.81, 29.77]}
|
||||
],
|
||||
"powerup_spawn": [
|
||||
{"center": [5.41, 0.95, -5.04]},
|
||||
{"center": [-5.56, 0.95, -5.04]},
|
||||
{"center": [5.41, 0.95, 5.15]},
|
||||
{"center": [-5.74, 0.95, 5.15]}
|
||||
],
|
||||
"spawn": [
|
||||
{"center": [-10.04, 0.02, 0.0], "size": [0.5, 1.0, 4.0]},
|
||||
{"center": [9.82, 0.01, 0.0], "size": [0.5, 1.0, 4.0]}
|
||||
],
|
||||
"tnt": [
|
||||
{"center": [-0.08, 0.95, -0.78]}
|
||||
]
|
||||
}
|
||||
}
|
||||
44
dist/ba_data/data/maps/happy_thoughts.json
vendored
Normal file
44
dist/ba_data/data/maps/happy_thoughts.json
vendored
Normal file
|
|
@ -0,0 +1,44 @@
|
|||
{
|
||||
"format": ["stdmap", 1],
|
||||
"locations": {
|
||||
"area_of_interest_bounds": [
|
||||
{"center": [-1.05, 12.68, -5.4], "size": [34.46, 20.94, 0.69]}
|
||||
],
|
||||
"ffa_spawn": [
|
||||
{"center": [-9.3, 8.01, -5.44], "size": [1.56, 1.45, 0.12]},
|
||||
{"center": [7.48, 8.17, -5.61], "size": [1.55, 1.45, 0.04]},
|
||||
{"center": [9.56, 11.31, -5.61], "size": [1.34, 1.45, 0.04]},
|
||||
{"center": [-11.56, 10.99, -5.61], "size": [1.34, 1.45, 0.04]},
|
||||
{"center": [-1.88, 9.46, -5.61], "size": [1.34, 1.45, 0.04]},
|
||||
{"center": [-0.49, 5.08, -5.52], "size": [1.88, 1.45, 0.01]}
|
||||
],
|
||||
"flag": [
|
||||
{"center": [-11.75, 8.06, -5.52]},
|
||||
{"center": [9.84, 8.19, -5.52]},
|
||||
{"center": [-0.22, 5.01, -5.52]},
|
||||
{"center": [-0.05, 12.73, -5.52]}
|
||||
],
|
||||
"flag_default": [
|
||||
{"center": [-0.04, 12.72, -5.52]}
|
||||
],
|
||||
"map_bounds": [
|
||||
{"center": [-0.87, 9.21, -5.73], "size": [36.1, 26.2, 7.9]}
|
||||
],
|
||||
"powerup_spawn": [
|
||||
{"center": [1.16, 6.75, -5.47]},
|
||||
{"center": [-1.9, 10.56, -5.51]},
|
||||
{"center": [10.56, 12.25, -5.58]},
|
||||
{"center": [-12.34, 12.25, -5.58]}
|
||||
],
|
||||
"spawn": [
|
||||
{"center": [-9.3, 8.01, -5.44], "size": [1.56, 1.45, 0.12]},
|
||||
{"center": [7.48, 8.17, -5.61], "size": [1.55, 1.45, 0.04]}
|
||||
],
|
||||
"spawn_by_flag": [
|
||||
{"center": [-9.3, 8.01, -5.44], "size": [1.56, 1.45, 0.12]},
|
||||
{"center": [7.48, 8.17, -5.61], "size": [1.55, 1.45, 0.04]},
|
||||
{"center": [-1.46, 5.04, -5.54], "size": [0.95, 0.67, 0.09]},
|
||||
{"center": [0.49, 12.74, -5.6], "size": [0.52, 0.52, 0.02]}
|
||||
]
|
||||
}
|
||||
}
|
||||
39
dist/ba_data/data/maps/hockey_stadium.json
vendored
Normal file
39
dist/ba_data/data/maps/hockey_stadium.json
vendored
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
{
|
||||
"format": ["stdmap", 1],
|
||||
"locations": {
|
||||
"area_of_interest_bounds": [
|
||||
{"center": [0.0, 0.8, 0.0], "size": [30.8, 0.6, 13.88]}
|
||||
],
|
||||
"ffa_spawn": [
|
||||
{"center": [-0.0, 0.02, -3.82], "size": [7.83, 1.0, 0.16]},
|
||||
{"center": [-0.0, 0.02, 3.56], "size": [7.83, 1.0, 0.06]}
|
||||
],
|
||||
"flag": [
|
||||
{"center": [-11.22, 0.1, -0.08]},
|
||||
{"center": [11.08, 0.04, -0.08]}
|
||||
],
|
||||
"flag_default": [
|
||||
{"center": [-0.02, 0.06, -0.08]}
|
||||
],
|
||||
"goal": [
|
||||
{"center": [8.45, 1.0, 0.0], "size": [0.43, 1.6, 3.0]},
|
||||
{"center": [-8.45, 1.0, 0.0], "size": [0.43, 1.6, 3.0]}
|
||||
],
|
||||
"map_bounds": [
|
||||
{"center": [0.0, 0.8, -0.47], "size": [35.16, 12.19, 21.53]}
|
||||
],
|
||||
"powerup_spawn": [
|
||||
{"center": [-3.65, 1.08, -4.77]},
|
||||
{"center": [-3.65, 1.08, 4.6]},
|
||||
{"center": [2.88, 1.08, -4.77]},
|
||||
{"center": [2.88, 1.08, 4.6]}
|
||||
],
|
||||
"spawn": [
|
||||
{"center": [-6.84, 0.02, 0.0], "size": [1.0, 1.0, 3.0]},
|
||||
{"center": [6.86, 0.04, 0.0], "size": [1.0, 1.0, 3.0]}
|
||||
],
|
||||
"tnt": [
|
||||
{"center": [-0.06, 1.08, -4.77]}
|
||||
]
|
||||
}
|
||||
}
|
||||
84
dist/ba_data/data/maps/lake_frigid.json
vendored
Normal file
84
dist/ba_data/data/maps/lake_frigid.json
vendored
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
{
|
||||
"format": ["stdmap", 1],
|
||||
"locations": {
|
||||
"area_of_interest_bounds": [
|
||||
{"center": [0.62, 3.96, -2.49], "size": [20.62, 7.76, 12.33]}
|
||||
],
|
||||
"ffa_spawn": [
|
||||
{"center": [-5.78, 2.6, -2.12], "size": [0.49, 1.0, 2.99]},
|
||||
{"center": [8.33, 2.56, -2.36], "size": [0.49, 1.0, 2.59]},
|
||||
{"center": [-0.02, 2.62, -6.52], "size": [4.45, 1.0, 0.25]},
|
||||
{"center": [-0.02, 2.62, 2.15], "size": [4.99, 1.0, 0.15]}
|
||||
],
|
||||
"flag": [
|
||||
{"center": [-5.97, 2.61, -2.43]},
|
||||
{"center": [7.47, 2.6, -2.22]}
|
||||
],
|
||||
"flag_default": [
|
||||
{"center": [0.58, 2.59, -6.08]}
|
||||
],
|
||||
"map_bounds": [
|
||||
{"center": [0.67, 6.09, -2.48], "size": [26.78, 12.5, 19.09]}
|
||||
],
|
||||
"powerup_spawn": [
|
||||
{"center": [-3.18, 3.17, 1.53]},
|
||||
{"center": [3.63, 3.17, 1.56]},
|
||||
{"center": [3.63, 3.17, -5.77]},
|
||||
{"center": [-3.18, 3.17, -5.81]}
|
||||
],
|
||||
"race_mine": [
|
||||
{"center": [-5.3, 2.52, 1.96]},
|
||||
{"center": [-5.29, 2.52, -5.87]},
|
||||
{"center": [6.49, 2.52, 1.53]},
|
||||
{"center": [6.78, 2.52, -4.81]},
|
||||
{"center": [1.53, 2.52, -7.24]},
|
||||
{"center": [-1.55, 2.52, -6.39]},
|
||||
{"center": [-4.36, 2.52, -2.05]},
|
||||
{"center": [-0.71, 2.52, -0.13]},
|
||||
{"center": [-0.71, 2.52, 1.28]},
|
||||
{"center": [-0.71, 2.52, 3.11]},
|
||||
{"center": [9.39, 2.52, -1.65]},
|
||||
{"center": [5.75, 2.52, -2.3]},
|
||||
{"center": [6.21, 2.52, -0.81]},
|
||||
{"center": [5.38, 2.52, -4.17]},
|
||||
{"center": [1.5, 2.52, -5.82]},
|
||||
{"center": [-1.69, 2.52, -5.24]},
|
||||
{"center": [-3.87, 2.52, -4.15]},
|
||||
{"center": [-7.41, 2.52, -1.5]},
|
||||
{"center": [-2.19, 2.52, 1.86]},
|
||||
{"center": [8.03, 2.52, -0.01]},
|
||||
{"center": [7.38, 2.52, -5.78]},
|
||||
{"center": [-4.57, 2.52, -5.03]},
|
||||
{"center": [-5.87, 2.52, -0.28]},
|
||||
{"center": [2.79, 2.52, -7.9]},
|
||||
{"center": [5.82, 2.52, -6.59]},
|
||||
{"center": [-3.97, 2.52, -0.04]}
|
||||
],
|
||||
"race_point": [
|
||||
{"center": [0.59, 2.54, 1.54], "size": [0.28, 3.95, 2.29]},
|
||||
{"center": [4.75, 2.49, 1.1], "size": [0.28, 3.95, 2.39]},
|
||||
{"center": [7.45, 2.6, -2.25], "size": [2.17, 3.95, 0.26]},
|
||||
{"center": [5.06, 2.49, -5.82], "size": [0.28, 3.95, 2.39]},
|
||||
{"center": [0.59, 2.68, -6.17], "size": [0.28, 3.95, 2.16]},
|
||||
{"center": [-3.06, 2.49, -6.11], "size": [0.28, 3.95, 2.32]},
|
||||
{"center": [-5.81, 2.58, -2.25], "size": [2.04, 3.95, 0.26]},
|
||||
{"center": [-2.96, 2.49, 1.36], "size": [0.28, 3.95, 2.53]}
|
||||
],
|
||||
"shadow_lower_bottom": [
|
||||
{"center": [0.52, 1.52, 5.34]}
|
||||
],
|
||||
"shadow_lower_top": [
|
||||
{"center": [0.52, 2.52, 5.34]}
|
||||
],
|
||||
"shadow_upper_bottom": [
|
||||
{"center": [0.52, 4.54, 5.34]}
|
||||
],
|
||||
"shadow_upper_top": [
|
||||
{"center": [0.52, 5.92, 5.34]}
|
||||
],
|
||||
"spawn": [
|
||||
{"center": [-5.95, 2.52, -2.1], "size": [0.09, 1.0, 2.2]},
|
||||
{"center": [8.08, 2.51, -2.36], "size": [0.03, 1.0, 2.22]}
|
||||
]
|
||||
}
|
||||
}
|
||||
46
dist/ba_data/data/maps/monkey_face.json
vendored
Normal file
46
dist/ba_data/data/maps/monkey_face.json
vendored
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
{
|
||||
"format": ["stdmap", 1],
|
||||
"locations": {
|
||||
"area_of_interest_bounds": [
|
||||
{"center": [-1.66, 4.13, -1.58], "size": [17.36, 10.49, 12.31]}
|
||||
],
|
||||
"ffa_spawn": [
|
||||
{"center": [-8.03, 3.35, -2.54], "size": [0.95, 0.95, 1.18]},
|
||||
{"center": [4.73, 3.31, -2.76], "size": [0.93, 1.0, 1.22]},
|
||||
{"center": [-1.91, 3.33, -6.57], "size": [4.08, 1.0, 0.29]},
|
||||
{"center": [-1.67, 3.33, 2.41], "size": [3.87, 1.0, 0.29]}
|
||||
],
|
||||
"flag": [
|
||||
{"center": [-8.97, 3.36, -2.8]},
|
||||
{"center": [5.95, 3.35, -2.66]}
|
||||
],
|
||||
"flag_default": [
|
||||
{"center": [-1.69, 3.39, -2.24]}
|
||||
],
|
||||
"map_bounds": [
|
||||
{"center": [-1.62, 6.83, -2.2], "size": [22.52, 12.21, 15.91]}
|
||||
],
|
||||
"powerup_spawn": [
|
||||
{"center": [-6.86, 4.43, -6.59]},
|
||||
{"center": [-5.42, 4.23, 2.8]},
|
||||
{"center": [3.15, 4.43, -6.59]},
|
||||
{"center": [1.83, 4.23, 2.8]}
|
||||
],
|
||||
"shadow_lower_bottom": [
|
||||
{"center": [-1.88, 0.99, 5.5]}
|
||||
],
|
||||
"shadow_lower_top": [
|
||||
{"center": [-1.88, 2.88, 5.5]}
|
||||
],
|
||||
"shadow_upper_bottom": [
|
||||
{"center": [-1.88, 6.17, 5.5]}
|
||||
],
|
||||
"shadow_upper_top": [
|
||||
{"center": [-1.88, 10.25, 5.5]}
|
||||
],
|
||||
"spawn": [
|
||||
{"center": [-8.03, 3.35, -2.54], "size": [0.95, 0.95, 1.18]},
|
||||
{"center": [4.73, 3.31, -2.76], "size": [0.93, 1.0, 1.22]}
|
||||
]
|
||||
}
|
||||
}
|
||||
45
dist/ba_data/data/maps/rampage.json
vendored
Normal file
45
dist/ba_data/data/maps/rampage.json
vendored
Normal file
|
|
@ -0,0 +1,45 @@
|
|||
{
|
||||
"format": ["stdmap", 1],
|
||||
"locations": {
|
||||
"area_of_interest_bounds": [
|
||||
{"center": [0.35, 5.62, -4.07], "size": [19.9, 10.34, 8.16]}
|
||||
],
|
||||
"edge_box": [
|
||||
{"center": [0.35, 5.44, -4.1], "size": [12.58, 4.65, 3.61]}
|
||||
],
|
||||
"ffa_spawn": [
|
||||
{"center": [0.5, 5.05, -5.79], "size": [6.63, 1.0, 0.34]},
|
||||
{"center": [0.5, 5.05, -2.44], "size": [6.63, 1.0, 0.34]}
|
||||
],
|
||||
"flag": [
|
||||
{"center": [-5.89, 5.11, -4.25]},
|
||||
{"center": [6.7, 5.1, -4.26]}
|
||||
],
|
||||
"flag_default": [
|
||||
{"center": [0.32, 5.11, -4.29]}
|
||||
],
|
||||
"map_bounds": [
|
||||
{"center": [0.45, 4.9, -3.54], "size": [23.55, 14.2, 12.08]}
|
||||
],
|
||||
"powerup_spawn": [
|
||||
{"center": [-2.65, 6.43, -4.23]},
|
||||
{"center": [3.54, 6.55, -4.2]}
|
||||
],
|
||||
"shadow_lower_bottom": [
|
||||
{"center": [5.58, 3.14, 5.34]}
|
||||
],
|
||||
"shadow_lower_top": [
|
||||
{"center": [5.58, 4.32, 5.34]}
|
||||
],
|
||||
"shadow_upper_bottom": [
|
||||
{"center": [5.27, 8.43, 5.34]}
|
||||
],
|
||||
"shadow_upper_top": [
|
||||
{"center": [5.27, 11.93, 5.34]}
|
||||
],
|
||||
"spawn": [
|
||||
{"center": [-4.75, 5.05, -4.25], "size": [0.92, 1.0, 0.52]},
|
||||
{"center": [5.84, 5.05, -4.26], "size": [0.92, 1.0, 0.52]}
|
||||
]
|
||||
}
|
||||
}
|
||||
46
dist/ba_data/data/maps/roundabout.json
vendored
Normal file
46
dist/ba_data/data/maps/roundabout.json
vendored
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
{
|
||||
"format": ["stdmap", 1],
|
||||
"locations": {
|
||||
"area_of_interest_bounds": [
|
||||
{"center": [-1.55, 3.19, -2.41], "size": [11.96, 8.86, 9.53]}
|
||||
],
|
||||
"ffa_spawn": [
|
||||
{"center": [-4.06, 3.86, -4.61], "size": [0.94, 1.0, 1.42]},
|
||||
{"center": [0.91, 3.85, -4.67], "size": [0.92, 1.0, 1.42]},
|
||||
{"center": [-1.5, 1.5, -0.73], "size": [5.73, 1.0, 0.19]}
|
||||
],
|
||||
"flag": [
|
||||
{"center": [-3.02, 3.85, -6.7]},
|
||||
{"center": [-0.01, 3.83, -6.68]}
|
||||
],
|
||||
"flag_default": [
|
||||
{"center": [-1.51, 1.45, -1.44]}
|
||||
],
|
||||
"map_bounds": [
|
||||
{"center": [-1.62, 8.76, -2.66], "size": [20.49, 18.92, 13.8]}
|
||||
],
|
||||
"powerup_spawn": [
|
||||
{"center": [-6.79, 2.66, 0.01]},
|
||||
{"center": [3.61, 2.66, 0.01]}
|
||||
],
|
||||
"shadow_lower_bottom": [
|
||||
{"center": [-1.85, 0.63, 2.27]}
|
||||
],
|
||||
"shadow_lower_top": [
|
||||
{"center": [-1.85, 1.08, 2.27]}
|
||||
],
|
||||
"shadow_upper_bottom": [
|
||||
{"center": [-1.85, 6.05, 2.27]}
|
||||
],
|
||||
"shadow_upper_top": [
|
||||
{"center": [-1.85, 9.19, 2.27]}
|
||||
],
|
||||
"spawn": [
|
||||
{"center": [-4.06, 3.86, -4.61], "size": [0.94, 1.0, 1.42]},
|
||||
{"center": [0.91, 3.85, -4.67], "size": [0.92, 1.0, 1.42]}
|
||||
],
|
||||
"tnt": [
|
||||
{"center": [-1.51, 2.46, 0.23]}
|
||||
]
|
||||
}
|
||||
}
|
||||
59
dist/ba_data/data/maps/step_right_up.json
vendored
Normal file
59
dist/ba_data/data/maps/step_right_up.json
vendored
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
{
|
||||
"format": ["stdmap", 1],
|
||||
"locations": {
|
||||
"area_of_interest_bounds": [
|
||||
{"center": [0.35, 6.08, -2.27], "size": [22.55, 10.15, 14.66]}
|
||||
],
|
||||
"ffa_spawn": [
|
||||
{"center": [-6.99, 5.82, -4.0], "size": [0.41, 1.0, 3.63]},
|
||||
{"center": [7.31, 5.87, -4.0], "size": [0.41, 1.0, 3.63]},
|
||||
{"center": [2.64, 4.79, -4.0], "size": [0.41, 1.0, 3.63]},
|
||||
{"center": [-2.36, 4.79, -4.0], "size": [0.41, 1.0, 3.63]}
|
||||
],
|
||||
"flag": [
|
||||
{"center": [-6.01, 5.82, -8.18]},
|
||||
{"center": [6.67, 5.82, -0.32]},
|
||||
{"center": [-2.11, 4.79, -3.94]},
|
||||
{"center": [2.69, 4.79, -3.94]}
|
||||
],
|
||||
"flag_default": [
|
||||
{"center": [0.25, 4.16, -3.69]}
|
||||
],
|
||||
"map_bounds": [
|
||||
{"center": [0.26, 4.9, -3.54], "size": [29.24, 14.2, 29.93]}
|
||||
],
|
||||
"powerup_spawn": [
|
||||
{"center": [-5.25, 4.73, 2.82]},
|
||||
{"center": [5.69, 4.73, 2.82]},
|
||||
{"center": [7.9, 6.31, -0.72]},
|
||||
{"center": [-7.22, 6.31, -7.94]},
|
||||
{"center": [-1.83, 5.25, -7.96]},
|
||||
{"center": [2.53, 5.25, -0.41]}
|
||||
],
|
||||
"shadow_lower_bottom": [
|
||||
{"center": [0.52, 2.6, 5.34]}
|
||||
],
|
||||
"shadow_lower_top": [
|
||||
{"center": [0.52, 3.78, 5.34]}
|
||||
],
|
||||
"shadow_upper_bottom": [
|
||||
{"center": [0.52, 7.32, 5.34]}
|
||||
],
|
||||
"shadow_upper_top": [
|
||||
{"center": [0.52, 11.09, 5.34]}
|
||||
],
|
||||
"spawn": [
|
||||
{"center": [-4.27, 5.46, -4.0], "size": [0.41, 1.0, 2.2]},
|
||||
{"center": [5.07, 5.44, -4.06], "size": [0.35, 1.0, 2.22]}
|
||||
],
|
||||
"spawn_by_flag": [
|
||||
{"center": [-6.66, 5.98, -6.17], "size": [0.75, 1.0, 0.85]},
|
||||
{"center": [7.39, 5.98, -1.71], "size": [0.75, 1.0, 0.85]},
|
||||
{"center": [-2.11, 4.8, -3.95], "size": [0.75, 1.0, 0.85]},
|
||||
{"center": [2.7, 4.8, -3.95], "size": [0.75, 1.0, 0.85]}
|
||||
],
|
||||
"tnt": [
|
||||
{"center": [0.26, 4.83, -4.31]}
|
||||
]
|
||||
}
|
||||
}
|
||||
49
dist/ba_data/data/maps/the_pad.json
vendored
Normal file
49
dist/ba_data/data/maps/the_pad.json
vendored
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
{
|
||||
"format": ["stdmap", 1],
|
||||
"locations": {
|
||||
"area_of_interest_bounds": [
|
||||
{"center": [0.35, 4.49, -2.52], "size": [16.65, 8.06, 18.5]}
|
||||
],
|
||||
"ffa_spawn": [
|
||||
{"center": [-3.81, 4.38, -8.96], "size": [2.37, 1.0, 0.87]},
|
||||
{"center": [4.47, 4.41, -9.01], "size": [2.71, 1.0, 0.87]},
|
||||
{"center": [6.97, 4.38, -7.42], "size": [0.49, 1.0, 1.6]},
|
||||
{"center": [-6.37, 4.38, -7.42], "size": [0.49, 1.0, 1.6]}
|
||||
],
|
||||
"flag": [
|
||||
{"center": [-7.03, 4.31, -6.3]},
|
||||
{"center": [7.63, 4.37, -6.29]}
|
||||
],
|
||||
"flag_default": [
|
||||
{"center": [0.46, 4.38, 3.68]}
|
||||
],
|
||||
"map_bounds": [
|
||||
{"center": [0.26, 4.9, -3.54], "size": [29.24, 14.2, 29.93]}
|
||||
],
|
||||
"powerup_spawn": [
|
||||
{"center": [-4.17, 5.28, -6.43]},
|
||||
{"center": [4.43, 5.34, -6.33]},
|
||||
{"center": [-4.2, 5.12, 0.44]},
|
||||
{"center": [4.76, 5.12, 0.35]}
|
||||
],
|
||||
"shadow_lower_bottom": [
|
||||
{"center": [-0.29, 2.02, 5.34]}
|
||||
],
|
||||
"shadow_lower_top": [
|
||||
{"center": [-0.29, 3.21, 5.34]}
|
||||
],
|
||||
"shadow_upper_bottom": [
|
||||
{"center": [-0.29, 6.06, 5.34]}
|
||||
],
|
||||
"shadow_upper_top": [
|
||||
{"center": [-0.29, 9.83, 5.34]}
|
||||
],
|
||||
"spawn": [
|
||||
{"center": [-3.9, 4.38, -8.96], "size": [1.66, 1.0, 0.87]},
|
||||
{"center": [4.78, 4.41, -9.01], "size": [1.66, 1.0, 0.87]}
|
||||
],
|
||||
"tnt": [
|
||||
{"center": [0.46, 4.04, -6.57]}
|
||||
]
|
||||
}
|
||||
}
|
||||
49
dist/ba_data/data/maps/tip_top.json
vendored
Normal file
49
dist/ba_data/data/maps/tip_top.json
vendored
Normal file
|
|
@ -0,0 +1,49 @@
|
|||
{
|
||||
"format": ["stdmap", 1],
|
||||
"locations": {
|
||||
"area_of_interest_bounds": [
|
||||
{"center": [0.0, 7.14, -0.02], "size": [21.13, 4.96, 16.69]}
|
||||
],
|
||||
"ffa_spawn": [
|
||||
{"center": [-4.21, 6.97, -3.79], "size": [0.39, 1.16, 0.3]},
|
||||
{"center": [7.38, 5.21, -2.79], "size": [1.16, 1.16, 1.16]},
|
||||
{"center": [-7.26, 5.46, -3.1], "size": [1.16, 1.16, 1.16]},
|
||||
{"center": [0.02, 5.37, 4.08], "size": [1.88, 1.16, 0.2]},
|
||||
{"center": [-1.57, 7.04, -0.48], "size": [0.39, 1.16, 0.3]},
|
||||
{"center": [1.69, 7.04, -0.48], "size": [0.39, 1.16, 0.3]},
|
||||
{"center": [4.4, 6.97, -3.8], "size": [0.39, 1.16, 0.3]}
|
||||
],
|
||||
"flag": [
|
||||
{"center": [-7.01, 5.42, -2.72]},
|
||||
{"center": [7.17, 5.17, -2.65]}
|
||||
],
|
||||
"flag_default": [
|
||||
{"center": [0.07, 8.87, -4.99]}
|
||||
],
|
||||
"map_bounds": [
|
||||
{"center": [-0.21, 7.75, -0.38], "size": [23.81, 13.86, 16.38]}
|
||||
],
|
||||
"powerup_spawn": [
|
||||
{"center": [1.66, 8.05, -1.22]},
|
||||
{"center": [-1.49, 7.91, -1.23]},
|
||||
{"center": [2.63, 6.36, 1.4]},
|
||||
{"center": [-2.7, 6.36, 1.43]}
|
||||
],
|
||||
"shadow_lower_bottom": [
|
||||
{"center": [0.07, 4.0, 6.32]}
|
||||
],
|
||||
"shadow_lower_top": [
|
||||
{"center": [0.07, 4.75, 6.32]}
|
||||
],
|
||||
"shadow_upper_bottom": [
|
||||
{"center": [0.07, 9.15, 6.32]}
|
||||
],
|
||||
"shadow_upper_top": [
|
||||
{"center": [0.07, 13.82, 6.32]}
|
||||
],
|
||||
"spawn": [
|
||||
{"center": [-7.26, 5.46, -3.1], "size": [1.16, 1.16, 1.16]},
|
||||
{"center": [7.38, 5.21, -2.79], "size": [1.16, 1.16, 1.16]}
|
||||
]
|
||||
}
|
||||
}
|
||||
76
dist/ba_data/data/maps/tower_d.json
vendored
Normal file
76
dist/ba_data/data/maps/tower_d.json
vendored
Normal file
|
|
@ -0,0 +1,76 @@
|
|||
{
|
||||
"format": ["stdmap", 1],
|
||||
"locations": {
|
||||
"area_of_interest_bounds": [
|
||||
{"center": [-0.47, 2.89, -1.51], "size": [17.9, 6.19, 15.96]}
|
||||
],
|
||||
"b": [
|
||||
{"center": [-4.83, 2.58, -2.35], "size": [0.94, 2.75, 7.27]},
|
||||
{"center": [4.74, 2.58, -3.34], "size": [0.82, 2.75, 7.22]},
|
||||
{"center": [6.42, 2.58, -4.42], "size": [0.83, 4.53, 9.17]},
|
||||
{"center": [-6.65, 3.23, -3.06], "size": [0.94, 2.75, 8.93]},
|
||||
{"center": [3.54, 2.58, -2.37], "size": [0.63, 2.75, 5.33]},
|
||||
{"center": [5.76, 2.58, 1.15], "size": [2.3, 2.18, 0.5]},
|
||||
{"center": [-2.87, 2.58, -1.56], "size": [0.94, 2.75, 5.92]},
|
||||
{"center": [-0.69, 4.76, -5.32], "size": [24.86, 9.0, 13.43]},
|
||||
{"center": [-0.02, 2.86, -0.95], "size": [4.9, 3.04, 6.14]}
|
||||
],
|
||||
"bot_spawn_bottom_left": [
|
||||
{"center": [-7.4, 1.62, 5.38], "size": [1.66, 1.0, 0.87]}
|
||||
],
|
||||
"bot_spawn_bottom_right": [
|
||||
{"center": [6.49, 1.62, 5.38], "size": [1.66, 1.0, 0.87]}
|
||||
],
|
||||
"bot_spawn_start": [
|
||||
{"center": [-9.0, 3.15, 0.31], "size": [1.66, 1.0, 0.87]}
|
||||
],
|
||||
"edge_box": [
|
||||
{"center": [-0.65, 3.19, 4.1], "size": [14.24, 1.47, 2.9]},
|
||||
{"center": [-0.14, 2.3, 0.35], "size": [1.74, 1.06, 4.91]}
|
||||
],
|
||||
"ffa_spawn": [
|
||||
{"center": [0.1, 2.71, -0.36], "size": [1.66, 1.0, 0.87]}
|
||||
],
|
||||
"flag": [
|
||||
{"center": [-7.75, 3.14, 0.27]},
|
||||
{"center": [6.85, 2.25, 0.38]}
|
||||
],
|
||||
"flag_default": [
|
||||
{"center": [0.02, 2.23, -6.31]}
|
||||
],
|
||||
"map_bounds": [
|
||||
{"center": [0.26, 4.9, -3.54], "size": [29.24, 14.2, 29.93]}
|
||||
],
|
||||
"powerup_region": [
|
||||
{"center": [0.35, 4.04, 3.41], "size": [12.49, 0.38, 1.62]}
|
||||
],
|
||||
"powerup_spawn": [
|
||||
{"center": [-4.93, 2.4, 2.88]},
|
||||
{"center": [-1.96, 2.4, 3.75]},
|
||||
{"center": [1.65, 2.4, 3.75]},
|
||||
{"center": [4.4, 2.4, 2.88]}
|
||||
],
|
||||
"score_region": [
|
||||
{"center": [8.38, 3.05, 0.51], "size": [1.28, 1.37, 0.69]}
|
||||
],
|
||||
"shadow_lower_bottom": [
|
||||
{"center": [-0.29, 0.95, 5.34]}
|
||||
],
|
||||
"shadow_lower_top": [
|
||||
{"center": [-0.29, 2.13, 5.34]}
|
||||
],
|
||||
"shadow_upper_bottom": [
|
||||
{"center": [-0.45, 6.06, 5.34]}
|
||||
],
|
||||
"shadow_upper_top": [
|
||||
{"center": [-0.29, 9.83, 5.34]}
|
||||
],
|
||||
"spawn": [
|
||||
{"center": [0.1, 2.71, -0.36], "size": [1.66, 1.0, 0.87]},
|
||||
{"center": [0.14, 2.74, -0.34], "size": [1.66, 1.0, 0.87]}
|
||||
],
|
||||
"tnt_loc": [
|
||||
{"center": [0.01, 2.78, 3.89]}
|
||||
]
|
||||
}
|
||||
}
|
||||
57
dist/ba_data/data/maps/zig_zag.json
vendored
Normal file
57
dist/ba_data/data/maps/zig_zag.json
vendored
Normal file
|
|
@ -0,0 +1,57 @@
|
|||
{
|
||||
"format": ["stdmap", 1],
|
||||
"locations": {
|
||||
"area_of_interest_bounds": [
|
||||
{"center": [-1.81, 3.94, -1.61], "size": [23.01, 13.28, 10.01]}
|
||||
],
|
||||
"ffa_spawn": [
|
||||
{"center": [-9.52, 4.65, -3.19], "size": [0.85, 1.0, 1.3]},
|
||||
{"center": [6.22, 4.63, -3.19], "size": [0.88, 1.0, 1.3]},
|
||||
{"center": [-4.43, 3.01, -4.91], "size": [1.53, 1.0, 0.76]},
|
||||
{"center": [1.46, 3.01, -4.91], "size": [1.53, 1.0, 0.76]}
|
||||
],
|
||||
"flag": [
|
||||
{"center": [-9.97, 4.65, -4.97]},
|
||||
{"center": [6.84, 4.65, -4.95]},
|
||||
{"center": [1.16, 2.97, -4.89]},
|
||||
{"center": [-4.22, 3.01, -4.89]}
|
||||
],
|
||||
"flag_default": [
|
||||
{"center": [-1.43, 3.02, 0.88]}
|
||||
],
|
||||
"map_bounds": [
|
||||
{"center": [-1.57, 8.76, -1.31], "size": [28.77, 17.65, 19.52]}
|
||||
],
|
||||
"powerup_spawn": [
|
||||
{"center": [2.56, 4.37, -4.8]},
|
||||
{"center": [-6.02, 4.37, -4.8]},
|
||||
{"center": [5.56, 5.38, -4.8]},
|
||||
{"center": [-8.79, 5.38, -4.82]}
|
||||
],
|
||||
"shadow_lower_bottom": [
|
||||
{"center": [-1.43, 1.68, 4.79]}
|
||||
],
|
||||
"shadow_lower_top": [
|
||||
{"center": [-1.43, 2.55, 4.79]}
|
||||
],
|
||||
"shadow_upper_bottom": [
|
||||
{"center": [-1.43, 6.8, 4.79]}
|
||||
],
|
||||
"shadow_upper_top": [
|
||||
{"center": [-1.43, 8.78, 4.79]}
|
||||
],
|
||||
"spawn": [
|
||||
{"center": [-9.52, 4.65, -3.19], "size": [0.85, 1.0, 1.3]},
|
||||
{"center": [6.22, 4.63, -3.19], "size": [0.88, 1.0, 1.3]}
|
||||
],
|
||||
"spawn_by_flag": [
|
||||
{"center": [-9.52, 4.65, -3.19], "size": [0.85, 1.0, 1.3]},
|
||||
{"center": [6.22, 4.63, -3.19], "size": [0.88, 1.0, 1.3]},
|
||||
{"center": [1.46, 3.01, -4.91], "size": [1.53, 1.0, 0.76]},
|
||||
{"center": [-4.43, 3.01, -4.91], "size": [1.53, 1.0, 0.76]}
|
||||
],
|
||||
"tnt": [
|
||||
{"center": [-1.43, 4.05, 0.04]}
|
||||
]
|
||||
}
|
||||
}
|
||||
BIN
dist/ba_data/fonts/fontSmall0.fdata
vendored
Normal file
BIN
dist/ba_data/fonts/fontSmall0.fdata
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/fonts/fontSmall1.fdata
vendored
Normal file
BIN
dist/ba_data/fonts/fontSmall1.fdata
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/fonts/fontSmall2.fdata
vendored
Normal file
BIN
dist/ba_data/fonts/fontSmall2.fdata
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/fonts/fontSmall3.fdata
vendored
Normal file
BIN
dist/ba_data/fonts/fontSmall3.fdata
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/fonts/fontSmall4.fdata
vendored
Normal file
BIN
dist/ba_data/fonts/fontSmall4.fdata
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/fonts/fontSmall5.fdata
vendored
Normal file
BIN
dist/ba_data/fonts/fontSmall5.fdata
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/fonts/fontSmall6.fdata
vendored
Normal file
BIN
dist/ba_data/fonts/fontSmall6.fdata
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/fonts/fontSmall7.fdata
vendored
Normal file
BIN
dist/ba_data/fonts/fontSmall7.fdata
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/alwaysLandLevelCollide.cob
vendored
Normal file
BIN
dist/ba_data/models/alwaysLandLevelCollide.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/bigGBumper.cob
vendored
Normal file
BIN
dist/ba_data/models/bigGBumper.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/bigGCollide.cob
vendored
Normal file
BIN
dist/ba_data/models/bigGCollide.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/bridgitLevelCollide.cob
vendored
Normal file
BIN
dist/ba_data/models/bridgitLevelCollide.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/bridgitLevelRailingCollide.cob
vendored
Normal file
BIN
dist/ba_data/models/bridgitLevelRailingCollide.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/courtyardLevelCollide.cob
vendored
Normal file
BIN
dist/ba_data/models/courtyardLevelCollide.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/courtyardPlayerWall.cob
vendored
Normal file
BIN
dist/ba_data/models/courtyardPlayerWall.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/cragCastleLevelBumper.cob
vendored
Normal file
BIN
dist/ba_data/models/cragCastleLevelBumper.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/cragCastleLevelCollide.cob
vendored
Normal file
BIN
dist/ba_data/models/cragCastleLevelCollide.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/doomShroomLevelCollide.cob
vendored
Normal file
BIN
dist/ba_data/models/doomShroomLevelCollide.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/doomShroomStemCollide.cob
vendored
Normal file
BIN
dist/ba_data/models/doomShroomStemCollide.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/footballStadiumCollide.cob
vendored
Normal file
BIN
dist/ba_data/models/footballStadiumCollide.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/hockeyStadiumCollide.cob
vendored
Normal file
BIN
dist/ba_data/models/hockeyStadiumCollide.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/lakeFrigidCollide.cob
vendored
Normal file
BIN
dist/ba_data/models/lakeFrigidCollide.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/monkeyFaceLevelBumper.cob
vendored
Normal file
BIN
dist/ba_data/models/monkeyFaceLevelBumper.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/monkeyFaceLevelCollide.cob
vendored
Normal file
BIN
dist/ba_data/models/monkeyFaceLevelCollide.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/natureBackgroundCollide.cob
vendored
Normal file
BIN
dist/ba_data/models/natureBackgroundCollide.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/rampageBumper.cob
vendored
Normal file
BIN
dist/ba_data/models/rampageBumper.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/rampageLevelCollide.cob
vendored
Normal file
BIN
dist/ba_data/models/rampageLevelCollide.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/roundaboutLevelBumper.cob
vendored
Normal file
BIN
dist/ba_data/models/roundaboutLevelBumper.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/roundaboutLevelCollide.cob
vendored
Normal file
BIN
dist/ba_data/models/roundaboutLevelCollide.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/stepRightUpLevelCollide.cob
vendored
Normal file
BIN
dist/ba_data/models/stepRightUpLevelCollide.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/thePadLevelBumper.cob
vendored
Normal file
BIN
dist/ba_data/models/thePadLevelBumper.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/thePadLevelCollide.cob
vendored
Normal file
BIN
dist/ba_data/models/thePadLevelCollide.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/tipTopLevelBumper.cob
vendored
Normal file
BIN
dist/ba_data/models/tipTopLevelBumper.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/tipTopLevelCollide.cob
vendored
Normal file
BIN
dist/ba_data/models/tipTopLevelCollide.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/towerDLevelCollide.cob
vendored
Normal file
BIN
dist/ba_data/models/towerDLevelCollide.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/towerDPlayerWall.cob
vendored
Normal file
BIN
dist/ba_data/models/towerDPlayerWall.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/zigZagLevelBumper.cob
vendored
Normal file
BIN
dist/ba_data/models/zigZagLevelBumper.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/models/zigZagLevelCollide.cob
vendored
Normal file
BIN
dist/ba_data/models/zigZagLevelCollide.cob
vendored
Normal file
Binary file not shown.
BIN
dist/ba_data/python-site-packages/__pycache__/typing_extensions.cpython-38.opt-1.pyc
vendored
Normal file
BIN
dist/ba_data/python-site-packages/__pycache__/typing_extensions.cpython-38.opt-1.pyc
vendored
Normal file
Binary file not shown.
2168
dist/ba_data/python-site-packages/typing_extensions.py
vendored
Normal file
2168
dist/ba_data/python-site-packages/typing_extensions.py
vendored
Normal file
File diff suppressed because it is too large
Load diff
427
dist/ba_data/python-site-packages/yaml/__init__.py
vendored
Normal file
427
dist/ba_data/python-site-packages/yaml/__init__.py
vendored
Normal file
|
|
@ -0,0 +1,427 @@
|
|||
|
||||
from .error import *
|
||||
|
||||
from .tokens import *
|
||||
from .events import *
|
||||
from .nodes import *
|
||||
|
||||
from .loader import *
|
||||
from .dumper import *
|
||||
|
||||
__version__ = '5.3.1'
|
||||
try:
|
||||
from .cyaml import *
|
||||
__with_libyaml__ = True
|
||||
except ImportError:
|
||||
__with_libyaml__ = False
|
||||
|
||||
import io
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
# Warnings control
|
||||
#------------------------------------------------------------------------------
|
||||
|
||||
# 'Global' warnings state:
|
||||
_warnings_enabled = {
|
||||
'YAMLLoadWarning': True,
|
||||
}
|
||||
|
||||
# Get or set global warnings' state
|
||||
def warnings(settings=None):
|
||||
if settings is None:
|
||||
return _warnings_enabled
|
||||
|
||||
if type(settings) is dict:
|
||||
for key in settings:
|
||||
if key in _warnings_enabled:
|
||||
_warnings_enabled[key] = settings[key]
|
||||
|
||||
# Warn when load() is called without Loader=...
|
||||
class YAMLLoadWarning(RuntimeWarning):
|
||||
pass
|
||||
|
||||
def load_warning(method):
|
||||
if _warnings_enabled['YAMLLoadWarning'] is False:
|
||||
return
|
||||
|
||||
import warnings
|
||||
|
||||
message = (
|
||||
"calling yaml.%s() without Loader=... is deprecated, as the "
|
||||
"default Loader is unsafe. Please read "
|
||||
"https://msg.pyyaml.org/load for full details."
|
||||
) % method
|
||||
|
||||
warnings.warn(message, YAMLLoadWarning, stacklevel=3)
|
||||
|
||||
#------------------------------------------------------------------------------
|
||||
def scan(stream, Loader=Loader):
|
||||
"""
|
||||
Scan a YAML stream and produce scanning tokens.
|
||||
"""
|
||||
loader = Loader(stream)
|
||||
try:
|
||||
while loader.check_token():
|
||||
yield loader.get_token()
|
||||
finally:
|
||||
loader.dispose()
|
||||
|
||||
def parse(stream, Loader=Loader):
|
||||
"""
|
||||
Parse a YAML stream and produce parsing events.
|
||||
"""
|
||||
loader = Loader(stream)
|
||||
try:
|
||||
while loader.check_event():
|
||||
yield loader.get_event()
|
||||
finally:
|
||||
loader.dispose()
|
||||
|
||||
def compose(stream, Loader=Loader):
|
||||
"""
|
||||
Parse the first YAML document in a stream
|
||||
and produce the corresponding representation tree.
|
||||
"""
|
||||
loader = Loader(stream)
|
||||
try:
|
||||
return loader.get_single_node()
|
||||
finally:
|
||||
loader.dispose()
|
||||
|
||||
def compose_all(stream, Loader=Loader):
|
||||
"""
|
||||
Parse all YAML documents in a stream
|
||||
and produce corresponding representation trees.
|
||||
"""
|
||||
loader = Loader(stream)
|
||||
try:
|
||||
while loader.check_node():
|
||||
yield loader.get_node()
|
||||
finally:
|
||||
loader.dispose()
|
||||
|
||||
def load(stream, Loader=None):
|
||||
"""
|
||||
Parse the first YAML document in a stream
|
||||
and produce the corresponding Python object.
|
||||
"""
|
||||
if Loader is None:
|
||||
load_warning('load')
|
||||
Loader = FullLoader
|
||||
|
||||
loader = Loader(stream)
|
||||
try:
|
||||
return loader.get_single_data()
|
||||
finally:
|
||||
loader.dispose()
|
||||
|
||||
def load_all(stream, Loader=None):
|
||||
"""
|
||||
Parse all YAML documents in a stream
|
||||
and produce corresponding Python objects.
|
||||
"""
|
||||
if Loader is None:
|
||||
load_warning('load_all')
|
||||
Loader = FullLoader
|
||||
|
||||
loader = Loader(stream)
|
||||
try:
|
||||
while loader.check_data():
|
||||
yield loader.get_data()
|
||||
finally:
|
||||
loader.dispose()
|
||||
|
||||
def full_load(stream):
|
||||
"""
|
||||
Parse the first YAML document in a stream
|
||||
and produce the corresponding Python object.
|
||||
|
||||
Resolve all tags except those known to be
|
||||
unsafe on untrusted input.
|
||||
"""
|
||||
return load(stream, FullLoader)
|
||||
|
||||
def full_load_all(stream):
|
||||
"""
|
||||
Parse all YAML documents in a stream
|
||||
and produce corresponding Python objects.
|
||||
|
||||
Resolve all tags except those known to be
|
||||
unsafe on untrusted input.
|
||||
"""
|
||||
return load_all(stream, FullLoader)
|
||||
|
||||
def safe_load(stream):
    """Build a Python object from the first YAML document in *stream*,
    resolving only basic YAML tags; safe for untrusted input
    (convenience wrapper around load with SafeLoader)."""
    return load(stream, Loader=SafeLoader)
|
||||
|
||||
def safe_load_all(stream):
    """Build Python objects from all YAML documents in *stream*,
    resolving only basic YAML tags; safe for untrusted input
    (convenience wrapper around load_all with SafeLoader)."""
    return load_all(stream, Loader=SafeLoader)
|
||||
|
||||
def unsafe_load(stream):
    """Build a Python object from the first YAML document in *stream*,
    resolving ALL tags, including ones that can execute arbitrary
    code — never use on untrusted input."""
    return load(stream, Loader=UnsafeLoader)
|
||||
|
||||
def unsafe_load_all(stream):
    """Build Python objects from all YAML documents in *stream*,
    resolving ALL tags, including ones that can execute arbitrary
    code — never use on untrusted input."""
    return load_all(stream, Loader=UnsafeLoader)
|
||||
|
||||
def emit(events, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None):
    """Emit a sequence of YAML parsing events into *stream*.
    When stream is None, buffer in memory and return the produced
    string; otherwise write to *stream* and return None."""
    return_string = stream is None
    if return_string:
        stream = io.StringIO()
    emitter = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break)
    try:
        for event in events:
            emitter.emit(event)
    finally:
        # Flush and release emitter state even on error.
        emitter.dispose()
    return stream.getvalue() if return_string else None
|
||||
|
||||
def serialize_all(nodes, stream=None, Dumper=Dumper,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding=None, explicit_start=None, explicit_end=None,
        version=None, tags=None):
    """
    Serialize a sequence of representation trees into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        # No target stream: buffer in memory and return the result.
        # encoding=None produces str output; any other encoding produces bytes.
        if encoding is None:
            stream = io.StringIO()
        else:
            stream = io.BytesIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end)
    try:
        # The dumper protocol is open() -> serialize()* -> close().
        dumper.open()
        for node in nodes:
            dumper.serialize(node)
        dumper.close()
    finally:
        # Always release emitter resources, even if serialization fails.
        dumper.dispose()
    if getvalue:
        return getvalue()
|
||||
|
||||
def serialize(node, stream=None, Dumper=Dumper, **kwds):
    """Serialize a single representation tree into a YAML stream.
    When stream is None, the produced string is returned."""
    # Delegate to the multi-document variant with a one-element sequence.
    return serialize_all((node,), stream, Dumper=Dumper, **kwds)
|
||||
|
||||
def dump_all(documents, stream=None, Dumper=Dumper,
        default_style=None, default_flow_style=False,
        canonical=None, indent=None, width=None,
        allow_unicode=None, line_break=None,
        encoding=None, explicit_start=None, explicit_end=None,
        version=None, tags=None, sort_keys=True):
    """
    Serialize a sequence of Python objects into a YAML stream.
    If stream is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        # No target stream: buffer in memory and return the result.
        # encoding=None produces str output; any other encoding produces bytes.
        if encoding is None:
            stream = io.StringIO()
        else:
            stream = io.BytesIO()
        getvalue = stream.getvalue
    dumper = Dumper(stream, default_style=default_style,
            default_flow_style=default_flow_style,
            canonical=canonical, indent=indent, width=width,
            allow_unicode=allow_unicode, line_break=line_break,
            encoding=encoding, version=version, tags=tags,
            explicit_start=explicit_start, explicit_end=explicit_end, sort_keys=sort_keys)
    try:
        # The dumper protocol is open() -> represent()* -> close().
        dumper.open()
        for data in documents:
            dumper.represent(data)
        dumper.close()
    finally:
        # Always release emitter resources, even if dumping fails.
        dumper.dispose()
    if getvalue:
        return getvalue()
|
||||
|
||||
def dump(data, stream=None, Dumper=Dumper, **kwds):
    """Serialize a single Python object into a YAML stream.
    When stream is None, the produced string is returned."""
    # Delegate to the multi-document variant with a one-element sequence.
    return dump_all((data,), stream, Dumper=Dumper, **kwds)
|
||||
|
||||
def safe_dump_all(documents, stream=None, **kwds):
    """Serialize a sequence of Python objects into a YAML stream
    using only basic YAML tags (SafeDumper).
    When stream is None, the produced string is returned."""
    return dump_all(documents, stream, Dumper=SafeDumper, **kwds)
|
||||
|
||||
def safe_dump(data, stream=None, **kwds):
    """Serialize a single Python object into a YAML stream using only
    basic YAML tags (SafeDumper).
    When stream is None, the produced string is returned."""
    return dump_all((data,), stream, Dumper=SafeDumper, **kwds)
|
||||
|
||||
def add_implicit_resolver(tag, regexp, first=None,
        Loader=None, Dumper=Dumper):
    """
    Add an implicit scalar detector.
    If an implicit scalar value matches the given regexp,
    the corresponding tag is assigned to the scalar.
    first is a sequence of possible initial characters or None.
    """
    if Loader is None:
        # No explicit Loader: register with every standard loader class.
        loader.Loader.add_implicit_resolver(tag, regexp, first)
        loader.FullLoader.add_implicit_resolver(tag, regexp, first)
        loader.UnsafeLoader.add_implicit_resolver(tag, regexp, first)
    else:
        Loader.add_implicit_resolver(tag, regexp, first)
    # The Dumper side is registered unconditionally so dumping and
    # loading stay symmetric.
    Dumper.add_implicit_resolver(tag, regexp, first)
|
||||
|
||||
def add_path_resolver(tag, path, kind=None, Loader=None, Dumper=Dumper):
    """
    Add a path based resolver for the given tag.
    A path is a list of keys that forms a path
    to a node in the representation tree.
    Keys can be string values, integers, or None.
    """
    if Loader is None:
        # No explicit Loader: register with every standard loader class.
        loader.Loader.add_path_resolver(tag, path, kind)
        loader.FullLoader.add_path_resolver(tag, path, kind)
        loader.UnsafeLoader.add_path_resolver(tag, path, kind)
    else:
        Loader.add_path_resolver(tag, path, kind)
    # The Dumper side is registered unconditionally so dumping and
    # loading stay symmetric.
    Dumper.add_path_resolver(tag, path, kind)
|
||||
|
||||
def add_constructor(tag, constructor, Loader=None):
    """
    Add a constructor for the given tag.
    Constructor is a function that accepts a Loader instance
    and a node object and produces the corresponding Python object.
    """
    if Loader is None:
        # No explicit Loader: register with every standard loader class.
        loader.Loader.add_constructor(tag, constructor)
        loader.FullLoader.add_constructor(tag, constructor)
        loader.UnsafeLoader.add_constructor(tag, constructor)
    else:
        Loader.add_constructor(tag, constructor)
|
||||
|
||||
def add_multi_constructor(tag_prefix, multi_constructor, Loader=None):
    """
    Add a multi-constructor for the given tag prefix.
    Multi-constructor is called for a node if its tag starts with tag_prefix.
    Multi-constructor accepts a Loader instance, a tag suffix,
    and a node object and produces the corresponding Python object.
    """
    if Loader is None:
        # No explicit Loader: register with every standard loader class.
        loader.Loader.add_multi_constructor(tag_prefix, multi_constructor)
        loader.FullLoader.add_multi_constructor(tag_prefix, multi_constructor)
        loader.UnsafeLoader.add_multi_constructor(tag_prefix, multi_constructor)
    else:
        Loader.add_multi_constructor(tag_prefix, multi_constructor)
|
||||
|
||||
def add_representer(data_type, representer, Dumper=Dumper):
    """Register *representer* for exact instances of *data_type*.

    A representer takes a Dumper instance plus a value of the given
    type and returns the corresponding representation node.
    """
    Dumper.add_representer(data_type, representer)
|
||||
|
||||
def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
    """Register *multi_representer* for *data_type* and its subclasses.

    A multi-representer takes a Dumper instance plus a value of the
    given type (or any subtype) and returns the corresponding
    representation node.
    """
    Dumper.add_multi_representer(data_type, multi_representer)
|
||||
|
||||
class YAMLObjectMetaclass(type):
    """
    The metaclass for YAMLObject.

    Automatically registers each subclass's from_yaml/to_yaml hooks
    under its yaml_tag as soon as the subclass is defined.
    """
    def __init__(cls, name, bases, kwds):
        super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
        # Only register classes that declare their own non-None yaml_tag
        # (checking kwds avoids re-registering inherited tags).
        if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
            # yaml_loader may be a single loader class or a list of them.
            if isinstance(cls.yaml_loader, list):
                for loader in cls.yaml_loader:
                    loader.add_constructor(cls.yaml_tag, cls.from_yaml)
            else:
                cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)

            cls.yaml_dumper.add_representer(cls, cls.to_yaml)
|
||||
|
||||
class YAMLObject(metaclass=YAMLObjectMetaclass):
    """
    An object that can dump itself to a YAML stream
    and load itself from a YAML stream.

    Subclasses set yaml_tag (and optionally yaml_loader/yaml_dumper);
    the metaclass then registers the construct/represent hooks.
    """

    __slots__ = ()  # no direct instantiation, so allow immutable subclasses

    # Loader classes (one or a list) the subclass's tag is registered with.
    yaml_loader = [Loader, FullLoader, UnsafeLoader]
    # Dumper class the subclass's representer is registered with.
    yaml_dumper = Dumper

    # Subclasses must set this to their YAML tag (e.g. '!MyThing').
    yaml_tag = None
    # Optional flow style passed through when representing instances.
    yaml_flow_style = None

    @classmethod
    def from_yaml(cls, loader, node):
        """
        Convert a representation node to a Python object.
        """
        return loader.construct_yaml_object(node, cls)

    @classmethod
    def to_yaml(cls, dumper, data):
        """
        Convert a Python object to a representation node.
        """
        return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
                flow_style=cls.yaml_flow_style)
|
||||
|
||||
139
dist/ba_data/python-site-packages/yaml/composer.py
vendored
Normal file
139
dist/ba_data/python-site-packages/yaml/composer.py
vendored
Normal file
|
|
@ -0,0 +1,139 @@
|
|||
|
||||
__all__ = ['Composer', 'ComposerError']
|
||||
|
||||
from .error import MarkedYAMLError
|
||||
from .events import *
|
||||
from .nodes import *
|
||||
|
||||
class ComposerError(MarkedYAMLError):
    # Raised when the event stream cannot be assembled into a valid
    # representation graph (e.g. undefined alias, duplicate anchor,
    # multiple documents where one was expected).
    pass
|
||||
|
||||
class Composer:
    """Assemble parser events into a graph of representation nodes,
    resolving anchors and aliases along the way.

    Mixed into Loader classes; relies on the parser mixin for
    check_event/peek_event/get_event and on the resolver mixin for
    descend_resolver/ascend_resolver/resolve.
    """

    def __init__(self):
        # Maps anchor name -> already-composed node so later aliases can
        # reference (and share) previously composed subtrees.
        self.anchors = {}

    def check_node(self):
        # Drop the STREAM-START event.
        if self.check_event(StreamStartEvent):
            self.get_event()

        # If there are more documents available?
        return not self.check_event(StreamEndEvent)

    def get_node(self):
        # Get the root node of the next document.
        if not self.check_event(StreamEndEvent):
            return self.compose_document()

    def get_single_node(self):
        # Drop the STREAM-START event.
        self.get_event()

        # Compose a document if the stream is not empty.
        document = None
        if not self.check_event(StreamEndEvent):
            document = self.compose_document()

        # Ensure that the stream contains no more documents.
        if not self.check_event(StreamEndEvent):
            event = self.get_event()
            raise ComposerError("expected a single document in the stream",
                    document.start_mark, "but found another document",
                    event.start_mark)

        # Drop the STREAM-END event.
        self.get_event()

        return document

    def compose_document(self):
        # Drop the DOCUMENT-START event.
        self.get_event()

        # Compose the root node.
        node = self.compose_node(None, None)

        # Drop the DOCUMENT-END event.
        self.get_event()

        # Anchors are document-scoped: reset for the next document.
        self.anchors = {}
        return node

    def compose_node(self, parent, index):
        if self.check_event(AliasEvent):
            # An alias refers back to an anchored node composed earlier;
            # return the shared node object itself.
            event = self.get_event()
            anchor = event.anchor
            if anchor not in self.anchors:
                raise ComposerError(None, None, "found undefined alias %r"
                        % anchor, event.start_mark)
            return self.anchors[anchor]
        event = self.peek_event()
        anchor = event.anchor
        if anchor is not None:
            if anchor in self.anchors:
                raise ComposerError("found duplicate anchor %r; first occurrence"
                        % anchor, self.anchors[anchor].start_mark,
                        "second occurrence", event.start_mark)
        # Tell the resolver where we are (for implicit tag resolution),
        # then dispatch on the kind of the upcoming event.
        self.descend_resolver(parent, index)
        if self.check_event(ScalarEvent):
            node = self.compose_scalar_node(anchor)
        elif self.check_event(SequenceStartEvent):
            node = self.compose_sequence_node(anchor)
        elif self.check_event(MappingStartEvent):
            node = self.compose_mapping_node(anchor)
        self.ascend_resolver()
        return node

    def compose_scalar_node(self, anchor):
        event = self.get_event()
        tag = event.tag
        if tag is None or tag == '!':
            # No explicit tag: resolve one from the scalar value.
            tag = self.resolve(ScalarNode, event.value, event.implicit)
        node = ScalarNode(tag, event.value,
                event.start_mark, event.end_mark, style=event.style)
        if anchor is not None:
            self.anchors[anchor] = node
        return node

    def compose_sequence_node(self, anchor):
        start_event = self.get_event()
        tag = start_event.tag
        if tag is None or tag == '!':
            tag = self.resolve(SequenceNode, None, start_event.implicit)
        node = SequenceNode(tag, [],
                start_event.start_mark, None,
                flow_style=start_event.flow_style)
        if anchor is not None:
            # Register the (still empty) node before composing children so
            # the sequence may contain aliases to itself (recursive data).
            self.anchors[anchor] = node
        index = 0
        while not self.check_event(SequenceEndEvent):
            node.value.append(self.compose_node(node, index))
            index += 1
        end_event = self.get_event()
        node.end_mark = end_event.end_mark
        return node

    def compose_mapping_node(self, anchor):
        start_event = self.get_event()
        tag = start_event.tag
        if tag is None or tag == '!':
            tag = self.resolve(MappingNode, None, start_event.implicit)
        node = MappingNode(tag, [],
                start_event.start_mark, None,
                flow_style=start_event.flow_style)
        if anchor is not None:
            # Register early to support recursive mappings (see above).
            self.anchors[anchor] = node
        while not self.check_event(MappingEndEvent):
            #key_event = self.peek_event()
            item_key = self.compose_node(node, None)
            #if item_key in node.value:
            #    raise ComposerError("while composing a mapping", start_event.start_mark,
            #            "found duplicate key", key_event.start_mark)
            item_value = self.compose_node(node, item_key)
            #node.value[item_key] = item_value
            # Pairs are kept as a list (not a dict): duplicate-key checks
            # are deferred to the constructor stage.
            node.value.append((item_key, item_value))
        end_event = self.get_event()
        node.end_mark = end_event.end_mark
        return node
|
||||
|
||||
748
dist/ba_data/python-site-packages/yaml/constructor.py
vendored
Normal file
748
dist/ba_data/python-site-packages/yaml/constructor.py
vendored
Normal file
|
|
@ -0,0 +1,748 @@
|
|||
|
||||
__all__ = [
|
||||
'BaseConstructor',
|
||||
'SafeConstructor',
|
||||
'FullConstructor',
|
||||
'UnsafeConstructor',
|
||||
'Constructor',
|
||||
'ConstructorError'
|
||||
]
|
||||
|
||||
from .error import *
|
||||
from .nodes import *
|
||||
|
||||
import collections.abc, datetime, base64, binascii, re, sys, types
|
||||
|
||||
class ConstructorError(MarkedYAMLError):
    # Raised when a representation node cannot be turned into the
    # corresponding Python object (unknown tag, bad node kind, etc.).
    pass
|
||||
|
||||
class BaseConstructor:
    """Turn representation nodes into Python objects.

    Mixed into Loader classes; relies on the composer mixin for
    check_node/get_node/get_single_node. Construction of recursive
    structures is two-phase: constructors may be generators that first
    yield a (possibly empty) object and then populate it.
    """

    # Tag -> constructor function. Class-level registries; mutated
    # copy-on-write via add_constructor/add_multi_constructor below.
    yaml_constructors = {}
    # Tag prefix -> multi-constructor function.
    yaml_multi_constructors = {}

    def __init__(self):
        # node -> finished object cache (enables shared/aliased nodes).
        self.constructed_objects = {}
        # Nodes currently being constructed; used to detect recursion
        # that cannot be resolved by the two-phase scheme.
        self.recursive_objects = {}
        # Pending generator-based constructors awaiting their second phase.
        self.state_generators = []
        # When True, child objects are fully constructed eagerly.
        self.deep_construct = False

    def check_data(self):
        # If there are more documents available?
        return self.check_node()

    def check_state_key(self, key):
        """Block special attributes/methods from being set in a newly created
        object, to prevent user-controlled methods from being called during
        deserialization"""
        if self.get_state_keys_blacklist_regexp().match(key):
            raise ConstructorError(None, None,
                "blacklisted key '%s' in instance state found" % (key,), None)

    def get_data(self):
        # Construct and return the next document.
        if self.check_node():
            return self.construct_document(self.get_node())

    def get_single_data(self):
        # Ensure that the stream contains a single document and construct it.
        node = self.get_single_node()
        if node is not None:
            return self.construct_document(node)
        return None

    def construct_document(self, node):
        data = self.construct_object(node)
        # Drain deferred generator-based constructors; draining one may
        # queue more, hence the outer while loop.
        while self.state_generators:
            state_generators = self.state_generators
            self.state_generators = []
            for generator in state_generators:
                for dummy in generator:
                    pass
        # Caches are document-scoped: reset for the next document.
        self.constructed_objects = {}
        self.recursive_objects = {}
        self.deep_construct = False
        return data

    def construct_object(self, node, deep=False):
        # Shared (aliased) nodes construct once and reuse the object.
        if node in self.constructed_objects:
            return self.constructed_objects[node]
        if deep:
            old_deep = self.deep_construct
            self.deep_construct = True
        if node in self.recursive_objects:
            raise ConstructorError(None, None,
                    "found unconstructable recursive node", node.start_mark)
        self.recursive_objects[node] = None
        # Pick a constructor: exact tag match first, then the longest-
        # prefix multi-constructor, then the catch-all (None) entries,
        # and finally a kind-based default.
        constructor = None
        tag_suffix = None
        if node.tag in self.yaml_constructors:
            constructor = self.yaml_constructors[node.tag]
        else:
            for tag_prefix in self.yaml_multi_constructors:
                if tag_prefix is not None and node.tag.startswith(tag_prefix):
                    tag_suffix = node.tag[len(tag_prefix):]
                    constructor = self.yaml_multi_constructors[tag_prefix]
                    break
            else:
                if None in self.yaml_multi_constructors:
                    tag_suffix = node.tag
                    constructor = self.yaml_multi_constructors[None]
                elif None in self.yaml_constructors:
                    constructor = self.yaml_constructors[None]
                elif isinstance(node, ScalarNode):
                    constructor = self.__class__.construct_scalar
                elif isinstance(node, SequenceNode):
                    constructor = self.__class__.construct_sequence
                elif isinstance(node, MappingNode):
                    constructor = self.__class__.construct_mapping
        if tag_suffix is None:
            data = constructor(self, node)
        else:
            data = constructor(self, tag_suffix, node)
        if isinstance(data, types.GeneratorType):
            # Two-phase constructor: first yield gives the object shell;
            # remaining iterations populate it (now or deferred).
            generator = data
            data = next(generator)
            if self.deep_construct:
                for dummy in generator:
                    pass
            else:
                self.state_generators.append(generator)
        self.constructed_objects[node] = data
        del self.recursive_objects[node]
        if deep:
            self.deep_construct = old_deep
        return data

    def construct_scalar(self, node):
        # Default scalar constructor: just return the raw string value.
        if not isinstance(node, ScalarNode):
            raise ConstructorError(None, None,
                    "expected a scalar node, but found %s" % node.id,
                    node.start_mark)
        return node.value

    def construct_sequence(self, node, deep=False):
        # Default sequence constructor: a list of constructed children.
        if not isinstance(node, SequenceNode):
            raise ConstructorError(None, None,
                    "expected a sequence node, but found %s" % node.id,
                    node.start_mark)
        return [self.construct_object(child, deep=deep)
                for child in node.value]

    def construct_mapping(self, node, deep=False):
        # Default mapping constructor: a dict of constructed pairs.
        if not isinstance(node, MappingNode):
            raise ConstructorError(None, None,
                    "expected a mapping node, but found %s" % node.id,
                    node.start_mark)
        mapping = {}
        for key_node, value_node in node.value:
            key = self.construct_object(key_node, deep=deep)
            if not isinstance(key, collections.abc.Hashable):
                raise ConstructorError("while constructing a mapping", node.start_mark,
                        "found unhashable key", key_node.start_mark)
            value = self.construct_object(value_node, deep=deep)
            mapping[key] = value
        return mapping

    def construct_pairs(self, node, deep=False):
        # Like construct_mapping but keeps (key, value) pairs in order
        # and allows duplicate/unhashable keys.
        if not isinstance(node, MappingNode):
            raise ConstructorError(None, None,
                    "expected a mapping node, but found %s" % node.id,
                    node.start_mark)
        pairs = []
        for key_node, value_node in node.value:
            key = self.construct_object(key_node, deep=deep)
            value = self.construct_object(value_node, deep=deep)
            pairs.append((key, value))
        return pairs

    @classmethod
    def add_constructor(cls, tag, constructor):
        # Copy-on-write so registrations on a subclass never leak into
        # its parents or siblings.
        if not 'yaml_constructors' in cls.__dict__:
            cls.yaml_constructors = cls.yaml_constructors.copy()
        cls.yaml_constructors[tag] = constructor

    @classmethod
    def add_multi_constructor(cls, tag_prefix, multi_constructor):
        # Same copy-on-write discipline as add_constructor.
        if not 'yaml_multi_constructors' in cls.__dict__:
            cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
        cls.yaml_multi_constructors[tag_prefix] = multi_constructor
|
||||
|
||||
class SafeConstructor(BaseConstructor):
    """Constructor that resolves only the standard YAML 1.1 tags
    (null/bool/int/float/binary/timestamp/omap/pairs/set/str/seq/map),
    which is safe for untrusted input."""

    def construct_scalar(self, node):
        # A mapping with a 'tag:yaml.org,2002:value' key stands for the
        # value of that key (the YAML '=' value key convention).
        if isinstance(node, MappingNode):
            for key_node, value_node in node.value:
                if key_node.tag == 'tag:yaml.org,2002:value':
                    return self.construct_scalar(value_node)
        return super().construct_scalar(node)

    def flatten_mapping(self, node):
        # Expand '<<' merge keys in place: merged entries are appended
        # before explicit ones so explicit keys win on duplicates.
        merge = []
        index = 0
        while index < len(node.value):
            key_node, value_node = node.value[index]
            if key_node.tag == 'tag:yaml.org,2002:merge':
                del node.value[index]
                if isinstance(value_node, MappingNode):
                    self.flatten_mapping(value_node)
                    merge.extend(value_node.value)
                elif isinstance(value_node, SequenceNode):
                    # '<<: [*a, *b]' — earlier maps take precedence, so
                    # merge the list in reverse.
                    submerge = []
                    for subnode in value_node.value:
                        if not isinstance(subnode, MappingNode):
                            raise ConstructorError("while constructing a mapping",
                                    node.start_mark,
                                    "expected a mapping for merging, but found %s"
                                    % subnode.id, subnode.start_mark)
                        self.flatten_mapping(subnode)
                        submerge.append(subnode.value)
                    submerge.reverse()
                    for value in submerge:
                        merge.extend(value)
                else:
                    raise ConstructorError("while constructing a mapping", node.start_mark,
                            "expected a mapping or list of mappings for merging, but found %s"
                            % value_node.id, value_node.start_mark)
            elif key_node.tag == 'tag:yaml.org,2002:value':
                # The '=' value key behaves as the plain string key '='.
                key_node.tag = 'tag:yaml.org,2002:str'
                index += 1
            else:
                index += 1
        if merge:
            node.value = merge + node.value

    def construct_mapping(self, node, deep=False):
        # Apply merge-key flattening before the base dict construction.
        if isinstance(node, MappingNode):
            self.flatten_mapping(node)
        return super().construct_mapping(node, deep=deep)

    def construct_yaml_null(self, node):
        # Validates the node kind; the value itself is always None.
        self.construct_scalar(node)
        return None

    # YAML 1.1 boolean spellings (matched case-insensitively below).
    bool_values = {
        'yes':      True,
        'no':       False,
        'true':     True,
        'false':    False,
        'on':       True,
        'off':      False,
    }

    def construct_yaml_bool(self, node):
        value = self.construct_scalar(node)
        return self.bool_values[value.lower()]

    def construct_yaml_int(self, node):
        # Supports sign, '_' digit separators, binary (0b), hex (0x),
        # octal (leading 0), and base-60 ('1:30') forms per YAML 1.1.
        value = self.construct_scalar(node)
        value = value.replace('_', '')
        sign = +1
        if value[0] == '-':
            sign = -1
        if value[0] in '+-':
            value = value[1:]
        if value == '0':
            return 0
        elif value.startswith('0b'):
            return sign*int(value[2:], 2)
        elif value.startswith('0x'):
            return sign*int(value[2:], 16)
        elif value[0] == '0':
            return sign*int(value, 8)
        elif ':' in value:
            # Sexagesimal: digits are base-60 positions, least first.
            digits = [int(part) for part in value.split(':')]
            digits.reverse()
            base = 1
            value = 0
            for digit in digits:
                value += digit*base
                base *= 60
            return sign*value
        else:
            return sign*int(value)

    # Portable float infinity/NaN, computed by squaring until overflow
    # saturates to inf (avoids assuming float('inf') semantics).
    inf_value = 1e300
    while inf_value != inf_value*inf_value:
        inf_value *= inf_value
    nan_value = -inf_value/inf_value   # Trying to make a quiet NaN (like C99).

    def construct_yaml_float(self, node):
        # Supports sign, '_' separators, .inf/.nan, and base-60 forms.
        value = self.construct_scalar(node)
        value = value.replace('_', '').lower()
        sign = +1
        if value[0] == '-':
            sign = -1
        if value[0] in '+-':
            value = value[1:]
        if value == '.inf':
            return sign*self.inf_value
        elif value == '.nan':
            return self.nan_value
        elif ':' in value:
            digits = [float(part) for part in value.split(':')]
            digits.reverse()
            base = 1
            value = 0.0
            for digit in digits:
                value += digit*base
                base *= 60
            return sign*value
        else:
            return sign*float(value)

    def construct_yaml_binary(self, node):
        # !!binary scalars are base64 text; decode to bytes.
        try:
            value = self.construct_scalar(node).encode('ascii')
        except UnicodeEncodeError as exc:
            raise ConstructorError(None, None,
                    "failed to convert base64 data into ascii: %s" % exc,
                    node.start_mark)
        try:
            if hasattr(base64, 'decodebytes'):
                return base64.decodebytes(value)
            else:
                # Fallback for very old Pythons lacking decodebytes.
                return base64.decodestring(value)
        except binascii.Error as exc:
            raise ConstructorError(None, None,
                    "failed to decode base64 data: %s" % exc, node.start_mark)

    # ISO-8601-ish timestamp grammar from the YAML 1.1 timestamp spec.
    timestamp_regexp = re.compile(
            r'''^(?P<year>[0-9][0-9][0-9][0-9])
                -(?P<month>[0-9][0-9]?)
                -(?P<day>[0-9][0-9]?)
                (?:(?:[Tt]|[ \t]+)
                (?P<hour>[0-9][0-9]?)
                :(?P<minute>[0-9][0-9])
                :(?P<second>[0-9][0-9])
                (?:\.(?P<fraction>[0-9]*))?
                (?:[ \t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)
                (?::(?P<tz_minute>[0-9][0-9]))?))?)?$''', re.X)

    def construct_yaml_timestamp(self, node):
        # NOTE(review): 'value' is assigned but unused — the match runs
        # against node.value directly; looks like historical leftover.
        value = self.construct_scalar(node)
        match = self.timestamp_regexp.match(node.value)
        values = match.groupdict()
        year = int(values['year'])
        month = int(values['month'])
        day = int(values['day'])
        if not values['hour']:
            # Date-only form.
            return datetime.date(year, month, day)
        hour = int(values['hour'])
        minute = int(values['minute'])
        second = int(values['second'])
        fraction = 0
        tzinfo = None
        if values['fraction']:
            # Normalize the fraction to exactly six digits (microseconds).
            fraction = values['fraction'][:6]
            while len(fraction) < 6:
                fraction += '0'
            fraction = int(fraction)
        if values['tz_sign']:
            tz_hour = int(values['tz_hour'])
            tz_minute = int(values['tz_minute'] or 0)
            delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute)
            if values['tz_sign'] == '-':
                delta = -delta
            tzinfo = datetime.timezone(delta)
        elif values['tz']:
            # Bare 'Z' suffix.
            tzinfo = datetime.timezone.utc
        return datetime.datetime(year, month, day, hour, minute, second, fraction,
                                 tzinfo=tzinfo)

    def construct_yaml_omap(self, node):
        # Note: we do not check for duplicate keys, because it's too
        # CPU-expensive.
        # Two-phase generator constructor: yield the shell, then fill it.
        omap = []
        yield omap
        if not isinstance(node, SequenceNode):
            raise ConstructorError("while constructing an ordered map", node.start_mark,
                    "expected a sequence, but found %s" % node.id, node.start_mark)
        for subnode in node.value:
            if not isinstance(subnode, MappingNode):
                raise ConstructorError("while constructing an ordered map", node.start_mark,
                        "expected a mapping of length 1, but found %s" % subnode.id,
                        subnode.start_mark)
            if len(subnode.value) != 1:
                raise ConstructorError("while constructing an ordered map", node.start_mark,
                        "expected a single mapping item, but found %d items" % len(subnode.value),
                        subnode.start_mark)
            key_node, value_node = subnode.value[0]
            key = self.construct_object(key_node)
            value = self.construct_object(value_node)
            omap.append((key, value))

    def construct_yaml_pairs(self, node):
        # Note: the same code as `construct_yaml_omap`.
        pairs = []
        yield pairs
        if not isinstance(node, SequenceNode):
            raise ConstructorError("while constructing pairs", node.start_mark,
                    "expected a sequence, but found %s" % node.id, node.start_mark)
        for subnode in node.value:
            if not isinstance(subnode, MappingNode):
                raise ConstructorError("while constructing pairs", node.start_mark,
                        "expected a mapping of length 1, but found %s" % subnode.id,
                        subnode.start_mark)
            if len(subnode.value) != 1:
                raise ConstructorError("while constructing pairs", node.start_mark,
                        "expected a single mapping item, but found %d items" % len(subnode.value),
                        subnode.start_mark)
            key_node, value_node = subnode.value[0]
            key = self.construct_object(key_node)
            value = self.construct_object(value_node)
            pairs.append((key, value))

    def construct_yaml_set(self, node):
        # Two-phase: sets are represented as mappings with null values.
        data = set()
        yield data
        value = self.construct_mapping(node)
        data.update(value)

    def construct_yaml_str(self, node):
        return self.construct_scalar(node)

    def construct_yaml_seq(self, node):
        # Two-phase so recursive sequences can alias themselves.
        data = []
        yield data
        data.extend(self.construct_sequence(node))

    def construct_yaml_map(self, node):
        # Two-phase so recursive mappings can alias themselves.
        data = {}
        yield data
        value = self.construct_mapping(node)
        data.update(value)

    def construct_yaml_object(self, node, cls):
        # Build an instance without calling __init__, then restore state
        # via __setstate__ when available, else via __dict__ update.
        data = cls.__new__(cls)
        yield data
        if hasattr(data, '__setstate__'):
            state = self.construct_mapping(node, deep=True)
            data.__setstate__(state)
        else:
            state = self.construct_mapping(node)
            data.__dict__.update(state)

    def construct_undefined(self, node):
        # Catch-all for tags with no registered constructor.
        raise ConstructorError(None, None,
                "could not determine a constructor for the tag %r" % node.tag,
                node.start_mark)
|
||||
|
||||
# Register the constructors for all standard YAML 1.1 tags on
# SafeConstructor (subclasses inherit these via the copy-on-write
# registries in BaseConstructor). The final None entry is the
# catch-all for unknown tags.
SafeConstructor.add_constructor(
        'tag:yaml.org,2002:null',
        SafeConstructor.construct_yaml_null)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:bool',
        SafeConstructor.construct_yaml_bool)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:int',
        SafeConstructor.construct_yaml_int)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:float',
        SafeConstructor.construct_yaml_float)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:binary',
        SafeConstructor.construct_yaml_binary)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:timestamp',
        SafeConstructor.construct_yaml_timestamp)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:omap',
        SafeConstructor.construct_yaml_omap)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:pairs',
        SafeConstructor.construct_yaml_pairs)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:set',
        SafeConstructor.construct_yaml_set)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:str',
        SafeConstructor.construct_yaml_str)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:seq',
        SafeConstructor.construct_yaml_seq)

SafeConstructor.add_constructor(
        'tag:yaml.org,2002:map',
        SafeConstructor.construct_yaml_map)

SafeConstructor.add_constructor(None,
        SafeConstructor.construct_undefined)
|
||||
|
||||
class FullConstructor(SafeConstructor):
    """Constructor that additionally understands the ``python/*`` tags.

    It can build native Python values (str, bytes, complex, tuples) and
    instances of already-imported classes, but unless a subclass passes
    ``unsafe=True`` it refuses to import modules or treat non-class
    callables as constructors.
    """

    # 'extend' is blacklisted because it is used by
    # construct_python_object_apply to add `listitems` to a newly generate
    # python instance
    def get_state_keys_blacklist(self):
        # Regex fragments for state keys that must never be written onto a
        # constructed instance (e.g. dunder attributes like __class__).
        return ['^extend$', '^__.*__$']

    def get_state_keys_blacklist_regexp(self):
        # Compile the blacklist lazily and cache the result per instance.
        if not hasattr(self, 'state_keys_blacklist_regexp'):
            self.state_keys_blacklist_regexp = re.compile(
                '(' + '|'.join(self.get_state_keys_blacklist()) + ')')
        return self.state_keys_blacklist_regexp

    def construct_python_str(self, node):
        # !!python/str -- plain scalar text.
        return self.construct_scalar(node)

    def construct_python_unicode(self, node):
        # !!python/unicode -- identical to construct_python_str on
        # Python 3; the separate tag exists for older documents.
        return self.construct_scalar(node)

    def construct_python_bytes(self, node):
        # !!python/bytes -- the scalar holds base64 text; decode to bytes.
        try:
            value = self.construct_scalar(node).encode('ascii')
        except UnicodeEncodeError as exc:
            raise ConstructorError(None, None,
                    "failed to convert base64 data into ascii: %s" % exc,
                    node.start_mark)
        try:
            if hasattr(base64, 'decodebytes'):
                return base64.decodebytes(value)
            else:
                # Fallback spelling for base64 modules without decodebytes.
                return base64.decodestring(value)
        except binascii.Error as exc:
            raise ConstructorError(None, None,
                    "failed to decode base64 data: %s" % exc, node.start_mark)

    def construct_python_long(self, node):
        # !!python/long -- Python 3 has a single int type.
        return self.construct_yaml_int(node)

    def construct_python_complex(self, node):
        # !!python/complex -- parsed by the builtin complex() constructor.
        return complex(self.construct_scalar(node))

    def construct_python_tuple(self, node):
        return tuple(self.construct_sequence(node))

    def find_python_module(self, name, mark, unsafe=False):
        """Resolve the module named by a !!python/module: tag suffix.

        Importing is attempted only when unsafe=True; otherwise the module
        must already be present in sys.modules.
        """
        if not name:
            raise ConstructorError("while constructing a Python module", mark,
                    "expected non-empty name appended to the tag", mark)
        if unsafe:
            try:
                __import__(name)
            except ImportError as exc:
                raise ConstructorError("while constructing a Python module",
                        mark,
                        "cannot find module %r (%s)" % (name, exc), mark)
        if name not in sys.modules:
            raise ConstructorError("while constructing a Python module", mark,
                    "module %r is not imported" % name, mark)
        return sys.modules[name]

    def find_python_name(self, name, mark, unsafe=False):
        """Resolve a dotted name to an attribute of an imported module.

        Bare names (no dot) resolve in the builtins module.  As with
        find_python_module, imports only happen when unsafe=True.
        """
        if not name:
            raise ConstructorError("while constructing a Python object", mark,
                    "expected non-empty name appended to the tag", mark)
        if '.' in name:
            module_name, object_name = name.rsplit('.', 1)
        else:
            module_name = 'builtins'
            object_name = name
        if unsafe:
            try:
                __import__(module_name)
            except ImportError as exc:
                raise ConstructorError("while constructing a Python object",
                        mark,
                        "cannot find module %r (%s)" % (module_name, exc),
                        mark)
        if module_name not in sys.modules:
            raise ConstructorError("while constructing a Python object", mark,
                    "module %r is not imported" % module_name, mark)
        module = sys.modules[module_name]
        if not hasattr(module, object_name):
            raise ConstructorError("while constructing a Python object", mark,
                    "cannot find %r in the module %r"
                    % (object_name, module.__name__), mark)
        return getattr(module, object_name)

    def construct_python_name(self, suffix, node):
        # !!python/name:some.dotted.name -- the node value must be empty;
        # the interesting data lives in the tag suffix.
        value = self.construct_scalar(node)
        if value:
            raise ConstructorError("while constructing a Python name",
                    node.start_mark,
                    "expected the empty value, but found %r" % value,
                    node.start_mark)
        return self.find_python_name(suffix, node.start_mark)

    def construct_python_module(self, suffix, node):
        # !!python/module:name -- the node value must be empty.
        value = self.construct_scalar(node)
        if value:
            raise ConstructorError("while constructing a Python module",
                    node.start_mark,
                    "expected the empty value, but found %r" % value,
                    node.start_mark)
        return self.find_python_module(suffix, node.start_mark)

    def make_python_instance(self, suffix, node,
            args=None, kwds=None, newobj=False, unsafe=False):
        """Instantiate the class named by the tag suffix.

        newobj=True uses cls.__new__ (pickle-style two-step construction);
        otherwise the class is called directly.  Unless unsafe=True, the
        resolved name must be an actual class.
        """
        if not args:
            args = []
        if not kwds:
            kwds = {}
        cls = self.find_python_name(suffix, node.start_mark)
        if not (unsafe or isinstance(cls, type)):
            raise ConstructorError("while constructing a Python instance",
                    node.start_mark,
                    "expected a class, but found %r" % type(cls),
                    node.start_mark)
        if newobj and isinstance(cls, type):
            return cls.__new__(cls, *args, **kwds)
        else:
            return cls(*args, **kwds)

    def set_python_instance_state(self, instance, state, unsafe=False):
        """Restore pickle-style state onto a freshly built instance.

        Unless unsafe=True, each key is passed through check_state_key
        before being written (presumably vetting it against the blacklist
        above -- check_state_key is defined elsewhere in this module).
        """
        if hasattr(instance, '__setstate__'):
            instance.__setstate__(state)
        else:
            slotstate = {}
            if isinstance(state, tuple) and len(state) == 2:
                # A (dict_state, slot_state) pair, as __reduce__ produces.
                state, slotstate = state
            if hasattr(instance, '__dict__'):
                if not unsafe and state:
                    for key in state.keys():
                        self.check_state_key(key)
                instance.__dict__.update(state)
            elif state:
                # __slots__-only instance: fold dict state into slot state.
                slotstate.update(state)
            for key, value in slotstate.items():
                if not unsafe:
                    self.check_state_key(key)
                setattr(instance, key, value)

    def construct_python_object(self, suffix, node):
        # Format:
        # !!python/object:module.name { ... state ... }
        # Generator protocol: yield the bare instance first so recursive /
        # self-referencing nodes can link to it, then fill in its state.
        instance = self.make_python_instance(suffix, node, newobj=True)
        yield instance
        deep = hasattr(instance, '__setstate__')
        state = self.construct_mapping(node, deep=deep)
        self.set_python_instance_state(instance, state)

    def construct_python_object_apply(self, suffix, node, newobj=False):
        # Format:
        # !!python/object/apply # (or !!python/object/new)
        # args: [ ... arguments ... ]
        # kwds: { ... keywords ... }
        # state: ... state ...
        # listitems: [ ... listitems ... ]
        # dictitems: { ... dictitems ... }
        # or short format:
        # !!python/object/apply [ ... arguments ... ]
        # The difference between !!python/object/apply and !!python/object/new
        # is how an object is created, check make_python_instance for details.
        if isinstance(node, SequenceNode):
            # Short form: the sequence is the positional-args list.
            args = self.construct_sequence(node, deep=True)
            kwds = {}
            state = {}
            listitems = []
            dictitems = {}
        else:
            value = self.construct_mapping(node, deep=True)
            args = value.get('args', [])
            kwds = value.get('kwds', {})
            state = value.get('state', {})
            listitems = value.get('listitems', [])
            dictitems = value.get('dictitems', {})
        instance = self.make_python_instance(suffix, node, args, kwds, newobj)
        if state:
            self.set_python_instance_state(instance, state)
        if listitems:
            instance.extend(listitems)
        if dictitems:
            for key in dictitems:
                instance[key] = dictitems[key]
        return instance

    def construct_python_object_new(self, suffix, node):
        # !!python/object/new: -- same as apply, but via cls.__new__.
        return self.construct_python_object_apply(suffix, node, newobj=True)
|
||||
|
||||
# Register the python/* scalar and collection tags on FullConstructor,
# in the same order as the original one-call-per-tag sequence.
_FULL_TAG_CONSTRUCTORS = {
    'python/none': FullConstructor.construct_yaml_null,
    'python/bool': FullConstructor.construct_yaml_bool,
    'python/str': FullConstructor.construct_python_str,
    'python/unicode': FullConstructor.construct_python_unicode,
    'python/bytes': FullConstructor.construct_python_bytes,
    'python/int': FullConstructor.construct_yaml_int,
    'python/long': FullConstructor.construct_python_long,
    'python/float': FullConstructor.construct_yaml_float,
    'python/complex': FullConstructor.construct_python_complex,
    'python/list': FullConstructor.construct_yaml_seq,
    'python/tuple': FullConstructor.construct_python_tuple,
    'python/dict': FullConstructor.construct_yaml_map,
}
for _suffix, _constructor in _FULL_TAG_CONSTRUCTORS.items():
    FullConstructor.add_constructor('tag:yaml.org,2002:' + _suffix,
                                    _constructor)

# Multi-constructors receive the rest of the tag as a suffix argument
# (e.g. the dotted name after 'python/object:').
_FULL_MULTI_CONSTRUCTORS = {
    'python/name:': FullConstructor.construct_python_name,
    'python/module:': FullConstructor.construct_python_module,
    'python/object:': FullConstructor.construct_python_object,
    'python/object/new:': FullConstructor.construct_python_object_new,
}
for _prefix, _constructor in _FULL_MULTI_CONSTRUCTORS.items():
    FullConstructor.add_multi_constructor('tag:yaml.org,2002:' + _prefix,
                                          _constructor)

del _FULL_TAG_CONSTRUCTORS, _FULL_MULTI_CONSTRUCTORS
del _suffix, _prefix, _constructor
|
||||
|
||||
class UnsafeConstructor(FullConstructor):
    """FullConstructor with all safety checks disabled.

    Every override simply re-enters the FullConstructor implementation
    with unsafe=True, which permits imports, arbitrary callables as
    constructors, and unvetted state keys.  Never use on untrusted input.
    """

    def find_python_module(self, name, mark):
        return super().find_python_module(name, mark, unsafe=True)

    def find_python_name(self, name, mark):
        return super().find_python_name(name, mark, unsafe=True)

    def make_python_instance(self, suffix, node,
                             args=None, kwds=None, newobj=False):
        return super().make_python_instance(
            suffix, node, args, kwds, newobj, unsafe=True)

    def set_python_instance_state(self, instance, state):
        return super().set_python_instance_state(
            instance, state, unsafe=True)
|
||||
|
||||
# Only the unsafe constructor honors !!python/object/apply: applying an
# arbitrary callable from the document is equivalent to code execution,
# so FullConstructor deliberately does not register this tag.
UnsafeConstructor.add_multi_constructor(
    'tag:yaml.org,2002:python/object/apply:',
    UnsafeConstructor.construct_python_object_apply)
|
||||
|
||||
# Constructor is same as UnsafeConstructor. Need to leave this in place in case
# people have extended it directly.
class Constructor(UnsafeConstructor):
    """Backward-compatibility alias for UnsafeConstructor."""
    pass
|
||||
101
dist/ba_data/python-site-packages/yaml/cyaml.py
vendored
Normal file
101
dist/ba_data/python-site-packages/yaml/cyaml.py
vendored
Normal file
|
|
@ -0,0 +1,101 @@
|
|||
|
||||
__all__ = [
|
||||
'CBaseLoader', 'CSafeLoader', 'CFullLoader', 'CUnsafeLoader', 'CLoader',
|
||||
'CBaseDumper', 'CSafeDumper', 'CDumper'
|
||||
]
|
||||
|
||||
from _yaml import CParser, CEmitter
|
||||
|
||||
from .constructor import *
|
||||
|
||||
from .serializer import *
|
||||
from .representer import *
|
||||
|
||||
from .resolver import *
|
||||
|
||||
class CBaseLoader(CParser, BaseConstructor, BaseResolver):
    """LibYAML-backed loader paired with the base constructor/resolver."""

    def __init__(self, stream):
        # The C parser owns the input stream; the pure-Python mixins only
        # initialize their default per-instance state.
        CParser.__init__(self, stream)
        for mixin_init in (BaseConstructor.__init__, BaseResolver.__init__):
            mixin_init(self)
|
||||
|
||||
class CSafeLoader(CParser, SafeConstructor, Resolver):
    """LibYAML-backed loader restricted to SafeConstructor's tag set."""

    def __init__(self, stream):
        # C parsing plus default state for the safe constructor/resolver.
        CParser.__init__(self, stream)
        for mixin_init in (SafeConstructor.__init__, Resolver.__init__):
            mixin_init(self)
|
||||
|
||||
class CFullLoader(CParser, FullConstructor, Resolver):
    """LibYAML-backed loader using FullConstructor (python/* tags, but no
    imports or arbitrary callables)."""

    def __init__(self, stream):
        CParser.__init__(self, stream)
        for mixin_init in (FullConstructor.__init__, Resolver.__init__):
            mixin_init(self)
|
||||
|
||||
class CUnsafeLoader(CParser, UnsafeConstructor, Resolver):
    """LibYAML-backed loader using UnsafeConstructor -- can execute code
    from the document; never use on untrusted input."""

    def __init__(self, stream):
        CParser.__init__(self, stream)
        for mixin_init in (UnsafeConstructor.__init__, Resolver.__init__):
            mixin_init(self)
|
||||
|
||||
class CLoader(CParser, Constructor, Resolver):
    """Historical LibYAML-backed loader; Constructor is the
    backward-compatibility alias for UnsafeConstructor."""

    def __init__(self, stream):
        CParser.__init__(self, stream)
        for mixin_init in (Constructor.__init__, Resolver.__init__):
            mixin_init(self)
|
||||
|
||||
class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver):
    """LibYAML-backed dumper paired with the base representer/resolver."""

    def __init__(self, stream,
            default_style=None, default_flow_style=False,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None, sort_keys=True):
        # Emission/serialization options all go to the C emitter.
        CEmitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width, encoding=encoding,
                allow_unicode=allow_unicode, line_break=line_break,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        # NOTE(review): initializes Representer/Resolver even though the
        # bases are BaseRepresenter/BaseResolver; the __init__ appears to
        # be shared down the hierarchy (matches upstream PyYAML) -- kept
        # as-is to preserve behavior.
        Representer.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style, sort_keys=sort_keys)
        Resolver.__init__(self)
|
||||
|
||||
class CSafeDumper(CEmitter, SafeRepresenter, Resolver):
    """LibYAML-backed dumper using the safe representer (standard tags
    only)."""

    def __init__(self, stream,
            default_style=None, default_flow_style=False,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None, sort_keys=True):
        # Emission/serialization options all go to the C emitter; the
        # representer only takes the style/sorting knobs.
        CEmitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width, encoding=encoding,
                allow_unicode=allow_unicode, line_break=line_break,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        SafeRepresenter.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style, sort_keys=sort_keys)
        Resolver.__init__(self)
|
||||
|
||||
class CDumper(CEmitter, Serializer, Representer, Resolver):
    """LibYAML-backed dumper with the full (python/* capable) representer."""

    def __init__(self, stream,
            default_style=None, default_flow_style=False,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None, sort_keys=True):
        # NOTE(review): Serializer is a base class but its __init__ is not
        # called; the C emitter receives the serializer-level options
        # (encoding, explicit_start/end, version, tags) directly, so it
        # presumably covers that role -- matches upstream PyYAML.
        CEmitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width, encoding=encoding,
                allow_unicode=allow_unicode, line_break=line_break,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        Representer.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style, sort_keys=sort_keys)
        Resolver.__init__(self)
|
||||
|
||||
62
dist/ba_data/python-site-packages/yaml/dumper.py
vendored
Normal file
62
dist/ba_data/python-site-packages/yaml/dumper.py
vendored
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
|
||||
__all__ = ['BaseDumper', 'SafeDumper', 'Dumper']
|
||||
|
||||
from .emitter import *
|
||||
from .serializer import *
|
||||
from .representer import *
|
||||
from .resolver import *
|
||||
|
||||
class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
    """Pure-Python dumper with the base representer/resolver."""

    def __init__(self, stream,
            default_style=None, default_flow_style=False,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None, sort_keys=True):
        # Options are split across the pipeline stages: layout to the
        # emitter, document framing to the serializer, style/sorting to
        # the representer.
        Emitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width,
                allow_unicode=allow_unicode, line_break=line_break)
        Serializer.__init__(self, encoding=encoding,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        # NOTE(review): initializes Representer/Resolver despite the bases
        # being BaseRepresenter/BaseResolver; the __init__ appears to be
        # shared down the hierarchy (matches upstream PyYAML) -- kept
        # as-is to preserve behavior.
        Representer.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style, sort_keys=sort_keys)
        Resolver.__init__(self)
|
||||
|
||||
class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
    """Pure-Python dumper using the safe representer (standard tags only)."""

    def __init__(self, stream,
            default_style=None, default_flow_style=False,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None, sort_keys=True):
        # Options split across the pipeline: layout to the emitter,
        # document framing to the serializer, style/sorting to the
        # representer.
        Emitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width,
                allow_unicode=allow_unicode, line_break=line_break)
        Serializer.__init__(self, encoding=encoding,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        SafeRepresenter.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style, sort_keys=sort_keys)
        Resolver.__init__(self)
|
||||
|
||||
class Dumper(Emitter, Serializer, Representer, Resolver):
    """Default pure-Python dumper with the full representer."""

    def __init__(self, stream,
            default_style=None, default_flow_style=False,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None, sort_keys=True):
        # Options split across the pipeline: layout to the emitter,
        # document framing to the serializer, style/sorting to the
        # representer.
        Emitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width,
                allow_unicode=allow_unicode, line_break=line_break)
        Serializer.__init__(self, encoding=encoding,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        Representer.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style, sort_keys=sort_keys)
        Resolver.__init__(self)
|
||||
|
||||
1137
dist/ba_data/python-site-packages/yaml/emitter.py
vendored
Normal file
1137
dist/ba_data/python-site-packages/yaml/emitter.py
vendored
Normal file
File diff suppressed because it is too large
Load diff
75
dist/ba_data/python-site-packages/yaml/error.py
vendored
Normal file
75
dist/ba_data/python-site-packages/yaml/error.py
vendored
Normal file
|
|
@ -0,0 +1,75 @@
|
|||
|
||||
__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError']
|
||||
|
||||
class Mark:
    """A position within a YAML source buffer, used in error messages.

    line/column are zero-based; __str__ renders them one-based.  buffer
    may be None when the original text is no longer available, in which
    case no snippet can be produced.
    """

    def __init__(self, name, index, line, column, buffer, pointer):
        self.name = name          # source name (e.g. file name or '<unicode string>')
        self.index = index        # absolute character offset
        self.line = line          # zero-based line number
        self.column = column      # zero-based column number
        self.buffer = buffer     # full source text, or None
        self.pointer = pointer    # offset of the mark within buffer

    def get_snippet(self, indent=4, max_length=75):
        """Return the marked line excerpt with a caret, or None."""
        if self.buffer is None:
            return None
        breaks = '\0\r\n\x85\u2028\u2029'
        limit = max_length / 2 - 1
        # Scan left to the line start, truncating with ' ... ' if the
        # excerpt would exceed roughly half of max_length.
        head, start = '', self.pointer
        while start > 0 and self.buffer[start - 1] not in breaks:
            start -= 1
            if self.pointer - start > limit:
                head, start = ' ... ', start + 5
                break
        # Scan right to the line end, truncating symmetrically.
        tail, end = '', self.pointer
        while end < len(self.buffer) and self.buffer[end] not in breaks:
            end += 1
            if end - self.pointer > limit:
                tail, end = ' ... ', end - 5
                break
        excerpt = self.buffer[start:end]
        caret_pad = ' ' * (indent + self.pointer - start + len(head))
        return '%s%s%s%s\n%s^' % (' ' * indent, head, excerpt, tail,
                                  caret_pad)

    def __str__(self):
        # One-based line/column for human consumption; append the snippet
        # when the buffer is available.
        where = ' in "%s", line %d, column %d' % (
            self.name, self.line + 1, self.column + 1)
        snippet = self.get_snippet()
        if snippet is not None:
            where = where + ':\n' + snippet
        return where
|
||||
|
||||
class YAMLError(Exception):
    """Base class for all errors raised by this yaml package."""
    pass
|
||||
|
||||
class MarkedYAMLError(YAMLError):
    """YAML error annotated with context/problem text and source marks.

    All fields are optional; __str__ stitches the available pieces into a
    multi-line human-readable message.
    """

    def __init__(self, context=None, context_mark=None,
                 problem=None, problem_mark=None, note=None):
        self.context = context
        self.context_mark = context_mark
        self.problem = problem
        self.problem_mark = problem_mark
        self.note = note

    def __str__(self):
        parts = []
        if self.context is not None:
            parts.append(self.context)
        # The context mark is shown only when it adds information beyond
        # the problem mark (different source position, or no problem at
        # all).
        show_context_mark = self.context_mark is not None and (
            self.problem is None
            or self.problem_mark is None
            or self.context_mark.name != self.problem_mark.name
            or self.context_mark.line != self.problem_mark.line
            or self.context_mark.column != self.problem_mark.column)
        if show_context_mark:
            parts.append(str(self.context_mark))
        if self.problem is not None:
            parts.append(self.problem)
        if self.problem_mark is not None:
            parts.append(str(self.problem_mark))
        if self.note is not None:
            parts.append(self.note)
        return '\n'.join(parts)
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue