2021-10-02 04:11:59 +08:00
|
|
|
|
import re
|
2023-11-03 03:34:23 +08:00
|
|
|
|
import typing as t
|
2021-09-25 23:29:10 +08:00
|
|
|
|
from copy import deepcopy
|
|
|
|
|
|
|
|
|
|
from cached_property import cached_property
|
|
|
|
|
|
2022-02-03 23:46:48 +08:00
|
|
|
|
from deploy.utils import DEPLOY_TEMPLATE, poor_yaml_read, poor_yaml_write
|
2021-09-25 23:29:10 +08:00
|
|
|
|
from module.base.timer import timer
|
2025-03-17 01:38:51 +08:00
|
|
|
|
from module.config.deep import deep_default, deep_get, deep_iter, deep_pop, deep_set
|
2023-08-28 17:16:24 +08:00
|
|
|
|
from module.config.env import IS_ON_PHONE_CLOUD
|
2023-11-03 03:34:23 +08:00
|
|
|
|
from module.config.server import VALID_CHANNEL_PACKAGE, VALID_PACKAGE, VALID_SERVER_LIST, to_package, to_server
|
2021-09-25 23:29:10 +08:00
|
|
|
|
from module.config.utils import *
|
2025-03-17 01:38:51 +08:00
|
|
|
|
from module.config.redirect_utils.utils import *
|
2021-09-25 23:29:10 +08:00
|
|
|
|
|
|
|
|
|
CONFIG_IMPORT = '''
|
|
|
|
|
import datetime
|
|
|
|
|
|
2021-12-07 23:23:08 +08:00
|
|
|
|
# This file was automatically generated by module/config/config_updater.py.
|
2021-09-25 23:29:10 +08:00
|
|
|
|
# Don't modify it manually.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class GeneratedConfig:
|
|
|
|
|
"""
|
|
|
|
|
Auto generated configuration
|
|
|
|
|
"""
|
|
|
|
|
'''.strip().split('\n')
|
2021-10-02 04:11:59 +08:00
|
|
|
|
# Localized "archives" prefix per game server.
# Event.__init__ prepends this to event names of war-archive events,
# so they display as e.g. "档案 <event name>".
ARCHIVES_PREFIX = {
    'cn': '档案 ',
    'en': 'archives ',
    'jp': '檔案 ',
    'tw': '檔案 '
}
|
2023-03-23 23:31:28 +08:00
|
|
|
|
# Scheduler task names grouped by campaign kind.
# insert_event() uses these groups to decide which tasks receive
# a given event as a `Campaign.Event` option.
MAINS = ['Main', 'Main2', 'Main3']
EVENTS = ['Event', 'Event2', 'EventA', 'EventB', 'EventC', 'EventD', 'EventSp']
GEMS_FARMINGS = ['GemsFarming']
RAIDS = ['Raid', 'RaidDaily']
WAR_ARCHIVES = ['WarArchives']
COALITIONS = ['Coalition', 'CoalitionSp']
MARITIME_ESCORTS = ['MaritimeEscort']
|
2021-10-02 04:11:59 +08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class Event:
    """
    One event parsed from a markdown table row of campaign/Readme.md.

    Holds the event date, campaign directory, and its name on each
    game server (cn/en/jp/tw). Compares and hashes by directory name,
    so an Event is interchangeable with its directory string.
    """

    def __init__(self, text):
        # Split a table row "| a | b | ... |" into its 7 stripped cells.
        cells = [cell.strip() for cell in text.strip('| \n').split('|')]
        self.date, self.directory, self.name, self.cn, self.en, self.jp, self.tw = cells

        self.directory = self.directory.replace(' ', '_')
        # Strip characters that would break generated code or filters.
        self.cn = self.cn.replace('、', '')
        self.en = self.en.replace(',', '').replace('\'', '').replace('\\', '')
        self.jp = self.jp.replace('、', '')
        self.tw = self.tw.replace('、', '')

        # Classify the event by its directory prefix.
        self.is_war_archives = self.directory.startswith('war_archives')
        self.is_raid = self.directory.startswith('raid_')
        self.is_coalition = self.directory.startswith('coalition_')

        # '-' in a server column means the event never ran on that server.
        # War-archive events get the localized "archives" prefix.
        for server in ARCHIVES_PREFIX:
            server_name = getattr(self, server)
            if server_name == '-':
                setattr(self, server, None)
            elif self.is_war_archives:
                setattr(self, server, ARCHIVES_PREFIX[server] + server_name)

    def __str__(self):
        return self.directory

    def __eq__(self, other):
        return str(self) == str(other)

    def __lt__(self, other):
        return str(self) < str(other)

    def __hash__(self):
        return hash(str(self))
|
|
|
|
|
|
2021-09-25 23:29:10 +08:00
|
|
|
|
|
|
|
|
|
class ConfigGenerator:
|
|
|
|
|
    @cached_property
    def argument(self):
        """
        Load argument.yaml, and standardise its structure.

        <group>:
            <argument>:
                type: checkbox|select|textarea|input
                value:
                option (Optional): Options, if argument has any options.
                validate (Optional): datetime
        """
        data = {}
        raw = read_file(filepath_argument('argument'))
        for path, value in deep_iter(raw, depth=2):
            # Start from defaults, then let the yaml definition override them.
            arg = {
                'type': 'input',
                'value': '',
                # option
            }
            # A bare scalar in yaml is shorthand for {'value': scalar}.
            if not isinstance(value, dict):
                value = {'value': value}
            arg['type'] = data_to_type(value, arg=path[1])
            # datetime values get both a dedicated widget type and a validator.
            if isinstance(value['value'], datetime):
                arg['type'] = 'datetime'
                arg['validate'] = 'datetime'
            # Manual definition has the highest priority
            arg.update(value)
            deep_set(data, keys=path, value=arg)

        # Define storage group
        # Hidden per-config key-value store; not editable from the GUI.
        arg = {
            'type': 'storage',
            'value': {},
            'valuetype': 'ignore',
            'display': 'disabled',
        }
        deep_set(data, keys=['Storage', 'Storage'], value=arg)
        return data
|
|
|
|
|
|
|
|
|
|
@cached_property
|
|
|
|
|
def task(self):
|
|
|
|
|
"""
|
2023-06-17 22:08:57 +08:00
|
|
|
|
<task_group>:
|
|
|
|
|
<task>:
|
|
|
|
|
<group>:
|
2021-09-25 23:29:10 +08:00
|
|
|
|
"""
|
|
|
|
|
return read_file(filepath_argument('task'))
|
|
|
|
|
|
2022-04-19 00:59:14 +08:00
|
|
|
|
@cached_property
|
|
|
|
|
def default(self):
|
|
|
|
|
"""
|
|
|
|
|
<task>:
|
|
|
|
|
<group>:
|
|
|
|
|
<argument>: value
|
|
|
|
|
"""
|
|
|
|
|
return read_file(filepath_argument('default'))
|
|
|
|
|
|
2021-09-25 23:29:10 +08:00
|
|
|
|
@cached_property
|
|
|
|
|
def override(self):
|
|
|
|
|
"""
|
|
|
|
|
<task>:
|
|
|
|
|
<group>:
|
|
|
|
|
<argument>: value
|
|
|
|
|
"""
|
|
|
|
|
return read_file(filepath_argument('override'))
|
|
|
|
|
|
2021-10-10 23:19:25 +08:00
|
|
|
|
@cached_property
|
|
|
|
|
def gui(self):
|
|
|
|
|
"""
|
|
|
|
|
<i18n_group>:
|
|
|
|
|
<i18n_key>: value, value is None
|
|
|
|
|
"""
|
|
|
|
|
return read_file(filepath_argument('gui'))
|
|
|
|
|
|
2021-09-25 23:29:10 +08:00
|
|
|
|
    @cached_property
    @timer
    def args(self):
        """
        Merge definitions into standardised json.

            task.yaml ---+
        argument.yaml ---+-----> args.json
        override.yaml ---+
         default.yaml ---+
        """
        # Construct args
        data = {}
        for path, groups in deep_iter(self.task, depth=3):
            if 'tasks' not in path:
                continue
            task = path[2]
            # Add storage to all task
            groups.append('Storage')
            for group in groups:
                if group not in self.argument:
                    print(f'`{task}.{group}` is not related to any argument group')
                    continue
                # deepcopy so per-task defaults/overrides don't leak between tasks.
                deep_set(data, keys=[task, group], value=deepcopy(self.argument[group]))

        def check_override(path, value):
            # Validate that an entry from default.yaml/override.yaml targets an
            # existing argument with a compatible type/option. Returns bool.
            # Check existence
            old = deep_get(data, keys=path, default=None)
            if old is None:
                print(f'`{".".join(path)}` is not a existing argument')
                return False
            # Check type
            # But allow `Interval` to be different
            old_value = old.get('value', None) if isinstance(old, dict) else old
            # NOTE(review): reads from `old` although the condition tests
            # `value` — when the incoming value is a dict this makes the type
            # check compare old against itself (i.e. vacuous). Confirm whether
            # `value.get('value', None)` was intended before changing.
            value = old.get('value', None) if isinstance(value, dict) else value
            if type(value) != type(old_value) \
                    and old_value is not None \
                    and path[2] not in ['SuccessInterval', 'FailureInterval']:
                print(
                    f'`{value}` ({type(value)}) and `{".".join(path)}` ({type(old_value)}) are in different types')
                return False
            # Check option
            if isinstance(old, dict) and 'option' in old:
                if value not in old['option']:
                    print(f'`{value}` is not an option of argument `{".".join(path)}`')
                    return False
            return True

        # Set defaults
        for p, v in deep_iter(self.default, depth=3):
            if not check_override(p, v):
                continue
            deep_set(data, keys=p + ['value'], value=v)
        # Override non-modifiable arguments
        for p, v in deep_iter(self.override, depth=3):
            if not check_override(p, v):
                continue
            if isinstance(v, dict):
                typ = v.get('type')
                # 'state'/'lock' overrides stay visible; only plain value
                # overrides are hidden from the GUI by default.
                if typ == 'state':
                    pass
                elif typ == 'lock':
                    pass
                elif deep_get(v, keys='value') is not None:
                    deep_default(v, keys='display', value='hide')
                for arg_k, arg_v in v.items():
                    deep_set(data, keys=p + [arg_k], value=arg_v)
            else:
                deep_set(data, keys=p + ['value'], value=v)
                deep_set(data, keys=p + ['display'], value='hide')
        # Set command
        # Each task's Scheduler.Command is pinned to the task name and hidden.
        for path, groups in deep_iter(self.task, depth=3):
            if 'tasks' not in path:
                continue
            task = path[2]
            if deep_get(data, keys=f'{task}.Scheduler.Command'):
                deep_set(data, keys=f'{task}.Scheduler.Command.value', value=task)
                deep_set(data, keys=f'{task}.Scheduler.Command.display', value='hide')

        return data
|
|
|
|
|
|
|
|
|
|
@timer
|
|
|
|
|
def generate_code(self):
|
|
|
|
|
"""
|
|
|
|
|
Generate python code.
|
|
|
|
|
|
|
|
|
|
args.json ---> config_generated.py
|
|
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
visited_group = set()
|
|
|
|
|
visited_path = set()
|
|
|
|
|
lines = CONFIG_IMPORT
|
|
|
|
|
for path, data in deep_iter(self.argument, depth=2):
|
|
|
|
|
group, arg = path
|
|
|
|
|
if group not in visited_group:
|
|
|
|
|
lines.append('')
|
|
|
|
|
lines.append(f' # Group `{group}`')
|
|
|
|
|
visited_group.add(group)
|
|
|
|
|
|
|
|
|
|
option = ''
|
|
|
|
|
if 'option' in data and data['option']:
|
|
|
|
|
option = ' # ' + ', '.join([str(opt) for opt in data['option']])
|
|
|
|
|
path = '.'.join(path)
|
|
|
|
|
lines.append(f' {path_to_arg(path)} = {repr(parse_value(data["value"], data=data))}{option}')
|
|
|
|
|
visited_path.add(path)
|
|
|
|
|
|
2022-05-08 14:02:28 +08:00
|
|
|
|
with open(filepath_code(), 'w', encoding='utf-8', newline='') as f:
|
2021-09-25 23:29:10 +08:00
|
|
|
|
for text in lines:
|
|
|
|
|
f.write(text + '\n')
|
|
|
|
|
|
|
|
|
|
    @timer
    def generate_i18n(self, lang):
        """
        Load old translations and generate new translation file.

                       args.json ---+-----> i18n/<lang>.json
        (old) i18n/<lang>.json ---+
        """
        new = {}
        old = read_file(filepath_i18n(lang))

        def deep_load(keys, default=True, words=('name', 'help')):
            # Copy existing translations for `keys` + each word into `new`;
            # missing entries fall back to the dotted key path (default=True)
            # or to the word itself (default=False).
            for word in words:
                k = keys + [str(word)]
                d = ".".join(k) if default else str(word)
                v = deep_get(old, keys=k, default=d)
                deep_set(new, keys=k, value=v)

        # Menu
        for path, data in deep_iter(self.task, depth=3):
            if 'tasks' not in path:
                continue
            task_group, _, task = path
            deep_load(['Menu', task_group])
            deep_load(['Task', task])
        # Arguments
        visited_group = set()
        for path, data in deep_iter(self.argument, depth=2):
            if path[0] not in visited_group:
                deep_load([path[0], '_info'])
                visited_group.add(path[0])
            deep_load(path)
            if 'option' in data:
                deep_load(path, words=data['option'], default=False)
        # Event names
        # Names come from SameLanguageServer > en > cn > jp > tw
        # (deep_default only fills keys that are still missing, so the
        # first loop's choice wins over the later fallback loops.)
        events = {}
        for event in self.event:
            if lang in LANG_TO_SERVER:
                name = event.__getattribute__(LANG_TO_SERVER[lang])
                if name:
                    deep_default(events, keys=event.directory, value=name)
        for server in ['en', 'cn', 'jp', 'tw']:
            for event in self.event:
                name = event.__getattribute__(server)
                if name:
                    deep_default(events, keys=event.directory, value=name)
        for event in sorted(self.event):
            name = events.get(event.directory, event.directory)
            deep_set(new, keys=f'Campaign.Event.{event.directory}', value=name)
        # Package names
        # An untranslated package entry still equals the package id; replace
        # it with the upper-cased server name.
        for package, server in VALID_PACKAGE.items():
            path = ['Emulator', 'PackageName', package]
            if deep_get(new, keys=path) == package:
                deep_set(new, keys=path, value=server.upper())

        for package, server_and_channel in VALID_CHANNEL_PACKAGE.items():
            server, channel = server_and_channel
            name = deep_get(new, keys=['Emulator', 'PackageName', to_package(server)])
            if lang == SERVER_TO_LANG[server]:
                value = f'{name} {channel}渠道服 {package}'
            else:
                value = f'{name} {package}'
            deep_set(new, keys=['Emulator', 'PackageName', package], value=value)
        # Game server names
        for server, _list in VALID_SERVER_LIST.items():
            for index in range(len(_list)):
                path = ['Emulator', 'ServerName', f'{server}-{index}']
                prefix = server.split('_')[0].upper()
                prefix = '国服' if prefix == 'CN' else prefix
                deep_set(new, keys=path, value=f'[{prefix}] {_list[index]}')
        # GUI i18n
        for path, _ in deep_iter(self.gui, depth=2):
            group, key = path
            deep_load(keys=['Gui', group], words=(key,))
        # zh-TW
        # Simplified-Chinese phrases replaced with Traditional equivalents.
        dic_repl = {
            '設置': '設定',
            '支持': '支援',
            '啓': '啟',
            '异': '異',
            '服務器': '伺服器',
            '文件': '檔案',
        }
        if lang == 'zh-TW':
            for path, value in deep_iter(new, depth=3):
                for before, after in dic_repl.items():
                    value = value.replace(before, after)
                deep_set(new, keys=path, value=value)

        write_file(filepath_i18n(lang), new)
|
|
|
|
|
|
2021-09-27 18:13:57 +08:00
|
|
|
|
@cached_property
|
|
|
|
|
def menu(self):
|
|
|
|
|
"""
|
2021-10-02 04:11:59 +08:00
|
|
|
|
Generate menu definitions
|
2021-09-27 18:13:57 +08:00
|
|
|
|
|
|
|
|
|
task.yaml --> menu.json
|
|
|
|
|
|
|
|
|
|
"""
|
|
|
|
|
data = {}
|
2023-06-17 22:08:57 +08:00
|
|
|
|
for task_group in self.task.keys():
|
|
|
|
|
value = deep_get(self.task, keys=[task_group, 'menu'])
|
|
|
|
|
if value not in ['collapse', 'list']:
|
|
|
|
|
value = 'collapse'
|
|
|
|
|
deep_set(data, keys=[task_group, 'menu'], value=value)
|
|
|
|
|
value = deep_get(self.task, keys=[task_group, 'page'])
|
|
|
|
|
if value not in ['setting', 'tool']:
|
|
|
|
|
value = 'setting'
|
|
|
|
|
deep_set(data, keys=[task_group, 'page'], value=value)
|
|
|
|
|
tasks = deep_get(self.task, keys=[task_group, 'tasks'], default={})
|
|
|
|
|
tasks = list(tasks.keys())
|
|
|
|
|
deep_set(data, keys=[task_group, 'tasks'], value=tasks)
|
2021-09-27 18:13:57 +08:00
|
|
|
|
|
|
|
|
|
return data
|
|
|
|
|
|
2021-10-02 04:11:59 +08:00
|
|
|
|
    @cached_property
    @timer
    def event(self):
        """
        Parse events out of ./campaign/Readme.md and re-format its table
        in place (column widths re-aligned).

        Returns:
            list[Event]: From latest to oldest
        """
        def calc_width(text):
            # Display width: CJK/full-width characters count as 2 columns.
            return len(text) + len(re.findall(
                r'[\u3000-\u30ff\u3400-\u4dbf\u4e00-\u9fff、!()]', text))

        lines = []
        data_lines = []
        data_widths = []
        column_width = [4]*7  # `:---`
        events = []
        with open('./campaign/Readme.md', encoding='utf-8') as f:
            for text in f.readlines():
                if not re.search(r'^\|.+\|$', text):
                    # not a table line
                    lines.append(text)
                elif re.search(r'^.*\-{3,}.*$', text):
                    # is a delimiter line
                    # dropped here; regenerated after the header row below
                    continue
                else:
                    line_entries = [x.strip() for x in text.strip('| \n').split('|')]
                    data_lines.append(line_entries)
                    data_width = [calc_width(string) for string in line_entries]
                    data_widths.append(data_width)
                    column_width = [max(l1, l2) for l1, l2 in zip(column_width, data_width)]
                    # Rows containing an 8-digit date are event rows.
                    if re.search(r'\d{8}', text):
                        event = Event(text)
                        events.append(event)
        # Re-emit the table (padded to the widest cell per column) after the
        # non-table lines; the delimiter row follows the header row (i == 0).
        for i, (line, old_width) in enumerate(zip(data_lines, data_widths)):
            lines.append('| ' + ' | '.join([cell+' '*(width-length) for cell, width, length in zip(line, column_width, old_width)]) + ' |\n')
            if i == 0:
                lines.append('| ' + ' | '.join([':'+'-'*(width-1) for width in column_width]) + ' |\n')
        with open('./campaign/Readme.md', 'w', encoding='utf-8') as f:
            f.writelines(lines)
        return events[::-1]
|
|
|
|
|
|
|
|
|
|
    def insert_event(self):
        """
        Insert event information into `self.args`.

        ./campaign/Readme.md -----+
                                  v
                args.json -----+-----> args.json
        """
        for event in self.event:
            for server in ARCHIVES_PREFIX.keys():
                name = event.__getattribute__(server)

                def insert(key):
                    # Append the event to the task's option list and record it
                    # as the latest event for this server (deep_default keeps
                    # the first — i.e. newest — event, since self.event is
                    # ordered latest to oldest).
                    # Note: closes over the loop's `event`/`name`/`server`;
                    # only called within the same iteration, so safe.
                    opts = deep_get(self.args, keys=f'{key}.Campaign.Event.option')
                    if event not in opts:
                        opts.append(event)
                    if name:
                        deep_default(self.args, keys=f'{key}.Campaign.Event.{server}', value=event)

                if name:
                    # Dispatch on the event kind to the matching task group.
                    if event.is_raid:
                        for task in RAIDS:
                            insert(task)
                    elif event.is_war_archives:
                        for task in WAR_ARCHIVES:
                            insert(task)
                    elif event.is_coalition:
                        for task in COALITIONS:
                            insert(task)
                    else:
                        for task in EVENTS + GEMS_FARMINGS:
                            insert(task)

        for task in EVENTS + GEMS_FARMINGS + WAR_ARCHIVES + RAIDS + COALITIONS:
            options = deep_get(self.args, keys=f'{task}.Campaign.Event.option')
            # Remove campaign_main from event list
            options = [option for option in options if option != 'campaign_main']
            # Sort options
            options = sorted(options)
            deep_set(self.args, keys=f'{task}.Campaign.Event.option', value=options)
            # Sort latest
            # Pop then re-set the per-server latest keys so they end up after
            # `option` / `option_bold` in the output ordering.
            latest = {}
            for server in ARCHIVES_PREFIX.keys():
                latest[server] = deep_pop(self.args, keys=f'{task}.Campaign.Event.{server}', default='')
            bold = sorted(set(latest.values()))
            deep_set(self.args, keys=f'{task}.Campaign.Event.option_bold', value=bold)
            for server, event in latest.items():
                deep_set(self.args, keys=f'{task}.Campaign.Event.{server}', value=event)
|
2021-10-02 04:11:59 +08:00
|
|
|
|
|
2022-02-03 23:46:48 +08:00
|
|
|
|
@staticmethod
|
|
|
|
|
def generate_deploy_template():
|
|
|
|
|
template = poor_yaml_read(DEPLOY_TEMPLATE)
|
|
|
|
|
cn = {
|
2023-06-04 17:19:45 +08:00
|
|
|
|
'Repository': 'git://git.lyoko.io/AzurLaneAutoScript',
|
2025-02-23 23:55:02 +08:00
|
|
|
|
'PypiMirror': 'https://mirrors.aliyun.com/pypi/simple',
|
2023-03-28 16:55:37 +08:00
|
|
|
|
'Language': 'zh-CN',
|
2022-02-03 23:46:48 +08:00
|
|
|
|
}
|
|
|
|
|
aidlux = {
|
|
|
|
|
'GitExecutable': '/usr/bin/git',
|
|
|
|
|
'PythonExecutable': '/usr/bin/python',
|
|
|
|
|
'RequirementsFile': './deploy/AidLux/0.92/requirements.txt',
|
|
|
|
|
'AdbExecutable': '/usr/bin/adb',
|
|
|
|
|
}
|
|
|
|
|
|
2022-06-21 20:25:05 -04:00
|
|
|
|
docker = {
|
|
|
|
|
'GitExecutable': '/usr/bin/git',
|
2023-02-27 09:53:53 +08:00
|
|
|
|
'PythonExecutable': '/usr/local/bin/python',
|
2022-06-21 20:25:05 -04:00
|
|
|
|
'RequirementsFile': './deploy/docker/requirements.txt',
|
|
|
|
|
'AdbExecutable': '/usr/bin/adb',
|
|
|
|
|
}
|
|
|
|
|
|
2023-06-24 19:09:24 +08:00
|
|
|
|
linux = {
|
|
|
|
|
'GitExecutable': '/usr/bin/git',
|
|
|
|
|
'PythonExecutable': 'python',
|
|
|
|
|
'RequirementsFile': './deploy/headless/requirements.txt',
|
|
|
|
|
'AdbExecutable': '/usr/bin/adb',
|
|
|
|
|
'SSHExecutable': '/usr/bin/ssh',
|
|
|
|
|
'ReplaceAdb': 'false'
|
|
|
|
|
}
|
|
|
|
|
|
2022-02-03 23:46:48 +08:00
|
|
|
|
def update(suffix, *args):
|
|
|
|
|
file = f'./config/deploy.{suffix}.yaml'
|
|
|
|
|
new = deepcopy(template)
|
|
|
|
|
for dic in args:
|
|
|
|
|
new.update(dic)
|
|
|
|
|
poor_yaml_write(data=new, file=file)
|
|
|
|
|
|
|
|
|
|
update('template')
|
|
|
|
|
update('template-cn', cn)
|
|
|
|
|
update('template-AidLux', aidlux)
|
|
|
|
|
update('template-AidLux-cn', aidlux, cn)
|
2022-06-21 20:25:05 -04:00
|
|
|
|
update('template-docker', docker)
|
|
|
|
|
update('template-docker-cn', docker, cn)
|
2023-06-24 19:09:24 +08:00
|
|
|
|
update('template-linux', linux)
|
|
|
|
|
update('template-linux-cn', linux, cn)
|
2022-02-03 23:46:48 +08:00
|
|
|
|
|
2022-04-19 00:17:22 +08:00
|
|
|
|
def insert_package(self):
|
|
|
|
|
option = deep_get(self.argument, keys='Emulator.PackageName.option')
|
|
|
|
|
option += list(VALID_PACKAGE.keys())
|
2022-04-20 00:05:50 +08:00
|
|
|
|
option += list(VALID_CHANNEL_PACKAGE.keys())
|
2022-04-19 00:17:22 +08:00
|
|
|
|
deep_set(self.argument, keys='Emulator.PackageName.option', value=option)
|
|
|
|
|
deep_set(self.args, keys='Alas.Emulator.PackageName.option', value=option)
|
|
|
|
|
|
2022-07-21 10:09:20 +08:00
|
|
|
|
def insert_server(self):
|
|
|
|
|
option = deep_get(self.argument, keys='Emulator.ServerName.option')
|
|
|
|
|
server_list = []
|
|
|
|
|
for server, _list in VALID_SERVER_LIST.items():
|
|
|
|
|
for index in range(len(_list)):
|
|
|
|
|
server_list.append(f'{server}-{index}')
|
|
|
|
|
option += server_list
|
|
|
|
|
deep_set(self.argument, keys='Emulator.ServerName.option', value=option)
|
|
|
|
|
deep_set(self.args, keys='Alas.Emulator.ServerName.option', value=option)
|
|
|
|
|
|
2021-09-25 23:29:10 +08:00
|
|
|
|
    @timer
    def generate(self):
        """
        Run the full generation pipeline: evaluate the cached sources,
        inject event/package/server options, then write args.json,
        menu.json, config_generated.py, all i18n files and the deploy
        templates. Order matters: inserts mutate self.args/self.argument
        before the write/generate steps read them.
        """
        _ = self.args
        _ = self.menu
        _ = self.event
        self.insert_event()
        self.insert_package()
        self.insert_server()
        write_file(filepath_args(), self.args)
        write_file(filepath_args('menu'), self.menu)
        self.generate_code()
        for lang in LANGUAGES:
            self.generate_i18n(lang)
        self.generate_deploy_template()
|
2021-09-25 23:29:10 +08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class ConfigUpdater:
    """
    Updates a user's config dict against the generated args.json:
    fills/locks values in config_update() and migrates renamed options
    through the `redirection` rules in config_redirect().
    """
    # source, target, (optional)convert_func
    # Each row maps an old config key (or tuple of keys) to its new key;
    # convert_func transforms the old value. All current entries are
    # commented out — kept as a history of past migrations.
    redirection = [
        # ('OpsiDaily.OpsiDaily.BuySupply', 'OpsiShop.Scheduler.Enable'),
        # ('OpsiDaily.Scheduler.Enable', 'OpsiDaily.OpsiDaily.DoMission'),
        # ('OpsiShop.Scheduler.Enable', 'OpsiShop.OpsiShop.BuySupply'),
        # ('ShopOnce.GuildShop.Filter', 'ShopOnce.GuildShop.Filter', bp_redirect),
        # ('ShopOnce.MedalShop2.Filter', 'ShopOnce.MedalShop2.Filter', bp_redirect),
        # (('Alas.DropRecord.SaveResearch', 'Alas.DropRecord.UploadResearch'),
        #  'Alas.DropRecord.ResearchRecord', upload_redirect),
        # (('Alas.DropRecord.SaveCommission', 'Alas.DropRecord.UploadCommission'),
        #  'Alas.DropRecord.CommissionRecord', upload_redirect),
        # (('Alas.DropRecord.SaveOpsi', 'Alas.DropRecord.UploadOpsi'),
        #  'Alas.DropRecord.OpsiRecord', upload_redirect),
        # (('Alas.DropRecord.SaveMeowfficerTalent', 'Alas.DropRecord.UploadMeowfficerTalent'),
        #  'Alas.DropRecord.MeowfficerTalent', upload_redirect),
        # ('Alas.DropRecord.SaveCombat', 'Alas.DropRecord.CombatRecord', upload_redirect),
        # ('Alas.DropRecord.SaveMeowfficer', 'Alas.DropRecord.MeowfficerBuy', upload_redirect),
        # ('Alas.Emulator.PackageName', 'Alas.DropRecord.API', api_redirect),
        # ('Alas.RestartEmulator.Enable', 'Alas.RestartEmulator.ErrorRestart'),
        # ('OpsiGeneral.OpsiGeneral.BuyActionPoint', 'OpsiGeneral.OpsiGeneral.BuyActionPointLimit', action_point_redirect),
        # ('BattlePass.BattlePass.BattlePassReward', 'Freebies.BattlePass.Collect'),
        # ('DataKey.Scheduler.Enable', 'Freebies.DataKey.Collect'),
        # ('DataKey.DataKey.ForceGet', 'Freebies.DataKey.ForceCollect'),
        # ('SupplyPack.SupplyPack.WeeklyFreeSupplyPack', 'Freebies.SupplyPack.Collect'),
        # ('Commission.Commission.CommissionFilter', 'Commission.Commission.CustomFilter'),
        # 2023.02.17
        # ('OpsiAshBeacon.OpsiDossierBeacon.Enable', 'OpsiAshBeacon.OpsiAshBeacon.AttackMode', dossier_redirect),
        # ('General.Retirement.EnhanceFavourite', 'General.Enhance.ShipToEnhance', enhance_favourite_redirect),
        # ('General.Retirement.EnhanceFilter', 'General.Enhance.Filter'),
        # ('General.Retirement.EnhanceCheckPerCategory', 'General.Enhance.CheckPerCategory', enhance_check_redirect),
        # ('General.Retirement.OldRetireN', 'General.OldRetire.N'),
        # ('General.Retirement.OldRetireR', 'General.OldRetire.R'),
        # ('General.Retirement.OldRetireSR', 'General.OldRetire.SR'),
        # ('General.Retirement.OldRetireSSR', 'General.OldRetire.SSR'),
        # (('GemsFarming.GemsFarming.FlagshipChange', 'GemsFarming.GemsFarming.FlagshipEquipChange'),
        #  'GemsFarming.GemsFarming.ChangeFlagship',
        #  change_ship_redirect),
        # (('GemsFarming.GemsFarming.VanguardChange', 'GemsFarming.GemsFarming.VanguardEquipChange'),
        #  'GemsFarming.GemsFarming.ChangeVanguard',
        #  change_ship_redirect),
        # ('Alas.DropRecord.API', 'Alas.DropRecord.API', api_redirect2)
    ]
    # redirection += [
    #     (
    #         (f'{task}.Emotion.CalculateEmotion', f'{task}.Emotion.IgnoreLowEmotionWarn'),
    #         f'{task}.Emotion.Mode',
    #         emotion_mode_redirect
    #     ) for task in [
    #         'Main', 'Main2', 'Main3', 'GemsFarming',
    #         'Event', 'Event2', 'EventA', 'EventB', 'EventC', 'EventD', 'EventSp', 'Raid', 'RaidDaily',
    #         'Sos', 'WarArchives',
    #     ]
    # ]
|
2022-01-13 18:41:06 +08:00
|
|
|
|
|
2021-09-25 23:29:10 +08:00
|
|
|
|
@cached_property
|
|
|
|
|
def args(self):
|
|
|
|
|
return read_file(filepath_args())
|
|
|
|
|
|
2022-05-01 10:22:14 +08:00
|
|
|
|
    def config_update(self, old, is_template=False):
        """
        Args:
            old (dict): existing user config to migrate
            is_template (bool): True when regenerating the template config;
                user values are discarded in favor of defaults.

        Returns:
            dict: updated config
        """
        new = {}

        for keys, data in deep_iter(self.args, depth=3):
            value = deep_get(old, keys=keys, default=data['value'])
            typ = data['type']
            display = data.get('display')
            # Reset to the defined default for templates, empty values,
            # locked/state arguments, and hidden (non-stored) arguments.
            if is_template or value is None or value == '' \
                    or typ in ['lock', 'state'] or (display == 'hide' and typ != 'stored'):
                value = data['value']
            value = parse_value(value, data=data)
            deep_set(new, keys=keys, value=value)

        # AzurStatsID
        # Templates carry no ID; real configs get a random one if missing.
        if is_template:
            deep_set(new, 'Alas.DropRecord.AzurStatsID', None)
        else:
            deep_default(new, 'Alas.DropRecord.AzurStatsID', random_id())
        # Update to latest event
        server = to_server(deep_get(new, 'Alas.Emulator.PackageName', 'cn'))
        if not is_template:
            for task in EVENTS + RAIDS + COALITIONS:
                deep_set(new,
                         keys=f'{task}.Campaign.Event',
                         value=deep_get(self.args, f'{task}.Campaign.Event.{server}'))
            # GemsFarming only follows the latest event if it was already
            # set to farm an event (not campaign_main).
            for task in ['GemsFarming']:
                if deep_get(new, keys=f'{task}.Campaign.Event', default='campaign_main') != 'campaign_main':
                    deep_set(new,
                             keys=f'{task}.Campaign.Event',
                             value=deep_get(self.args, f'{task}.Campaign.Event.{server}'))
            # War archive does not allow campaign_main
            for task in WAR_ARCHIVES:
                if deep_get(new, keys=f'{task}.Campaign.Event', default='campaign_main') == 'campaign_main':
                    deep_set(new,
                             keys=f'{task}.Campaign.Event',
                             value=deep_get(self.args, f'{task}.Campaign.Event.{server}'))

        # Events does not allow default stage 12-4
        def default_stage(t, stage):
            # Replace leftover main-campaign stage names with an event stage.
            if deep_get(new, keys=f'{t}.Campaign.Name', default='12-4') in ['7-2', '12-4']:
                deep_set(new, keys=f'{t}.Campaign.Name', value=stage)

        for task in EVENTS + WAR_ARCHIVES:
            default_stage(task, 'D3')
        for task in COALITIONS:
            default_stage(task, 'TC-3')

        if not is_template:
            new = self.config_redirect(old, new)
        new = self._override(new)

        return new
|
|
|
|
|
|
|
|
|
|
def config_redirect(self, old, new):
|
|
|
|
|
"""
|
|
|
|
|
Convert old settings to the new.
|
|
|
|
|
|
|
|
|
|
Args:
|
|
|
|
|
old (dict):
|
|
|
|
|
new (dict):
|
|
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
|
dict:
|
|
|
|
|
"""
|
|
|
|
|
for row in self.redirection:
|
|
|
|
|
if len(row) == 2:
|
|
|
|
|
source, target = row
|
|
|
|
|
update_func = None
|
|
|
|
|
elif len(row) == 3:
|
|
|
|
|
source, target, update_func = row
|
|
|
|
|
else:
|
|
|
|
|
continue
|
|
|
|
|
|
2022-05-20 15:25:32 +08:00
|
|
|
|
if isinstance(source, tuple):
|
|
|
|
|
value = []
|
|
|
|
|
error = False
|
|
|
|
|
for attribute in source:
|
2023-03-10 20:28:10 +08:00
|
|
|
|
tmp = deep_get(old, keys=attribute)
|
2022-05-20 15:25:32 +08:00
|
|
|
|
if tmp is None:
|
|
|
|
|
error = True
|
|
|
|
|
continue
|
|
|
|
|
value.append(tmp)
|
|
|
|
|
if error:
|
|
|
|
|
continue
|
2022-01-13 18:41:06 +08:00
|
|
|
|
else:
|
2023-03-10 20:28:10 +08:00
|
|
|
|
value = deep_get(old, keys=source)
|
2022-05-20 15:25:32 +08:00
|
|
|
|
if value is None:
|
|
|
|
|
continue
|
|
|
|
|
|
|
|
|
|
if update_func is not None:
|
|
|
|
|
value = update_func(value)
|
|
|
|
|
|
|
|
|
|
if isinstance(target, tuple):
|
2023-03-10 20:28:10 +08:00
|
|
|
|
for k, v in zip(target, value):
|
|
|
|
|
# Allow update same key
|
|
|
|
|
if (deep_get(old, keys=k) is None) or (source == target):
|
|
|
|
|
deep_set(new, keys=k, value=v)
|
|
|
|
|
elif (deep_get(old, keys=target) is None) or (source == target):
|
2022-05-20 15:25:32 +08:00
|
|
|
|
deep_set(new, keys=target, value=value)
|
2022-01-13 18:41:06 +08:00
|
|
|
|
|
2021-09-25 23:29:10 +08:00
|
|
|
|
return new
|
|
|
|
|
|
2023-08-28 17:16:24 +08:00
|
|
|
|
def _override(self, data):
|
|
|
|
|
def remove_drop_save(key):
|
|
|
|
|
value = deep_get(data, keys=key, default='do_not')
|
|
|
|
|
if value == 'save_and_upload':
|
|
|
|
|
value = 'upload'
|
|
|
|
|
deep_set(data, keys=key, value=value)
|
|
|
|
|
elif value == 'save':
|
|
|
|
|
value = 'do_not'
|
|
|
|
|
deep_set(data, keys=key, value=value)
|
|
|
|
|
|
|
|
|
|
if IS_ON_PHONE_CLOUD:
|
|
|
|
|
deep_set(data, 'Alas.Emulator.Serial', '127.0.0.1:5555')
|
|
|
|
|
deep_set(data, 'Alas.Emulator.ScreenshotMethod', 'DroidCast_raw')
|
|
|
|
|
deep_set(data, 'Alas.Emulator.ControlMethod', 'MaaTouch')
|
|
|
|
|
for arg in deep_get(self.args, keys='Alas.DropRecord', default={}).keys():
|
|
|
|
|
remove_drop_save(arg)
|
|
|
|
|
|
|
|
|
|
return data
|
|
|
|
|
|
2023-11-03 03:34:23 +08:00
|
|
|
|
def save_callback(self, key: str, value: t.Any) -> t.Iterable[t.Tuple[str, t.Any]]:
|
|
|
|
|
"""
|
|
|
|
|
Args:
|
|
|
|
|
key: Key path in config json, such as "Main.Emotion.Fleet1Value"
|
|
|
|
|
value: Value set by user, such as "98"
|
|
|
|
|
|
|
|
|
|
Yields:
|
|
|
|
|
str: Key path to set config json, such as "Main.Emotion.Fleet1Record"
|
|
|
|
|
any: Value to set, such as "2020-01-01 00:00:00"
|
|
|
|
|
"""
|
|
|
|
|
if "Emotion" in key and "Value" in key:
|
|
|
|
|
key = key.split(".")
|
|
|
|
|
key[-1] = key[-1].replace("Value", "Record")
|
|
|
|
|
yield ".".join(key), datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
2024-04-08 04:32:33 +08:00
|
|
|
|
# Oh no, dynamic dropdown update can only be used on pywebio > 1.8.0
|
|
|
|
|
# elif key == 'Alas.Emulator.ScreenshotMethod' and value == 'nemu_ipc':
|
|
|
|
|
# yield 'Alas.Emulator.ControlMethod', 'nemu_ipc'
|
|
|
|
|
# elif key == 'Alas.Emulator.ControlMethod' and value == 'nemu_ipc':
|
|
|
|
|
# yield 'Alas.Emulator.ScreenshotMethod', 'nemu_ipc'
|
2023-11-03 03:34:23 +08:00
|
|
|
|
|
2022-06-06 23:42:24 +08:00
|
|
|
|
def read_file(self, config_name, is_template=False):
|
2022-05-01 10:22:14 +08:00
|
|
|
|
"""
|
|
|
|
|
Read and update config file.
|
|
|
|
|
|
|
|
|
|
Args:
|
|
|
|
|
config_name (str): ./config/{file}.json
|
2022-06-06 23:42:24 +08:00
|
|
|
|
is_template (bool):
|
2022-05-01 10:22:14 +08:00
|
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
|
dict:
|
|
|
|
|
"""
|
|
|
|
|
old = read_file(filepath_config(config_name))
|
2023-03-10 20:28:10 +08:00
|
|
|
|
new = self.config_update(old, is_template=is_template)
|
|
|
|
|
# The updated config did not write into file, although it doesn't matters.
|
|
|
|
|
# Commented for performance issue
|
|
|
|
|
# self.write_file(config_name, new)
|
|
|
|
|
return new
|
2022-05-01 10:22:14 +08:00
|
|
|
|
|
|
|
|
|
@staticmethod
|
2022-09-01 22:13:47 +08:00
|
|
|
|
def write_file(config_name, data, mod_name='alas'):
|
2022-05-01 10:22:14 +08:00
|
|
|
|
"""
|
|
|
|
|
Write config file.
|
|
|
|
|
|
|
|
|
|
Args:
|
|
|
|
|
config_name (str): ./config/{file}.json
|
|
|
|
|
data (dict):
|
2022-09-01 22:13:47 +08:00
|
|
|
|
mod_name (str):
|
2022-05-01 10:22:14 +08:00
|
|
|
|
"""
|
2022-09-01 22:13:47 +08:00
|
|
|
|
write_file(filepath_config(config_name, mod_name), data)
|
2022-05-01 10:22:14 +08:00
|
|
|
|
|
2021-09-25 23:29:10 +08:00
|
|
|
|
@timer
|
2022-06-06 23:42:24 +08:00
|
|
|
|
def update_file(self, config_name, is_template=False):
|
2022-05-01 10:22:14 +08:00
|
|
|
|
"""
|
|
|
|
|
Read, update and write config file.
|
|
|
|
|
|
|
|
|
|
Args:
|
|
|
|
|
config_name (str): ./config/{file}.json
|
2022-06-06 23:42:24 +08:00
|
|
|
|
is_template (bool):
|
2022-05-01 10:22:14 +08:00
|
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
|
dict:
|
|
|
|
|
"""
|
2022-06-06 23:42:24 +08:00
|
|
|
|
data = self.read_file(config_name, is_template=is_template)
|
2022-05-01 10:22:14 +08:00
|
|
|
|
self.write_file(config_name, data)
|
2021-10-05 01:02:43 +08:00
|
|
|
|
return data
|
2021-09-25 23:29:10 +08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
|
if __name__ == '__main__':
    """
    Process the whole config generation.

                 task.yaml -+----------------> menu.json
             argument.yaml -+-> args.json ---> config_generated.py
             override.yaml -+       |
                  gui.yaml --------\|
                                   ||
    (old) i18n/<lang>.json --------\\========> i18n/<lang>.json
    (old) template.json ---------\\========> template.json
    """
    import os

    # Generators resolve paths relative to the Alas root folder,
    # so hop from module/config/ up to the repository root first.
    alas_root = os.path.join(os.path.dirname(__file__), '../../')
    os.chdir(alas_root)

    ConfigGenerator().generate()
    ConfigUpdater().update_file('template', is_template=True)
|