Mirror of https://github.com/dcs-retribution/dcs-retribution.git, synced 2025-11-10 15:41:24 +00:00
Merge remote-tracking branch 'khopa/develop' into develop
@@ -24,6 +24,10 @@ local unitPayloads = {
["CLSID"] = "{M2KC_RPL_522}",
["num"] = 5,
},
+ [6] = {
+ ["CLSID"] = "{Eclair}",
+ ["num"] = 10,
+ },
},
["tasks"] = {
[1] = 11,
@@ -52,6 +56,10 @@ local unitPayloads = {
["CLSID"] = "{M2KC_RPL_522}",
["num"] = 5,
},
+ [6] = {
+ ["CLSID"] = "{Eclair}",
+ ["num"] = 10,
+ },
},
["tasks"] = {
[1] = 11,
@@ -96,6 +104,10 @@ local unitPayloads = {
["CLSID"] = "{BCE4E030-38E9-423E-98ED-24BE3DA87C32}",
["num"] = 7,
},
+ [10] = {
+ ["CLSID"] = "{Eclair}",
+ ["num"] = 10,
+ },
},
["tasks"] = {
[1] = 11,
@@ -140,6 +152,10 @@ local unitPayloads = {
["CLSID"] = "{BCE4E030-38E9-423E-98ED-24BE3DA87C32}",
["num"] = 7,
},
+ [10] = {
+ ["CLSID"] = "{Eclair}",
+ ["num"] = 10,
+ },
},
["tasks"] = {
[1] = 11,
@@ -184,6 +200,10 @@ local unitPayloads = {
["CLSID"] = "{BCE4E030-38E9-423E-98ED-24BE3DA87C32}",
["num"] = 7,
},
+ [10] = {
+ ["CLSID"] = "{Eclair}",
+ ["num"] = 10,
+ },
},
["tasks"] = {
[1] = 11,
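
Note: the added lines above appear to give each affected loadout an extra {Eclair} dispenser entry ([6] or [10], ["num"] = 10) next to the existing stores. As a quick sanity check, a payload table like this can be loaded from Python with lupa (already used in this repo for beacon extraction). The sketch below is only illustrative; it assumes the .lua file ends with `return unitPayloads` and the usual payloads -> pylons nesting, which may not hold for every payload file:

    import lupa

    def load_payload_table(path):
        """Execute a payload .lua file and return its table (assumes a trailing `return unitPayloads`)."""
        lua = lupa.LuaRuntime()
        with open(path, encoding="utf-8") as f:
            return lua.execute(f.read())

    def has_store(unit_payloads, clsid):
        # Lua tables are 1-based; walk payloads -> pylons and look for the CLSID.
        for payload in unit_payloads["payloads"].values():
            for pylon in payload["pylons"].values():
                if pylon["CLSID"] == clsid:
                    return True
        return False

    # Hypothetical usage (path is made up):
    # table = load_payload_table("resources/payloads/M-2000C.lua")
    # print(has_store(table, "{Eclair}"))
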
@@ -12,14 +12,17 @@ Terrain = Union[Caucasus, PersianGulf, Syria, Nevada, Normandy, TheChannel]

SAVE_PATH = Path("resources/frontlines")


def validate_miz(file_path: Path) -> bool:
return bool(file_path.suffix == ".miz" and file_path.exists())


def validate_airports(airports: Tuple[int], terrain: Terrain):
for airport in airports:
if terrain.airport_by_id(airport) is None:
print(f"Cannot load airport for invalid id {airport}")


def load_files(files) -> List[Mission]:
missions = []
for file in files:
@@ -31,26 +34,30 @@ def load_files(files) -> List[Mission]:
print(f"Error: {file} doesn't look like a valid mission file.")
return missions


def create_frontline_dict(mission: Mission) -> Dict[str, Dict]:
frontline_dict = {}
for group in mission.country("USA").vehicle_group:
- groupname = str(group.name).replace(group.name.id, "").replace(":","")
+ groupname = str(group.name).replace(group.name.id, "").replace(":", "")
control_points = groupname.split("|")
frontline_dict[groupname] = {
"points": [(i.position.x, i.position.y) for i in group.points],
- "start_cp": int(control_points[0])
- }
+ "start_cp": int(control_points[0]),
+ }
return frontline_dict


def process_missions(missions: List[Mission]) -> None:
for mission in missions:
frontline_dict = create_frontline_dict(mission)
write_json(frontline_dict, mission.terrain.name.lower())


def write_json(frontline_dict: Dict[str, Dict], terrain_name: str) -> None:
with open(SAVE_PATH.joinpath(terrain_name + ".json"), "w") as file:
json.dump(frontline_dict, file)


if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Process a miz file to create json descriptions of multi-segment frontlines"
@@ -69,6 +76,3 @@ if __name__ == "__main__":
# frontline_dict = create_frontline_dict(missions[0])

print("Done")
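
For context on create_frontline_dict above: each USA vehicle group's name, once the translation id and colon are stripped, appears to encode two control-point IDs separated by "|", and the group's waypoints become the frontline polyline; only the first ID is kept as "start_cp". A minimal standalone sketch of that parsing, with made-up data:

    def parse_frontline_name(groupname):
        """Split a 'start|end' style group name into integer control point IDs."""
        start_cp, end_cp = (int(part) for part in groupname.split("|"))
        return start_cp, end_cp

    # Hypothetical example, not taken from a real .miz file:
    points = [(-280000.0, 640000.0), (-281500.0, 642300.0)]
    start_cp, _end_cp = parse_frontline_name("3|4")
    frontline = {"3|4": {"points": points, "start_cp": start_cp}}
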
@@ -13,7 +13,10 @@ def load_templates():

groups = {} # type: typing.Dict[str, typing.Dict[int, typing.List[Static]]]

- for static_group in temp_mis.country("USA").static_group + temp_mis.country("USAF Aggressors").static_group:
+ for static_group in (
+ temp_mis.country("USA").static_group
+ + temp_mis.country("USAF Aggressors").static_group
+ ):
for static in static_group.units:
static_name = str(static.name).split()[0]
tpl_name, tpl_idx = static_name[:-1], int(static_name[-1])
@@ -34,13 +37,19 @@ def load_templates():
a = aa
b = bb

- center = a.position.point_from_heading(a.position.heading_between_point(b.position), dist / 2)
+ center = a.position.point_from_heading(
+ a.position.heading_between_point(b.position), dist / 2
+ )
for static in static_groups:
- tpls[category_name][idx].append({
- "type": static.type,
- "offset": Point(center.x - static.position.x, center.y - static.position.y),
- "heading": static.heading,
- })
+ tpls[category_name][idx].append(
+ {
+ "type": static.type,
+ "offset": Point(
+ center.x - static.position.x, center.y - static.position.y
+ ),
+ "heading": static.heading,
+ }
+ )

tpls["aa"] = {0: [{"type": "AA", "offset": Point(0, 0), "heading": 0}]}
return tpls
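
The reformatted block above is midpoint-and-offset bookkeeping: a and b are the two farthest-apart statics, center is the point halfway between them (assuming dist here is the a-to-b distance), and each static is stored with its displacement from that center. The same arithmetic with plain tuples and invented positions:

    def midpoint(a, b):
        # point_from_heading(heading_between_point(b), dist / 2) walks half the
        # distance from a toward b, i.e. the segment midpoint.
        return ((a[0] + b[0]) / 2, (a[1] + b[1]) / 2)

    def offset_from(center, pos):
        # Matches the stored "offset": center minus the static's position.
        return (center[0] - pos[0], center[1] - pos[1])

    a, b = (0.0, 0.0), (100.0, 40.0)
    center = midpoint(a, b)          # (50.0, 20.0)
    print(offset_from(center, a))    # (50.0, 20.0)
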
@@ -11,7 +11,8 @@ from game.theater.landmap import Landmap
@singledispatch
def to_multipoly(obj) -> MultiPolygon:
raise NotImplementedError(
- f"to_multipoly not implemented for {obj.__class__.__name__}")
+ f"to_multipoly not implemented for {obj.__class__.__name__}"
+ )


@to_multipoly.register
@@ -28,8 +29,7 @@ def _multipoly_to_multipoly(obj: MultiPolygon) -> MultiPolygon:
def _geometry_collection_to_multipoly(obj: GeometryCollection) -> MultiPolygon:
if obj.is_empty:
return MultiPolygon()
- raise RuntimeError(
- f"Not sure how to convert collection to multipoly: {obj.wkt}")
+ raise RuntimeError(f"Not sure how to convert collection to multipoly: {obj.wkt}")


for terrain in ["cau", "nev", "syria", "channel", "normandy", "gulf"]:
@@ -61,6 +61,11 @@ for terrain in ["cau", "nev", "syria", "channel", "normandy", "gulf"]:

with open("../{}landmap.p".format(terrain), "wb") as f:
print(len(inclusion_zones), len(exclusion_zones), len(seas_zones))
- pickle.dump(Landmap(to_multipoly(unary_union(inclusion_zones)),
- to_multipoly(unary_union(exclusion_zones)),
- to_multipoly(unary_union(seas_zones))), f)
+ pickle.dump(
+ Landmap(
+ to_multipoly(unary_union(inclusion_zones)),
+ to_multipoly(unary_union(exclusion_zones)),
+ to_multipoly(unary_union(seas_zones)),
+ ),
+ f,
+ )
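
to_multipoly is a functools.singledispatch function: the undecorated base implementation raises, and converters for concrete geometry types are attached with @to_multipoly.register. A minimal sketch of how a handler for a plain shapely Polygon (not shown in these hunks) would plug in:

    from functools import singledispatch
    from shapely.geometry import MultiPolygon, Polygon

    @singledispatch
    def to_multipoly(obj) -> MultiPolygon:
        raise NotImplementedError(
            f"to_multipoly not implemented for {obj.__class__.__name__}"
        )

    @to_multipoly.register
    def _poly_to_multipoly(obj: Polygon) -> MultiPolygon:
        # Wrap the single polygon so callers always get a MultiPolygon back.
        return MultiPolygon([obj])
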
@@ -70,13 +70,19 @@ def beacons_from_terrain(dcs_path: Path, path: Path) -> Iterable[Beacon]:
with cd(dcs_path):
lua = lupa.LuaRuntime()

- lua.execute(textwrap.dedent("""\
+ lua.execute(
+ textwrap.dedent(
+ """\
function module(name)
end

- """))
+ """
+ )
+ )

- bind_gettext = lua.eval(textwrap.dedent("""\
+ bind_gettext = lua.eval(
+ textwrap.dedent(
+ """\
function(py_gettext)
package.preload["i_18n"] = function()
return {
@@ -85,16 +91,20 @@ def beacons_from_terrain(dcs_path: Path, path: Path) -> Iterable[Beacon]:
end
end

- """))
+ """
+ )
+ )

try:
translator = gettext.translation(
- "messages", path / "l10n", languages=["en"])
+ "messages", path / "l10n", languages=["en"]
+ )

def translate(message_name: str) -> str:
if not message_name:
return message_name
return translator.gettext(message_name)

except FileNotFoundError:
# TheChannel has no locale data for English.
def translate(message_name: str) -> str:
@@ -126,7 +136,7 @@ def beacons_from_terrain(dcs_path: Path, path: Path) -> Iterable[Beacon]:
beacon["callsign"],
beacon_type,
convert_lua_frequency(beacon["frequency"]),
- getattr(beacon, "channel", None)
+ getattr(beacon, "channel", None),
)

@@ -152,9 +162,10 @@ class Importer:
def export_beacons(self, terrain: str, beacons: Iterable[Beacon]) -> None:
terrain_py_path = self.export_dir / f"{terrain.lower()}.json"
import json
- terrain_py_path.write_text(json.dumps([
- dataclasses.asdict(b) for b in beacons
- ], indent=True))
+ terrain_py_path.write_text(
+ json.dumps([dataclasses.asdict(b) for b in beacons], indent=True)
+ )


def parse_args() -> argparse.Namespace:
@@ -169,13 +180,14 @@ def parse_args() -> argparse.Namespace:
"--export-to",
type=resolved_path,
default=EXPORT_DIR,
- help="Output directory for generated JSON files.")
+ help="Output directory for generated JSON files.",
+ )

parser.add_argument(
"dcs_path",
metavar="DCS_PATH",
type=resolved_path,
- help="Path to DCS installation."
+ help="Path to DCS installation.",
)

return parser.parse_args()
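
export_beacons above serializes Beacon instances, which must be dataclasses given the dataclasses.asdict call, straight to JSON. The sketch below mirrors that pattern; the field names are guesses based on the constructor call earlier in the diff (callsign, type, frequency, channel) and are not the repository's actual definition:

    import dataclasses
    import json
    from typing import Optional

    @dataclasses.dataclass(frozen=True)
    class Beacon:
        # Hypothetical field set inferred from the call site; the real class
        # in this repo may have more fields or different names.
        callsign: str
        beacon_type: int
        hertz: int
        channel: Optional[int]

    beacons = [Beacon("ABK", 4, 114300000, None)]
    print(json.dumps([dataclasses.asdict(b) for b in beacons], indent=True))
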
@@ -36,7 +36,13 @@ def _zip_dir(archieve, path):


def _mk_archieve():
- path = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, "build", "dcs_liberation_{}.zip".format(VERSION))
+ path = os.path.join(
+ os.path.dirname(__file__),
+ os.pardir,
+ os.pardir,
+ "build",
+ "dcs_liberation_{}.zip".format(VERSION),
+ )
if os.path.exists(path):
print("version already exists")
return
@@ -46,9 +52,9 @@ def _mk_archieve():
except FileNotFoundError:
pass
os.system("pyinstaller.exe --clean pyinstaller.spec")
- #archieve = ZipFile(path, "w")
- #archieve.writestr("dcs_liberation.bat", "cd dist\\dcs_liberation\r\nliberation_main \"%UserProfile%\\Saved Games\" \"{}\"".format(VERSION))
- #_zip_dir(archieve, "./dist/dcs_liberation")
+ # archieve = ZipFile(path, "w")
+ # archieve.writestr("dcs_liberation.bat", "cd dist\\dcs_liberation\r\nliberation_main \"%UserProfile%\\Saved Games\" \"{}\"".format(VERSION))
+ # _zip_dir(archieve, "./dist/dcs_liberation")


_mk_archieve()
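
The commented-out packaging code relies on the _zip_dir(archieve, path) helper named in the hunk header above. For orientation only, one plausible way such a helper could be written; this is an assumption, not the repository's implementation, and the relative-path layout inside the archive is a guess:

    import os
    from zipfile import ZIP_DEFLATED, ZipFile

    def zip_dir(archive, path):
        # Walk `path` and add every file under a name relative to its parent
        # directory, so the archive contains a single top-level folder.
        base = os.path.dirname(os.path.abspath(path))
        for root, _dirs, files in os.walk(path):
            for name in files:
                full = os.path.join(root, name)
                archive.write(full, arcname=os.path.relpath(full, base))

    # with ZipFile("build/example.zip", "w", ZIP_DEFLATED) as archive:
    #     zip_dir(archive, "./dist/dcs_liberation")
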