# Copyright 2014-2016 The Meson development team

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations
import copy
import itertools
import os
import xml.dom.minidom
import xml.etree.ElementTree as ET
import uuid
import typing as T
from pathlib import Path, PurePath
import re
from collections import Counter

from . import backends
from .. import build
from .. import mlog
from .. import compilers
from .. import mesonlib
from ..mesonlib import (
    File, MesonBugException, MesonException, replace_if_different, OptionKey, version_compare, MachineChoice
)
from ..environment import Environment, build_filename
from .. import coredata

if T.TYPE_CHECKING:
    from ..arglist import CompilerArgs
    from ..interpreter import Interpreter

Project = T.Tuple[str, Path, str, MachineChoice]

def autodetect_vs_version(build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]) -> backends.Backend:
    vs_version = os.getenv('VisualStudioVersion', None)
    vs_install_dir = os.getenv('VSINSTALLDIR', None)
    if not vs_install_dir:
        raise MesonException('Could not detect Visual Studio: Environment variable VSINSTALLDIR is not set!\n'
                             'Are you running meson from the Visual Studio Developer Command Prompt?')
    # VisualStudioVersion is set since Visual Studio 11.0, but sometimes
    # vcvarsall.bat doesn't set it, so also use VSINSTALLDIR
    if vs_version == '11.0' or 'Visual Studio 11' in vs_install_dir:
        from mesonbuild.backend.vs2012backend import Vs2012Backend
        return Vs2012Backend(build, interpreter)
    if vs_version == '12.0' or 'Visual Studio 12' in vs_install_dir:
        from mesonbuild.backend.vs2013backend import Vs2013Backend
        return Vs2013Backend(build, interpreter)
    if vs_version == '14.0' or 'Visual Studio 14' in vs_install_dir:
        from mesonbuild.backend.vs2015backend import Vs2015Backend
        return Vs2015Backend(build, interpreter)
    if vs_version == '15.0' or 'Visual Studio 17' in vs_install_dir or \
            'Visual Studio\\2017' in vs_install_dir:
        from mesonbuild.backend.vs2017backend import Vs2017Backend
        return Vs2017Backend(build, interpreter)
    if vs_version == '16.0' or 'Visual Studio 19' in vs_install_dir or \
            'Visual Studio\\2019' in vs_install_dir:
        from mesonbuild.backend.vs2019backend import Vs2019Backend
        return Vs2019Backend(build, interpreter)
    if vs_version == '17.0' or 'Visual Studio 22' in vs_install_dir or \
            'Visual Studio\\2022' in vs_install_dir:
        from mesonbuild.backend.vs2022backend import Vs2022Backend
        return Vs2022Backend(build, interpreter)
    if 'Visual Studio 10.0' in vs_install_dir:
        return Vs2010Backend(build, interpreter)
    raise MesonException('Could not detect Visual Studio using VisualStudioVersion: {!r} or VSINSTALLDIR: {!r}!\n'
                         'Please specify the exact backend to use.'.format(vs_version, vs_install_dir))
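
# Illustrative note (hypothetical environment values): inside a VS2019 Developer
# Command Prompt one would typically see VisualStudioVersion='16.0' and a
# VSINSTALLDIR pointing into a 'Visual Studio\2019' install, in which case
# autodetect_vs_version() above returns a Vs2019Backend instance.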

def split_o_flags_args(args: T.List[str]) -> T.List[str]:
    """
    Splits any /O args and returns them. Does not take care of flags overriding
    previous ones. Skips non-O flag arguments.

    ['/Ox', '/Ob1'] returns ['/Ox', '/Ob1']
    ['/Oxj', '/MP'] returns ['/Ox', '/Oj']
    """
    o_flags = []
    for arg in args:
        if not arg.startswith('/O'):
            continue
        flags = list(arg[2:])
        # Assume that this one can't be clumped with the others since it takes
        # an argument itself
        if 'b' in flags:
            o_flags.append(arg)
        else:
            o_flags += ['/O' + f for f in flags]
    return o_flags

def generate_guid_from_path(path, path_type) -> str:
    return str(uuid.uuid5(uuid.NAMESPACE_URL, 'meson-vs-' + path_type + ':' + str(path))).upper()
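
# Illustrative note: the GUID above is a deterministic uuid5 of the prefixed path,
# so regenerating the solution produces stable project/subdir GUIDs; e.g.
# generate_guid_from_path(PurePath('sub/dir'), 'subdir') always yields the same
# upper-cased value (hypothetical example) for the same arguments.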

def detect_microsoft_gdk(platform: str) -> bool:
    return re.match(r'Gaming\.(Desktop|Xbox.XboxOne|Xbox.Scarlett)\.x64', platform, re.IGNORECASE)

def filtered_src_langs_generator(sources: T.List[str]):
    for src in sources:
        ext = src.split('.')[-1]
        if compilers.compilers.is_source_suffix(ext):
            yield compilers.compilers.SUFFIX_TO_LANG[ext]

# Returns the source language (i.e. a key from 'lang_suffixes') of the most frequent source language in the given
# list of sources.
# We choose the most frequent language as 'primary' because it means the most sources in a target/project can
# simply refer to the project's shared intellisense define and include fields, rather than have to fill out their
# own duplicate full set of defines/includes/opts intellisense fields. All of which helps keep the vcxproj file
# size down.
def get_primary_source_lang(target_sources: T.List[File], custom_sources: T.List[str]) -> T.Optional[str]:
    lang_counts = Counter([compilers.compilers.SUFFIX_TO_LANG[src.suffix] for src in target_sources if compilers.compilers.is_source_suffix(src.suffix)])
    lang_counts += Counter(filtered_src_langs_generator(custom_sources))
    most_common_lang_list = lang_counts.most_common(1)
    # It may be possible that we have a target with no actual src files of interest (e.g. a generator target),
    # leaving us with an empty list, which we should handle -
    return most_common_lang_list[0][0] if most_common_lang_list else None
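
# Illustrative sketch (hypothetical file lists): for target sources
# ['main.cpp', 'a.cpp', 'b.c'] plus custom sources ['gen.cpp'], the counts are
# {'cpp': 3, 'c': 1}, so 'cpp' is returned and becomes the project-wide
# intellisense language while 'c' sources get per-file fields.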

# Returns a dictionary (by [src type][build type]) that contains a tuple of -
#    (pre-processor defines, include paths, additional compiler options)
# fields to use to fill in the respective intellisense fields of sources that can't simply
# reference and re-use the shared 'primary' language intellisense fields of the vcxproj.
def get_non_primary_lang_intellisense_fields(vslite_ctx: dict,
                                             target_id: str,
                                             primary_src_lang: str) -> T.Dict[str, T.Dict[str, T.Tuple[str, str, str]]]:
    defs_paths_opts_per_lang_and_buildtype = {}
    for buildtype in coredata.get_genvs_default_buildtype_list():
        captured_build_args = vslite_ctx[buildtype][target_id]  # Results in a 'Src types to compile args' dict
        non_primary_build_args_per_src_lang = [(lang, build_args) for lang, build_args in captured_build_args.items() if lang != primary_src_lang]  # Only need to individually populate intellisense fields for sources of non-primary types.
        for src_lang, args_list in non_primary_build_args_per_src_lang:
            if src_lang not in defs_paths_opts_per_lang_and_buildtype:
                defs_paths_opts_per_lang_and_buildtype[src_lang] = {}
            defs = Vs2010Backend.extract_nmake_preprocessor_defs(args_list)
            paths = Vs2010Backend.extract_nmake_include_paths(args_list)
            opts = Vs2010Backend.extract_intellisense_additional_compiler_options(args_list)
            defs_paths_opts_per_lang_and_buildtype[src_lang][buildtype] = (defs, paths, opts)
    return defs_paths_opts_per_lang_and_buildtype
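
# Illustrative shape of the returned mapping (hypothetical buildtype keys and values),
# assuming 'cpp' is the primary language and 'c' is not:
#     {'c': {'debug':   ('MY_DEF=1;', '..\\include;', '/W2;'),
#            'release': ('MY_DEF=1;', '..\\include;', '/W2;/O2;')}}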

class Vs2010Backend(backends.Backend):

    name = 'vs2010'

    def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Interpreter], gen_lite: bool = False):
        super().__init__(build, interpreter)
        self.project_file_version = '10.0.30319.1'
        self.sln_file_version = '11.00'
        self.sln_version_comment = '2010'
        self.platform_toolset = None
        self.vs_version = '2010'
        self.windows_target_platform_version = None
        self.subdirs = {}
        self.handled_target_deps = {}
        self.gen_lite = gen_lite  # Synonymous with generating the simpler makefile-style multi-config projects that invoke 'meson compile' builds, avoiding native MSBuild complications

    def get_target_private_dir(self, target):
        return os.path.join(self.get_target_dir(target), target.get_id())

    def generate_genlist_for_target(self, genlist: T.Union[build.GeneratedList, build.CustomTarget, build.CustomTargetIndex], target: build.BuildTarget, parent_node: ET.Element, generator_output_files: T.List[str], custom_target_include_dirs: T.List[str], custom_target_output_files: T.List[str]) -> None:
        if isinstance(genlist, build.GeneratedList):
            for x in genlist.depends:
                self.generate_genlist_for_target(x, target, parent_node, [], [], [])
        target_private_dir = self.relpath(self.get_target_private_dir(target), self.get_target_dir(target))
        down = self.target_to_build_root(target)
        if isinstance(genlist, (build.CustomTarget, build.CustomTargetIndex)):
            for i in genlist.get_outputs():
                # Path to the generated source from the current vcxproj dir via the build root
                ipath = os.path.join(down, self.get_target_dir(genlist), i)
                custom_target_output_files.append(ipath)
            idir = self.relpath(self.get_target_dir(genlist), self.get_target_dir(target))
            if idir not in custom_target_include_dirs:
                custom_target_include_dirs.append(idir)
        else:
            generator = genlist.get_generator()
            exe = generator.get_exe()
            infilelist = genlist.get_inputs()
            outfilelist = genlist.get_outputs()
            source_dir = os.path.join(down, self.build_to_src, genlist.subdir)
            idgroup = ET.SubElement(parent_node, 'ItemGroup')
            samelen = len(infilelist) == len(outfilelist)
            for i, curfile in enumerate(infilelist):
                if samelen:
                    sole_output = os.path.join(target_private_dir, outfilelist[i])
                else:
                    sole_output = ''
                infilename = os.path.join(down, curfile.rel_to_builddir(self.build_to_src, target_private_dir))
                deps = self.get_custom_target_depend_files(genlist, True)
                base_args = generator.get_arglist(infilename)
                outfiles_rel = genlist.get_outputs_for(curfile)
                outfiles = [os.path.join(target_private_dir, of) for of in outfiles_rel]
                generator_output_files += outfiles
                args = [x.replace("@INPUT@", infilename).replace('@OUTPUT@', sole_output)
                        for x in base_args]
                args = self.replace_outputs(args, target_private_dir, outfiles_rel)
                args = [x.replace("@SOURCE_DIR@", self.environment.get_source_dir())
                         .replace("@BUILD_DIR@", target_private_dir)
                        for x in args]
                args = [x.replace("@CURRENT_SOURCE_DIR@", source_dir) for x in args]
                args = [x.replace("@SOURCE_ROOT@", self.environment.get_source_dir())
                         .replace("@BUILD_ROOT@", self.environment.get_build_dir())
                        for x in args]
                args = [x.replace('\\', '/') for x in args]
                # Always use a wrapper because MSBuild eats random characters when
                # there are many arguments.
                tdir_abs = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target))
                cmd, _ = self.as_meson_exe_cmdline(
                    exe,
                    self.replace_extra_args(args, genlist),
                    workdir=tdir_abs,
                    capture=outfiles[0] if generator.capture else None,
                    force_serialize=True
                )
                deps = cmd[-1:] + deps
                abs_pdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target))
                os.makedirs(abs_pdir, exist_ok=True)
                cbs = ET.SubElement(idgroup, 'CustomBuild', Include=infilename)
                ET.SubElement(cbs, 'Command').text = ' '.join(self.quote_arguments(cmd))
                ET.SubElement(cbs, 'Outputs').text = ';'.join(outfiles)
                ET.SubElement(cbs, 'AdditionalInputs').text = ';'.join(deps)

    def generate_custom_generator_commands(self, target, parent_node):
        generator_output_files = []
        custom_target_include_dirs = []
        custom_target_output_files = []
        for genlist in target.get_generated_sources():
            self.generate_genlist_for_target(genlist, target, parent_node, generator_output_files, custom_target_include_dirs, custom_target_output_files)
        return generator_output_files, custom_target_output_files, custom_target_include_dirs

    def generate(self,
                 capture: bool = False,
                 vslite_ctx: dict = None) -> T.Optional[dict]:
        # Check for (currently) unexpected capture arg use cases -
        if capture:
            raise MesonBugException('We do not expect any vs backend to generate with \'capture = True\'')
        target_machine = self.interpreter.builtin['target_machine'].cpu_family_method(None, None)
        if target_machine in {'64', 'x86_64'}:
            # amd64 or x86_64
            target_system = self.interpreter.builtin['target_machine'].system_method(None, None)
            if detect_microsoft_gdk(target_system):
                self.platform = target_system
            else:
                self.platform = 'x64'
        elif target_machine == 'x86':
            # x86
            self.platform = 'Win32'
        elif target_machine in {'aarch64', 'arm64'}:
            target_cpu = self.interpreter.builtin['target_machine'].cpu_method(None, None)
            if target_cpu == 'arm64ec':
                self.platform = 'arm64ec'
            else:
                self.platform = 'arm64'
        elif 'arm' in target_machine.lower():
            self.platform = 'ARM'
        else:
            raise MesonException('Unsupported Visual Studio platform: ' + target_machine)

        build_machine = self.interpreter.builtin['build_machine'].cpu_family_method(None, None)
        if build_machine in {'64', 'x86_64'}:
            # amd64 or x86_64
            self.build_platform = 'x64'
        elif build_machine == 'x86':
            # x86
            self.build_platform = 'Win32'
        elif build_machine in {'aarch64', 'arm64'}:
            target_cpu = self.interpreter.builtin['build_machine'].cpu_method(None, None)
            if target_cpu == 'arm64ec':
                self.build_platform = 'arm64ec'
            else:
                self.build_platform = 'arm64'
        elif 'arm' in build_machine.lower():
            self.build_platform = 'ARM'
        else:
            raise MesonException('Unsupported Visual Studio platform: ' + build_machine)

        self.buildtype = self.environment.coredata.get_option(OptionKey('buildtype'))
        self.optimization = self.environment.coredata.get_option(OptionKey('optimization'))
        self.debug = self.environment.coredata.get_option(OptionKey('debug'))
        try:
            self.sanitize = self.environment.coredata.get_option(OptionKey('b_sanitize'))
        except MesonException:
            self.sanitize = 'none'
        sln_filename = os.path.join(self.environment.get_build_dir(), self.build.project_name + '.sln')
        projlist = self.generate_projects(vslite_ctx)
        self.gen_testproj()
        self.gen_installproj()
        self.gen_regenproj()
        self.generate_solution(sln_filename, projlist)
        self.generate_regen_info()
        Vs2010Backend.touch_regen_timestamp(self.environment.get_build_dir())

    @staticmethod
    def get_regen_stampfile(build_dir: str) -> str:
        return os.path.join(os.path.join(build_dir, Environment.private_dir), 'regen.stamp')

    @staticmethod
    def touch_regen_timestamp(build_dir: str) -> None:
        with open(Vs2010Backend.get_regen_stampfile(build_dir), 'w', encoding='utf-8'):
            pass

    def get_vcvars_command(self):
        has_arch_values = 'VSCMD_ARG_TGT_ARCH' in os.environ and 'VSCMD_ARG_HOST_ARCH' in os.environ

        # Use vcvarsall.bat if we found it.
        if 'VCINSTALLDIR' in os.environ:
            vs_version = os.environ['VisualStudioVersion'] \
                if 'VisualStudioVersion' in os.environ else None
            relative_path = 'Auxiliary\\Build\\' if vs_version is not None and vs_version >= '15.0' else ''
            script_path = os.environ['VCINSTALLDIR'] + relative_path + 'vcvarsall.bat'
            if os.path.exists(script_path):
                if has_arch_values:
                    target_arch = os.environ['VSCMD_ARG_TGT_ARCH']
                    host_arch = os.environ['VSCMD_ARG_HOST_ARCH']
                else:
                    target_arch = os.environ.get('Platform', 'x86')
                    host_arch = target_arch
                arch = host_arch + '_' + target_arch if host_arch != target_arch else target_arch
                return f'"{script_path}" {arch}'

        # Otherwise try the VS2017 Developer Command Prompt.
        if 'VS150COMNTOOLS' in os.environ and has_arch_values:
            script_path = os.environ['VS150COMNTOOLS'] + 'VsDevCmd.bat'
            if os.path.exists(script_path):
                return '"%s" -arch=%s -host_arch=%s' % \
                    (script_path, os.environ['VSCMD_ARG_TGT_ARCH'], os.environ['VSCMD_ARG_HOST_ARCH'])
        return ''
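
    # Illustrative note (hypothetical environment): inside an x64 Developer Command
    # Prompt the method above typically yields something like
    # '"C:\\...\\VC\\Auxiliary\\Build\\vcvarsall.bat" x64', and '' when no suitable
    # script can be found.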

    def get_obj_target_deps(self, obj_list):
        result = {}
        for o in obj_list:
            if isinstance(o, build.ExtractedObjects):
                result[o.target.get_id()] = o.target
        return result.items()

    def get_target_deps(self, t: T.Dict[T.Any, build.Target], recursive=False):
        all_deps: T.Dict[str, build.Target] = {}
        for target in t.values():
            if isinstance(target, build.CustomTarget):
                for d in target.get_target_dependencies():
                    # FIXME: this isn't strictly correct, as the target doesn't
                    # Get dependencies on non-targets, such as Files
                    if isinstance(d, build.Target):
                        all_deps[d.get_id()] = d
            elif isinstance(target, build.RunTarget):
                for d in target.get_dependencies():
                    all_deps[d.get_id()] = d
            elif isinstance(target, build.BuildTarget):
                for ldep in target.link_targets:
                    if isinstance(ldep, build.CustomTargetIndex):
                        all_deps[ldep.get_id()] = ldep.target
                    else:
                        all_deps[ldep.get_id()] = ldep
                for ldep in target.link_whole_targets:
                    if isinstance(ldep, build.CustomTargetIndex):
                        all_deps[ldep.get_id()] = ldep.target
                    else:
                        all_deps[ldep.get_id()] = ldep

                for ldep in target.link_depends:
                    if isinstance(ldep, build.CustomTargetIndex):
                        all_deps[ldep.get_id()] = ldep.target
                    elif isinstance(ldep, File):
                        # Already built, no target references needed
                        pass
                    else:
                        all_deps[ldep.get_id()] = ldep

                for obj_id, objdep in self.get_obj_target_deps(target.objects):
                    all_deps[obj_id] = objdep
            else:
                raise MesonException(f'Unknown target type for target {target}')

            for gendep in target.get_generated_sources():
                if isinstance(gendep, build.CustomTarget):
                    all_deps[gendep.get_id()] = gendep
                elif isinstance(gendep, build.CustomTargetIndex):
                    all_deps[gendep.target.get_id()] = gendep.target
                else:
                    generator = gendep.get_generator()
                    gen_exe = generator.get_exe()
                    if isinstance(gen_exe, build.Executable):
                        all_deps[gen_exe.get_id()] = gen_exe
                    for d in itertools.chain(generator.depends, gendep.depends):
                        if isinstance(d, build.CustomTargetIndex):
                            all_deps[d.get_id()] = d.target
                        elif isinstance(d, build.Target):
                            all_deps[d.get_id()] = d
                        # FIXME: we don't handle other kinds of deps correctly here, such
                        # as GeneratedLists, StructuredSources, and generated File.

        if not t or not recursive:
            return all_deps
        ret = self.get_target_deps(all_deps, recursive)
        ret.update(all_deps)
        return ret

    def generate_solution_dirs(self, ofile: str, parents: T.Sequence[Path]) -> None:
        prj_templ = 'Project("{%s}") = "%s", "%s", "{%s}"\n'
        iterpaths = reversed(parents)
        # Skip first path
        next(iterpaths)
        for path in iterpaths:
            if path not in self.subdirs:
                basename = path.name
                identifier = generate_guid_from_path(path, 'subdir')
                # top-level directories have None as their parent_dir
                parent_dir = path.parent
                parent_identifier = self.subdirs[parent_dir][0] \
                    if parent_dir != PurePath('.') else None
                self.subdirs[path] = (identifier, parent_identifier)
                prj_line = prj_templ % (
                    self.environment.coredata.lang_guids['directory'],
                    basename, basename, self.subdirs[path][0])
                ofile.write(prj_line)
                ofile.write('EndProject\n')

    def generate_solution(self, sln_filename: str, projlist: T.List[Project]) -> None:
        default_projlist = self.get_build_by_default_targets()
        default_projlist.update(self.get_testlike_targets())
        sln_filename_tmp = sln_filename + '~'
        # Note using the utf-8 BOM requires the blank line, otherwise Visual Studio Version Selector fails.
        # Without the BOM, VSVS fails if there is a blank line.
        with open(sln_filename_tmp, 'w', encoding='utf-8-sig') as ofile:
            ofile.write('\nMicrosoft Visual Studio Solution File, Format Version %s\n' % self.sln_file_version)
            ofile.write('# Visual Studio %s\n' % self.sln_version_comment)
            prj_templ = 'Project("{%s}") = "%s", "%s", "{%s}"\n'
            for prj in projlist:
                if self.environment.coredata.get_option(OptionKey('layout')) == 'mirror':
                    self.generate_solution_dirs(ofile, prj[1].parents)
                target = self.build.targets[prj[0]]
                lang = 'default'
                if hasattr(target, 'compilers') and target.compilers:
                    for lang_out in target.compilers.keys():
                        lang = lang_out
                        break
                prj_line = prj_templ % (
                    self.environment.coredata.lang_guids[lang],
                    prj[0], prj[1], prj[2])
                ofile.write(prj_line)
                target_dict = {target.get_id(): target}
                # Get recursive deps
                recursive_deps = self.get_target_deps(
                    target_dict, recursive=True)
                ofile.write('EndProject\n')
                for dep, target in recursive_deps.items():
                    if prj[0] in default_projlist:
                        default_projlist[dep] = target

            test_line = prj_templ % (self.environment.coredata.lang_guids['default'],
                                     'RUN_TESTS', 'RUN_TESTS.vcxproj',
                                     self.environment.coredata.test_guid)
            ofile.write(test_line)
            ofile.write('EndProject\n')
            if self.gen_lite:  # REGEN is replaced by the lighter-weight RECONFIGURE utility, for now. See comment in 'gen_regenproj'
                regen_proj_name = 'RECONFIGURE'
                regen_proj_fname = 'RECONFIGURE.vcxproj'
            else:
                regen_proj_name = 'REGEN'
                regen_proj_fname = 'REGEN.vcxproj'
            regen_line = prj_templ % (self.environment.coredata.lang_guids['default'],
                                      regen_proj_name, regen_proj_fname,
                                      self.environment.coredata.regen_guid)
            ofile.write(regen_line)
            ofile.write('EndProject\n')
            install_line = prj_templ % (self.environment.coredata.lang_guids['default'],
                                        'RUN_INSTALL', 'RUN_INSTALL.vcxproj',
                                        self.environment.coredata.install_guid)
            ofile.write(install_line)
            ofile.write('EndProject\n')
            ofile.write('Global\n')
            ofile.write('\tGlobalSection(SolutionConfigurationPlatforms) = '
                        'preSolution\n')
            multi_config_buildtype_list = coredata.get_genvs_default_buildtype_list() if self.gen_lite else [self.buildtype]
            for buildtype in multi_config_buildtype_list:
                ofile.write('\t\t%s|%s = %s|%s\n' %
                            (buildtype, self.platform, buildtype,
                             self.platform))
            ofile.write('\tEndGlobalSection\n')
            ofile.write('\tGlobalSection(ProjectConfigurationPlatforms) = '
                        'postSolution\n')
            # REGEN project (multi-)configurations
            for buildtype in multi_config_buildtype_list:
                ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
                            (self.environment.coredata.regen_guid, buildtype,
                             self.platform, buildtype, self.platform))
                if not self.gen_lite:  # With a 'genvslite'-generated solution, the regen (i.e. reconfigure) utility is only intended to run when the user explicitly builds this proj.
                    ofile.write('\t\t{%s}.%s|%s.Build.0 = %s|%s\n' %
                                (self.environment.coredata.regen_guid, buildtype,
                                 self.platform, buildtype, self.platform))
            # Create the solution configuration
            for p in projlist:
                if p[3] is MachineChoice.BUILD:
                    config_platform = self.build_platform
                else:
                    config_platform = self.platform
                # Add to the list of projects in this solution
                for buildtype in multi_config_buildtype_list:
                    ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
                                (p[2], buildtype, self.platform,
                                 buildtype, config_platform))
                    # If we're building the solution with Visual Studio's build system, enable building of buildable
                    # projects. However, if we're building with meson (via --genvslite), then, since each project's
                    # 'build' action just ends up doing the same 'meson compile ...' we don't want the 'solution build'
                    # repeatedly going off and doing the same 'meson compile ...' multiple times over, so we just
                    # leave it up to the user to select or build just one project.
                    # FIXME: Would be slightly nicer if we could enable building of just one top level target/project,
                    # but not sure how to identify that.
                    if not self.gen_lite and \
                            p[0] in default_projlist and \
                            not isinstance(self.build.targets[p[0]], build.RunTarget):
                        ofile.write('\t\t{%s}.%s|%s.Build.0 = %s|%s\n' %
                                    (p[2], buildtype, self.platform,
                                     buildtype, config_platform))
            # RUN_TESTS and RUN_INSTALL project (multi-)configurations
            for buildtype in multi_config_buildtype_list:
                ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
                            (self.environment.coredata.test_guid, buildtype,
                             self.platform, buildtype, self.platform))
                ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' %
                            (self.environment.coredata.install_guid, buildtype,
                             self.platform, buildtype, self.platform))
            ofile.write('\tEndGlobalSection\n')
            ofile.write('\tGlobalSection(SolutionProperties) = preSolution\n')
            ofile.write('\t\tHideSolutionNode = FALSE\n')
            ofile.write('\tEndGlobalSection\n')
            if self.subdirs:
                ofile.write('\tGlobalSection(NestedProjects) = '
                            'preSolution\n')
                for p in projlist:
                    if p[1].parent != PurePath('.'):
                        ofile.write("\t\t{{{}}} = {{{}}}\n".format(p[2], self.subdirs[p[1].parent][0]))
                for subdir in self.subdirs.values():
                    if subdir[1]:
                        ofile.write("\t\t{{{}}} = {{{}}}\n".format(subdir[0], subdir[1]))
                ofile.write('\tEndGlobalSection\n')
            ofile.write('EndGlobal\n')
        replace_if_different(sln_filename, sln_filename_tmp)
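
    # Illustrative sketch of the emitted solution text (abridged, hypothetical GUIDs):
    #     Microsoft Visual Studio Solution File, Format Version 11.00
    #     # Visual Studio 2010
    #     Project("{8BC9CEB8-...}") = "mylib", "mylib.vcxproj", "{4AC7...}"
    #     EndProject
    #     Global
    #         GlobalSection(SolutionConfigurationPlatforms) = preSolution
    #         ...
    #     EndGlobal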

    def generate_projects(self, vslite_ctx: dict = None) -> T.List[Project]:
        startup_project = self.environment.coredata.options[OptionKey('backend_startup_project')].value
        projlist: T.List[Project] = []
        startup_idx = 0
        for (i, (name, target)) in enumerate(self.build.targets.items()):
            if startup_project and startup_project == target.get_basename():
                startup_idx = i
            outdir = Path(
                self.environment.get_build_dir(),
                self.get_target_dir(target)
            )
            outdir.mkdir(exist_ok=True, parents=True)
            fname = target.get_basename() + '.vcxproj'
            target_dir = PurePath(self.get_target_dir(target))
            relname = target_dir / fname
            projfile_path = outdir / fname
            proj_uuid = self.environment.coredata.target_guids[name]
            generated = self.gen_vcxproj(target, str(projfile_path), proj_uuid, vslite_ctx)
            if generated:
                projlist.append((name, relname, proj_uuid, target.for_machine))

        # Put the startup project first in the project list
        if startup_idx:
            projlist.insert(0, projlist.pop(startup_idx))

        return projlist

    def split_sources(self, srclist):
        sources = []
        headers = []
        objects = []
        languages = []
        for i in srclist:
            if self.environment.is_header(i):
                headers.append(i)
            elif self.environment.is_object(i):
                objects.append(i)
            elif self.environment.is_source(i):
                sources.append(i)
                lang = self.lang_from_source_file(i)
                if lang not in languages:
                    languages.append(lang)
            elif self.environment.is_library(i):
                pass
            else:
                # Everything that is not an object or source file is considered a header.
                headers.append(i)
        return sources, headers, objects, languages
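
    # Illustrative sketch (hypothetical inputs): split_sources(['main.cpp', 'util.c',
    # 'api.h', 'blob.obj']) would group the entries roughly as
    # (['main.cpp', 'util.c'], ['api.h'], ['blob.obj'], ['cpp', 'c']).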

    def target_to_build_root(self, target):
        if self.get_target_dir(target) == '':
            return ''

        directories = os.path.normpath(self.get_target_dir(target)).split(os.sep)
        return os.sep.join(['..'] * len(directories))
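
    # Illustrative note: a target generated into 'sub/dir' yields one '..' per path
    # component (e.g. '..\\..' with Windows separators), letting generated paths climb
    # back up to the build root; a top-level target yields ''.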

    def quote_arguments(self, arr):
        return ['"%s"' % i for i in arr]

    def add_project_reference(self, root: ET.Element, include: str, projid: str, link_outputs: bool = False) -> None:
        ig = ET.SubElement(root, 'ItemGroup')
        pref = ET.SubElement(ig, 'ProjectReference', Include=include)
        ET.SubElement(pref, 'Project').text = '{%s}' % projid
        if not link_outputs:
            # Do not link in generated .lib files from dependencies automatically.
            # We only use the dependencies for ordering and link in the generated
            # objects and .lib files manually.
            ET.SubElement(pref, 'LinkLibraryDependencies').text = 'false'

    def add_target_deps(self, root: ET.Element, target):
        target_dict = {target.get_id(): target}
        for dep in self.get_target_deps(target_dict).values():
            if dep.get_id() in self.handled_target_deps[target.get_id()]:
                # This dependency was already handled manually.
                continue
            relpath = self.get_target_dir_relative_to(dep, target)
            vcxproj = os.path.join(relpath, dep.get_id() + '.vcxproj')
            tid = self.environment.coredata.target_guids[dep.get_id()]
            self.add_project_reference(root, vcxproj, tid)

    def create_basic_project_filters(self) -> ET.Element:
        root = ET.Element('Project', {'ToolsVersion': '4.0',
                                      'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'})
        return root

    def create_basic_project(self, target_name, *,
                             temp_dir,
                             guid,
                             conftype='Utility',
                             target_ext=None,
                             target_platform=None) -> T.Tuple[ET.Element, ET.Element]:
        root = ET.Element('Project', {'DefaultTargets': "Build",
                                      'ToolsVersion': '4.0',
                                      'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'})

        confitems = ET.SubElement(root, 'ItemGroup', {'Label': 'ProjectConfigurations'})
        if not target_platform:
            target_platform = self.platform

        multi_config_buildtype_list = coredata.get_genvs_default_buildtype_list() if self.gen_lite else [self.buildtype]
        for buildtype in multi_config_buildtype_list:
            prjconf = ET.SubElement(confitems, 'ProjectConfiguration',
                                    {'Include': buildtype + '|' + target_platform})
            ET.SubElement(prjconf, 'Configuration').text = buildtype
            ET.SubElement(prjconf, 'Platform').text = target_platform

        # Globals
        globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals')
        guidelem = ET.SubElement(globalgroup, 'ProjectGuid')
        guidelem.text = '{%s}' % guid
        kw = ET.SubElement(globalgroup, 'Keyword')
        kw.text = self.platform + 'Proj'

        ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props')

        # Configuration
        type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
        ET.SubElement(type_config, 'ConfigurationType').text = conftype
        if self.platform_toolset:
            ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset

        # This must come AFTER the '<PropertyGroup Label="Configuration">' element; importing before the 'PlatformToolset' elt
        # gets set leads to msbuild failures reporting -
        #   "The build tools for v142 (Platform Toolset = 'v142') cannot be found. ... please install v142 build tools."
        # This is extremely unhelpful and misleading since the v14x build tools ARE installed.
        ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props')

        if not self.gen_lite:  # Plenty of elements aren't necessary for 'makefile'-style project that just redirects to meson builds
            # XXX Wasn't here before for anything but gen_vcxproj , but seems fine?
            ns = ET.SubElement(globalgroup, 'RootNamespace')
            ns.text = target_name

            p = ET.SubElement(globalgroup, 'Platform')
            p.text = target_platform
            pname = ET.SubElement(globalgroup, 'ProjectName')
            pname.text = target_name
            if self.windows_target_platform_version:
                ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version
            ET.SubElement(globalgroup, 'UseMultiToolTask').text = 'true'

            ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte'
            # Fixme: wasn't here before for gen_vcxproj()
            ET.SubElement(type_config, 'UseOfMfc').text = 'false'

        # Project information
        direlem = ET.SubElement(root, 'PropertyGroup')
        fver = ET.SubElement(direlem, '_ProjectFileVersion')
        fver.text = self.project_file_version
        outdir = ET.SubElement(direlem, 'OutDir')
        outdir.text = '.\\'
        intdir = ET.SubElement(direlem, 'IntDir')
        intdir.text = temp_dir + '\\'

        tname = ET.SubElement(direlem, 'TargetName')
        tname.text = target_name

        if target_ext:
            ET.SubElement(direlem, 'TargetExt').text = target_ext

        return (root, type_config)
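
    # Illustrative skeleton of the project emitted above (abridged, hypothetical values):
    #     <Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
    #       <ItemGroup Label="ProjectConfigurations">
    #         <ProjectConfiguration Include="debug|x64">...</ProjectConfiguration>
    #       </ItemGroup>
    #       <PropertyGroup Label="Globals"><ProjectGuid>{...}</ProjectGuid>...</PropertyGroup>
    #       <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
    #       <PropertyGroup Label="Configuration"><ConfigurationType>Utility</ConfigurationType>...</PropertyGroup>
    #       <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
    #       <PropertyGroup><OutDir>.\</OutDir><IntDir>temp_dir\</IntDir><TargetName>name</TargetName></PropertyGroup>
    #     </Project>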

    def gen_run_target_vcxproj(self, target: build.RunTarget, ofname: str, guid: str) -> None:
        (root, type_config) = self.create_basic_project(target.name,
                                                        temp_dir=target.get_id(),
                                                        guid=guid)
        depend_files = self.get_custom_target_depend_files(target)

        if not target.command:
            # This is an alias target and thus doesn't run any command. It's
            # enough to emit the references to the other projects for them to
            # be built/run/..., if necessary.
            assert isinstance(target, build.AliasTarget)
            assert len(depend_files) == 0
        else:
            assert not isinstance(target, build.AliasTarget)

            target_env = self.get_run_target_env(target)
            _, _, cmd_raw = self.eval_custom_target_command(target)
            wrapper_cmd, _ = self.as_meson_exe_cmdline(target.command[0], cmd_raw[1:],
                                                       force_serialize=True, env=target_env,
                                                       verbose=True)
            self.add_custom_build(root, 'run_target', ' '.join(self.quote_arguments(wrapper_cmd)),
                                  deps=depend_files)

        # The import is needed even for alias targets, otherwise the build
        # target isn't defined
        ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
        self.add_regen_dependency(root)
        self.add_target_deps(root, target)
        self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)

    def gen_custom_target_vcxproj(self, target: build.CustomTarget, ofname: str, guid: str) -> None:
        if target.for_machine is MachineChoice.BUILD:
            platform = self.build_platform
        else:
            platform = self.platform
        (root, type_config) = self.create_basic_project(target.name,
                                                        temp_dir=target.get_id(),
                                                        guid=guid,
                                                        target_platform=platform)
        # We need to always use absolute paths because our invocation is always
        # from the target dir, not the build root.
        target.absolute_paths = True
        (srcs, ofilenames, cmd) = self.eval_custom_target_command(target, True)
        depend_files = self.get_custom_target_depend_files(target, True)
        # Always use a wrapper because MSBuild eats random characters when
        # there are many arguments.
        tdir_abs = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target))
        extra_bdeps = target.get_transitive_build_target_deps()
        wrapper_cmd, _ = self.as_meson_exe_cmdline(target.command[0], cmd[1:],
                                                   # All targets run from the target dir
                                                   workdir=tdir_abs,
                                                   extra_bdeps=extra_bdeps,
                                                   capture=ofilenames[0] if target.capture else None,
                                                   feed=srcs[0] if target.feed else None,
                                                   force_serialize=True,
                                                   env=target.env,
                                                   verbose=target.console)
        if target.build_always_stale:
            # Use a nonexistent file to always consider the target out-of-date.
            ofilenames += [self.nonexistent_file(os.path.join(self.environment.get_scratch_dir(),
                                                              'outofdate.file'))]
        self.add_custom_build(root, 'custom_target', ' '.join(self.quote_arguments(wrapper_cmd)),
                              deps=wrapper_cmd[-1:] + srcs + depend_files, outputs=ofilenames,
                              verify_files=not target.build_always_stale)
        ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
        self.generate_custom_generator_commands(target, root)
        self.add_regen_dependency(root)
        self.add_target_deps(root, target)
        self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)

    def gen_compile_target_vcxproj(self, target: build.CompileTarget, ofname: str, guid: str) -> None:
        if target.for_machine is MachineChoice.BUILD:
            platform = self.build_platform
        else:
            platform = self.platform
        (root, type_config) = self.create_basic_project(target.name,
                                                        temp_dir=target.get_id(),
                                                        guid=guid,
                                                        target_platform=platform)
        ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
        target.generated = [self.compile_target_to_generator(target)]
        target.sources = []
        self.generate_custom_generator_commands(target, root)
        self.add_regen_dependency(root)
        self.add_target_deps(root, target)
        self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)

    @classmethod
    def lang_from_source_file(cls, src):
        ext = src.split('.')[-1]
        if ext in compilers.c_suffixes:
            return 'c'
        if ext in compilers.cpp_suffixes:
            return 'cpp'
        raise MesonException(f'Could not guess language from source file {src}.')
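
    # Illustrative note: 'foo.c' maps to 'c', while 'foo.cpp' (or any other extension
    # in compilers.cpp_suffixes, e.g. 'cc' or 'cxx') maps to 'cpp'; any other
    # extension raises a MesonException.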

    def add_pch(self, pch_sources, lang, inc_cl):
        if lang in pch_sources:
            self.use_pch(pch_sources, lang, inc_cl)

    def create_pch(self, pch_sources, lang, inc_cl):
        pch = ET.SubElement(inc_cl, 'PrecompiledHeader')
        pch.text = 'Create'
        self.add_pch_files(pch_sources, lang, inc_cl)

    def use_pch(self, pch_sources, lang, inc_cl):
        pch = ET.SubElement(inc_cl, 'PrecompiledHeader')
        pch.text = 'Use'
        header = self.add_pch_files(pch_sources, lang, inc_cl)
        pch_include = ET.SubElement(inc_cl, 'ForcedIncludeFiles')
        pch_include.text = header + ';%(ForcedIncludeFiles)'

    def add_pch_files(self, pch_sources, lang, inc_cl):
        header = os.path.basename(pch_sources[lang][0])
        pch_file = ET.SubElement(inc_cl, 'PrecompiledHeaderFile')
        # When USING PCHs, MSVC will not do the regular include
        # directory lookup, but simply use a string match to find the
        # PCH to use. That means the #include directive must match the
        # pch_file.text used during PCH CREATION verbatim.
        # When CREATING a PCH, MSVC will do the include directory
        # lookup to find the actual PCH header to use. Thus, the PCH
        # header must either be in the include_directories of the target
        # or be in the same directory as the PCH implementation.
        pch_file.text = header
        pch_out = ET.SubElement(inc_cl, 'PrecompiledHeaderOutputFile')
        pch_out.text = f'$(IntDir)$(TargetName)-{lang}.pch'

        # Need to set the name for the pdb, as cl otherwise gives it a static
        # name. Which leads to problems when there is more than one pch
        # (e.g. for different languages).
        pch_pdb = ET.SubElement(inc_cl, 'ProgramDataBaseFileName')
        pch_pdb.text = f'$(IntDir)$(TargetName)-{lang}.pdb'

        return header

    def is_argument_with_msbuild_xml_entry(self, entry):
        # Remove arguments that have a top level XML entry so
        # they are not used twice.
        # FIXME add args as needed.
        if entry[1:].startswith('fsanitize'):
            return True
        return entry[1:].startswith('M')

    def add_additional_options(self, lang, parent_node, file_args):
        args = []
        for arg in file_args[lang].to_native():
            if self.is_argument_with_msbuild_xml_entry(arg):
                continue
            if arg == '%(AdditionalOptions)':
                args.append(arg)
            else:
                args.append(self.escape_additional_option(arg))
        ET.SubElement(parent_node, "AdditionalOptions").text = ' '.join(args)

    # Set up each project's source file ('CLCompile') element with appropriate preprocessor, include dir, and compile option values for correct intellisense.
    def add_project_nmake_defs_incs_and_opts(self, parent_node, src: str, defs_paths_opts_per_lang_and_buildtype: dict, platform: str):
        # For compactness, sources whose type matches the primary src type (i.e. most frequent in the set of source types used in the target/project,
        # according to the 'captured_build_args' map), can simply reference the preprocessor definitions, include dirs, and compile option NMake fields of
        # the project itself.
        # However, if a src is of a non-primary type, it could have totally different defs/dirs/options so we're going to have to fill in the full, verbose
        # set of values for these fields, which needs to be fully expanded per build type / configuration.
        #
        # FIXME: Suppose a project contains .cpp and .c src files with different compile defs/dirs/options, while also having .h files, some of which
        # are included by .cpp sources and others included by .c sources: How do we know whether the .h source should be using the .cpp or .c src
        # defs/dirs/options? Might it also be possible for a .h header to be shared between .cpp and .c sources? If so, I don't see how we can
        # correctly configure these intellisense fields.
        # For now, all sources/headers that fail to find their extension's language in the '...nmake_defs_paths_opts...' map will just adopt the project
        # defs/dirs/opts that are set for the nominal 'primary' src type.
        ext = src.split('.')[-1]
        lang = compilers.compilers.SUFFIX_TO_LANG.get(ext, None)
        if lang in defs_paths_opts_per_lang_and_buildtype.keys():
            # This is a non-primary src type for which we can't simply reference the project's nmake fields;
            # we must laboriously fill in the fields for all buildtypes.
            for buildtype in coredata.get_genvs_default_buildtype_list():
                (defs, paths, opts) = defs_paths_opts_per_lang_and_buildtype[lang][buildtype]
                condition = f'\'$(Configuration)|$(Platform)\'==\'{buildtype}|{platform}\''
                ET.SubElement(parent_node, 'PreprocessorDefinitions', Condition=condition).text = defs
                ET.SubElement(parent_node, 'AdditionalIncludeDirectories', Condition=condition).text = paths
                ET.SubElement(parent_node, 'AdditionalOptions', Condition=condition).text = opts
        else:  # Can't find bespoke nmake defs/dirs/opts fields for this extension, so just reference the project's fields
            ET.SubElement(parent_node, 'PreprocessorDefinitions').text = '$(NMakePreprocessorDefinitions)'
            ET.SubElement(parent_node, 'AdditionalIncludeDirectories').text = '$(NMakeIncludeSearchPath)'
            ET.SubElement(parent_node, 'AdditionalOptions').text = '$(AdditionalOptions)'
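
    # Illustrative sketch of the per-source intellisense elements produced for a
    # non-primary language (abridged, hypothetical values):
    #     <PreprocessorDefinitions Condition="'$(Configuration)|$(Platform)'=='debug|x64'">MY_DEF=1;</PreprocessorDefinitions>
    #     <AdditionalIncludeDirectories Condition="...">..\include;</AdditionalIncludeDirectories>
    #     <AdditionalOptions Condition="...">/W2;</AdditionalOptions>
    # Primary-language sources instead point at the project-wide $(NMake...) fields.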

    def add_preprocessor_defines(self, lang, parent_node, file_defines):
        defines = []
        for define in file_defines[lang]:
            if define == '%(PreprocessorDefinitions)':
                defines.append(define)
            else:
                defines.append(self.escape_preprocessor_define(define))
        ET.SubElement(parent_node, "PreprocessorDefinitions").text = ';'.join(defines)

    def add_include_dirs(self, lang, parent_node, file_inc_dirs):
        dirs = file_inc_dirs[lang]
        ET.SubElement(parent_node, "AdditionalIncludeDirectories").text = ';'.join(dirs)

    @staticmethod
    def has_objects(objects, additional_objects, generated_objects):
        # Ignore generated objects, those are automatically used by MSBuild because they are part of
        # the CustomBuild Outputs.
        return len(objects) + len(additional_objects) > 0

    @staticmethod
    def add_generated_objects(node, generated_objects):
        # Do not add generated objects to project file. Those are automatically used by MSBuild, because
        # they are part of the CustomBuild Outputs.
        return

    @staticmethod
    def escape_preprocessor_define(define: str) -> str:
        # See: https://msdn.microsoft.com/en-us/library/bb383819.aspx
        table = str.maketrans({'%': '%25', '$': '%24', '@': '%40',
                               "'": '%27', ';': '%3B', '?': '%3F', '*': '%2A',
                               # We need to escape backslash because it'll be un-escaped by
                               # Windows during process creation when it parses the arguments
                               # Basically, this converts `\` to `\\`.
                               '\\': '\\\\'})
        return define.translate(table)

    @staticmethod
    def escape_additional_option(option: str) -> str:
        # See: https://msdn.microsoft.com/en-us/library/bb383819.aspx
        table = str.maketrans({'%': '%25', '$': '%24', '@': '%40',
                               "'": '%27', ';': '%3B', '?': '%3F', '*': '%2A', ' ': '%20'})
        option = option.translate(table)
        # Since we're surrounding the option with ", if it ends in \ that will
        # escape the " when the process arguments are parsed and the starting
        # " will not terminate. So we escape it if that's the case. I'm not
        # kidding, this is how escaping works for process args on Windows.
        if option.endswith('\\'):
            option += '\\'
        return f'"{option}"'
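
    # Illustrative examples of the escaping helpers above (derived from the tables):
    #     escape_preprocessor_define('FOO="a;b"')  -> 'FOO="a%3Bb"'
    #     escape_additional_option('/DNAME=a b')   -> '"/DNAME=a%20b"'
    # i.e. MSBuild-special characters are percent-encoded and options are quoted.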

    @staticmethod
    def add_filter_info(list_filters_path, filter_group, sub_element, file_path, forced_filter_name=None, down=''):
        filter_inc_cl = ET.SubElement(filter_group, sub_element, Include=file_path)

        # Force the subdir
        if forced_filter_name:
            filter_path = forced_filter_name
        else:
            # Create a subdir following the placement if on the same drive
            filter_path = Path(file_path).resolve().parent
            if Path(file_path).drive == Path(down).drive:
                filter_path = Path(os.path.relpath(str(filter_path), down)).as_posix().replace('../', '').replace('..', '')
            else:
                return  # No filter needed

            # Needed to have non posix path
            filter_path = filter_path.replace('/', '\\')

        if filter_path and filter_path != '.':
            # Remove ending backslash
            filter_path = filter_path.rstrip('\\')
            # Create a hierarchical level of directories
            list_path = filter_path.split('\\')
            new_filter_path = ''
            for path in list_path:
                if new_filter_path:
                    new_filter_path = new_filter_path + '\\' + path
                else:
                    new_filter_path = path
                list_filters_path.add(new_filter_path)
            # Create a new filter node for the current file added
            ET.SubElement(filter_inc_cl, 'Filter').text = filter_path

    @staticmethod
    def split_link_args(args):
        """
        Split a list of link arguments into three lists:
        * library search paths
        * library filenames (or paths)
        * other link arguments
        """
        lpaths = []
        libs = []
        other = []
        for arg in args:
            if arg.startswith('/LIBPATH:'):
                lpath = arg[9:]
                # De-dup library search paths by removing older entries when
                # a new one is found. This is necessary because unlike other
                # search paths such as the include path, the library is
                # searched for in the newest (right-most) search path first.
                if lpath in lpaths:
                    lpaths.remove(lpath)
                lpaths.append(lpath)
            elif arg.startswith(('/', '-')):
                other.append(arg)
            # It's ok if we miss libraries with non-standard extensions here.
            # They will go into the general link arguments.
            elif arg.endswith('.lib') or arg.endswith('.a'):
                # De-dup
                if arg not in libs:
                    libs.append(arg)
            else:
                other.append(arg)
        return lpaths, libs, other
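
    # Illustrative example (hypothetical arguments):
    #     split_link_args(['/LIBPATH:c:\\libs', 'foo.lib', '/DEBUG', 'c:\\extra\\bar.lib'])
    # returns (['c:\\libs'], ['foo.lib', 'c:\\extra\\bar.lib'], ['/DEBUG']).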

    def _get_cl_compiler(self, target):
        for lang, c in target.compilers.items():
            if lang in {'c', 'cpp'}:
                return c
        # No source files, only objects, but we still need a compiler, so
        # return a found compiler
        if len(target.objects) > 0:
            for lang, c in self.environment.coredata.compilers[target.for_machine].items():
                if lang in {'c', 'cpp'}:
                    return c
        raise MesonException('Could not find a C or C++ compiler. MSVC can only build C/C++ projects.')

    def _prettyprint_vcxproj_xml(self, tree: ET.ElementTree, ofname: str) -> None:
        ofname_tmp = ofname + '~'
        tree.write(ofname_tmp, encoding='utf-8', xml_declaration=True)

        # ElementTree cannot do pretty-printing, so do it manually
        doc = xml.dom.minidom.parse(ofname_tmp)
        with open(ofname_tmp, 'w', encoding='utf-8') as of:
            of.write(doc.toprettyxml())
        replace_if_different(ofname, ofname_tmp)
# Returns: (target_args,file_args), (target_defines,file_defines), (target_inc_dirs,file_inc_dirs)
|
||
|
def get_args_defines_and_inc_dirs(self, target, compiler, generated_files_include_dirs, proj_to_src_root, proj_to_src_dir, build_args):
|
||
|
# Arguments, include dirs, defines for all files in the current target
|
||
|
target_args = []
|
||
|
target_defines = []
|
||
|
target_inc_dirs = []
|
||
|
# Arguments, include dirs, defines passed to individual files in
|
||
|
# a target; perhaps because the args are language-specific
|
||
|
#
|
||
|
# file_args is also later split out into defines and include_dirs in
|
||
|
# case someone passed those in there
|
||
|
file_args: T.Dict[str, CompilerArgs] = {l: c.compiler_args() for l, c in target.compilers.items()}
|
||
|
file_defines = {l: [] for l in target.compilers}
|
||
|
file_inc_dirs = {l: [] for l in target.compilers}
|
||
|
# The order in which these compile args are added must match
|
||
|
# generate_single_compile() and generate_basic_compiler_args()
|
||
|
for l, comp in target.compilers.items():
|
||
|
if l in file_args:
|
||
|
file_args[l] += compilers.get_base_compile_args(
|
||
|
target.get_options(), comp)
|
||
|
file_args[l] += comp.get_option_compile_args(
|
||
|
target.get_options())
|
||
|
|
||
|
# Add compile args added using add_project_arguments()
|
||
|
for l, args in self.build.projects_args[target.for_machine].get(target.subproject, {}).items():
|
||
|
if l in file_args:
|
||
|
file_args[l] += args
|
||
|
# Add compile args added using add_global_arguments()
|
||
|
# These override per-project arguments
|
||
|
for l, args in self.build.global_args[target.for_machine].items():
|
||
|
if l in file_args:
|
||
|
file_args[l] += args
|
||
|
# Compile args added from the env or cross file: CFLAGS/CXXFLAGS, etc. We want these
|
||
|
# to override all the defaults, but not the per-target compile args.
|
||
|
for l in file_args.keys():
|
||
|
file_args[l] += target.get_option(OptionKey('args', machine=target.for_machine, lang=l))
|
||
|
for args in file_args.values():
|
||
|
# This is where Visual Studio will insert target_args, target_defines,
|
||
|
# etc, which are added later from external deps (see below).
|
||
|
args += ['%(AdditionalOptions)', '%(PreprocessorDefinitions)', '%(AdditionalIncludeDirectories)']
|
||
|
# Add custom target dirs as includes automatically, but before
|
||
|
# target-specific include dirs. See _generate_single_compile() in
|
||
|
# the ninja backend for caveats.
|
||
|
args += ['-I' + arg for arg in generated_files_include_dirs]
|
||
|
# Add include dirs from the `include_directories:` kwarg on the target
|
||
|
# and from `include_directories:` of internal deps of the target.
|
||
|
#
|
||
|
# Target include dirs should override internal deps include dirs.
|
||
|
# This is handled in BuildTarget.process_kwargs()
|
||
|
#
|
||
|
# Include dirs from internal deps should override include dirs from
|
||
|
# external deps and must maintain the order in which they are
|
||
|
            # specified. Hence, we must reverse the order so that it is preserved.
            #
            # These are per-target, but we still add them as per-file because we
            # need them to be looked in first.
            for d in reversed(target.get_include_dirs()):
                # reversed is used to keep order of includes
                for i in reversed(d.get_incdirs()):
                    curdir = os.path.join(d.get_curdir(), i)
                    try:
                        # Add source subdir first so that the build subdir overrides it
                        args.append('-I' + os.path.join(proj_to_src_root, curdir))  # src dir
                        args.append('-I' + self.relpath(curdir, target.subdir))  # build dir
                    except ValueError:
                        # Include is on a different drive
                        args.append('-I' + os.path.normpath(curdir))
                for i in d.get_extra_build_dirs():
                    curdir = os.path.join(d.get_curdir(), i)
                    args.append('-I' + self.relpath(curdir, target.subdir))  # build dir
        # Add per-target compile args, f.ex, `c_args : ['/DFOO']`. We set these
        # near the end since these are supposed to override everything else.
        for l, args in target.extra_args.items():
            if l in file_args:
                file_args[l] += args
        # The highest priority includes. In order of directory search:
        # target private dir, target build dir, target source dir
        for args in file_args.values():
            t_inc_dirs = [self.relpath(self.get_target_private_dir(target),
                                       self.get_target_dir(target))]
            if target.implicit_include_directories:
                t_inc_dirs += ['.', proj_to_src_dir]
            args += ['-I' + arg for arg in t_inc_dirs]

        # Split preprocessor defines and include directories out of the list of
        # all extra arguments. The rest go into %(AdditionalOptions).
        for l, args in file_args.items():
            for arg in args[:]:
                if arg.startswith(('-D', '/D')) or arg == '%(PreprocessorDefinitions)':
                    file_args[l].remove(arg)
                    # Don't escape the marker
                    if arg == '%(PreprocessorDefinitions)':
                        define = arg
                    else:
                        define = arg[2:]
                    # De-dup
                    if define not in file_defines[l]:
                        file_defines[l].append(define)
                elif arg.startswith(('-I', '/I')) or arg == '%(AdditionalIncludeDirectories)':
                    file_args[l].remove(arg)
                    # Don't escape the marker
                    if arg == '%(AdditionalIncludeDirectories)':
                        inc_dir = arg
                    else:
                        inc_dir = arg[2:]
                    # De-dup
                    if inc_dir not in file_inc_dirs[l]:
                        file_inc_dirs[l].append(inc_dir)
                    # Add include dirs to target as well so that "Go to Document" works in headers
                    if inc_dir not in target_inc_dirs:
                        target_inc_dirs.append(inc_dir)

        # Split compile args needed to find external dependencies.
        # Link args are added while generating the link command.
        for d in reversed(target.get_external_deps()):
            # Cflags required by external deps might have UNIX-specific flags,
            # so filter them out if needed
            if d.name != 'openmp':
                d_compile_args = compiler.unix_args_to_native(d.get_compile_args())
                for arg in d_compile_args:
                    if arg.startswith(('-D', '/D')):
                        define = arg[2:]
                        # De-dup
                        if define in target_defines:
                            target_defines.remove(define)
                        target_defines.append(define)
                    elif arg.startswith(('-I', '/I')):
                        inc_dir = arg[2:]
                        # De-dup
                        if inc_dir not in target_inc_dirs:
                            target_inc_dirs.append(inc_dir)
                    else:
                        target_args.append(arg)

        if '/Gw' in build_args:
            target_args.append('/Gw')

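        # Illustrative note (added for clarity, values are hypothetical): the three pairs returned
        # below are per-target and per-language splits of the same information, e.g. roughly -
        #   target_args     = ['/Gw', ...]                 target_defines = ['SOME_DEF=1', ...]
        #   file_args       = {'c': [...], 'cpp': [...]}   file_defines   = {'c': [...], 'cpp': [...]}
        #   target_inc_dirs = ['..\\src\\include', ...]    file_inc_dirs  = {'c': [...], 'cpp': [...]}
        # The per-language ('file_*') values get attached to individual ClCompile items later in
        # gen_vcxproj(), while the per-target values populate the project-wide ItemDefinitionGroup
        # in add_non_makefile_vcxproj_elements().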
        return (target_args, file_args), (target_defines, file_defines), (target_inc_dirs, file_inc_dirs)

    @staticmethod
    def get_build_args(compiler, buildtype: str, optimization_level: str, debug: bool, sanitize: str) -> T.List[str]:
        build_args = compiler.get_buildtype_args(buildtype)
        build_args += compiler.get_optimization_args(optimization_level)
        build_args += compiler.get_debug_args(debug)
        build_args += compiler.sanitizer_compile_args(sanitize)
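        # Illustrative sketch (flag spellings depend on the compiler in use): for a debug build
        # with an MSVC-like compiler this typically collects flags such as '/Od' and '/Zi', plus
        # sanitizer flags like '/fsanitize=address' when b_sanitize=address; those same flags are
        # what the vcxproj element selection below (Optimization, DebugInformationFormat,
        # EnableASAN, etc.) later looks for.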

        return build_args

    # Convert a list of compile arguments from -
    #    [ '-I..\\some\\dir\\include', '-I../../some/other/dir', '/MDd', '/W2', '/std:c++17', '/Od', '/Zi', '-DSOME_DEF=1', '-DANOTHER_DEF=someval', ...]
    # to -
    #    'SOME_DEF=1;ANOTHER_DEF=someval;'
    # which is the format required by the visual studio project's NMakePreprocessorDefinitions field.
    @staticmethod
    def extract_nmake_preprocessor_defs(captured_build_args: list[str]) -> str:
        defs = ''
        for arg in captured_build_args:
            if arg.startswith(('-D', '/D')):
                defs += arg[2:] + ';'
        return defs
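
    # For example (illustrative call only):
    #   extract_nmake_preprocessor_defs(['-I..\\inc', '/MDd', '-DSOME_DEF=1', '/DANOTHER_DEF=someval'])
    # returns 'SOME_DEF=1;ANOTHER_DEF=someval;'.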

    # Convert a list of compile arguments from -
    #    [ '-I..\\some\\dir\\include', '-I../../some/other/dir', '/MDd', '/W2', '/std:c++17', '/Od', '/Zi', '-DSOME_DEF=1', '-DANOTHER_DEF=someval', ...]
    # to -
    #    '..\\some\\dir\\include;../../some/other/dir;'
    # which is the format required by the visual studio project's NMakeIncludeSearchPath field.
    @staticmethod
    def extract_nmake_include_paths(captured_build_args: list[str]) -> str:
        paths = ''
        for arg in captured_build_args:
            if arg.startswith(('-I', '/I')):
                paths += arg[2:] + ';'
        return paths
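
    # For example (illustrative call only):
    #   extract_nmake_include_paths(['-I..\\some\\dir\\include', '-I../../some/other/dir', '/MDd', '-DSOME_DEF=1'])
    # returns '..\\some\\dir\\include;../../some/other/dir;'.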

    # Convert a list of compile arguments from -
    #    [ '-I..\\some\\dir\\include', '-I../../some/other/dir', '/MDd', '/W2', '/std:c++17', '/Od', '/Zi', '-DSOME_DEF=1', '-DANOTHER_DEF=someval', ...]
    # to -
    #    '/MDd;/W2;/std:c++17;/Od;/Zi;'
    # which is the format required by the visual studio project's AdditionalOptions (intellisense) field.
    @staticmethod
    def extract_intellisense_additional_compiler_options(captured_build_args: list[str]) -> str:
        additional_opts = ''
        for arg in captured_build_args:
            if (not arg.startswith(('-D', '/D', '-I', '/I'))) and arg.startswith(('-', '/')):
                additional_opts += arg + ';'
        return additional_opts
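
    # For example (illustrative call only): anything that is neither a define nor an include path
    # but still looks like an option is kept verbatim, so
    #   extract_intellisense_additional_compiler_options(['-I..\\inc', '/MDd', '/std:c++17', '-DSOME_DEF=1'])
    # returns '/MDd;/std:c++17;'.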

    @staticmethod
    def get_nmake_base_meson_command_and_exe_search_paths() -> T.Tuple[str, str]:
        meson_cmd_list = mesonlib.get_meson_command()
        assert (len(meson_cmd_list) == 1) or (len(meson_cmd_list) == 2)
        # We expect get_meson_command() to either be of the form -
        #   1: ['path/to/meson.exe']
        # or -
        #   2: ['path/to/python.exe', 'and/path/to/meson.py']
        # so we'd like to ensure our makefile-style project invokes the same meson executable or python src as this instance.
        exe_search_paths = os.path.dirname(meson_cmd_list[0])
        nmake_base_meson_command = os.path.basename(meson_cmd_list[0])
        if len(meson_cmd_list) != 1:
            # We expect to be dealing with case '2', shown above.
            # With Windows, it's also possible that we get a path to the second element of meson_cmd_list that contains spaces
            # (e.g. 'and/path to/meson.py'). So, because this will end up directly in the makefile/NMake command lines, we'd
            # better always enclose it in quotes. Only strictly necessary for paths with spaces but no harm for paths without -
            nmake_base_meson_command += ' \"' + meson_cmd_list[1] + '\"'
            exe_search_paths += ';' + os.path.dirname(meson_cmd_list[1])

        # Additionally, in some cases, we appear to have to add 'C:\Windows\system32;C:\Windows' to the 'Path' environment (via the
        # ExecutablePath element), without which the 'meson compile ...' (NMakeBuildCommandLine) command can fail (failure to find
        # stdio.h and similar), so something is quietly switching some critical build behaviour based on the presence of these in
        # the 'Path'.
        # Not sure if this ultimately comes down to some 'find and guess' hidden behaviours within meson or within MSVC tools, but
        # I guess some projects may implicitly rely on this behaviour.
        # Things would be cleaner, more robust, repeatable, and portable if meson (and msvc tools) replaced all this kind of
        # find/guess behaviour with the requirement that things just be explicitly specified by the user.
        # An example of this can be seen with -
        #   1: Download https://github.com/facebook/zstd source
        #   2: cd to the 'zstd-dev\build\meson' dir
        #   3: meson setup -Dbin_programs=true -Dbin_contrib=true --genvslite vs2022 builddir_vslite
        #   4: Open the generated 'builddir_vslite_vs\zstd.sln' and build through a project, which should explicitly add the above to
        #      the project's 'Executable Directories' paths and build successfully.
        #   5: Remove 'C:\Windows\system32;C:\Windows;' from the same project's 'Executable Directories' paths and rebuild.
        #      This should now fail.
        # It feels uncomfortable to do this but what better alternative is there (and might this introduce new problems)? -
        exe_search_paths += ';C:\\Windows\\system32;C:\\Windows'
        # A meson project that explicitly specifies compiler/linker tools and sdk/include paths is not going to have any problems
        # with this addition.

        return (nmake_base_meson_command, exe_search_paths)
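
    # Illustrative example of the returned tuple (paths are hypothetical): when meson was launched
    # as ['C:\\Python\\python.exe', 'C:\\tools\\meson\\meson.py'], the above returns roughly
    #   ('python.exe "C:\\tools\\meson\\meson.py"',
    #    'C:\\Python;C:\\tools\\meson;C:\\Windows\\system32;C:\\Windows')
    # i.e. a command prefix suitable for the NMake... command lines plus the ExecutablePath value.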

    def add_gen_lite_makefile_vcxproj_elements(self,
                                               root: ET.Element,
                                               platform: str,
                                               target_ext: str,
                                               vslite_ctx: dict,
                                               target,
                                               proj_to_build_root: str,
                                               primary_src_lang: T.Optional[str]) -> None:
        ET.SubElement(root, 'ImportGroup', Label='ExtensionSettings')
        ET.SubElement(root, 'ImportGroup', Label='Shared')
        prop_sheets_grp = ET.SubElement(root, 'ImportGroup', Label='PropertySheets')
        ET.SubElement(prop_sheets_grp, 'Import', {'Project': r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props',
                                                  'Condition': r"exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')",
                                                  'Label': 'LocalAppDataPlatform'
                                                  })
        ET.SubElement(root, 'PropertyGroup', Label='UserMacros')

        (nmake_base_meson_command, exe_search_paths) = Vs2010Backend.get_nmake_base_meson_command_and_exe_search_paths()

        # Relative path from this .vcxproj to the directory containing the set of '..._[debug/debugoptimized/release]' setup meson build dirs.
        proj_to_multiconfigured_builds_parent_dir = os.path.join(proj_to_build_root, '..')

        # Conditional property groups per configuration (buildtype). E.g. -
        #   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='release|x64'">
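        # and, within each such group, roughly the following elements (values are illustrative,
        # generated by the loop below) -
        #   <OutDir>..\builddir_release\</OutDir>
        #   <NMakeBuildCommandLine>meson compile -C "..\builddir_release"</NMakeBuildCommandLine>
        #   <NMakeOutput>$(OutDir)foo.exe</NMakeOutput>
        #   <NMakePreprocessorDefinitions>SOME_DEF=1;...</NMakePreprocessorDefinitions>
        #   <NMakeIncludeSearchPath>..\some\dir\include;...</NMakeIncludeSearchPath>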
        multi_config_buildtype_list = coredata.get_genvs_default_buildtype_list()
        for buildtype in multi_config_buildtype_list:
            per_config_prop_group = ET.SubElement(root, 'PropertyGroup', Condition=f'\'$(Configuration)|$(Platform)\'==\'{buildtype}|{platform}\'')
            (_, build_dir_tail) = os.path.split(self.src_to_build)
            meson_build_dir_for_buildtype = build_dir_tail[:-2] + buildtype  # Get the buildtype suffixed 'builddir_[debug/release/etc]' from 'builddir_vs', for example.
            proj_to_build_dir_for_buildtype = str(os.path.join(proj_to_multiconfigured_builds_parent_dir, meson_build_dir_for_buildtype))
            ET.SubElement(per_config_prop_group, 'OutDir').text = f'{proj_to_build_dir_for_buildtype}\\'
            ET.SubElement(per_config_prop_group, 'IntDir').text = f'{proj_to_build_dir_for_buildtype}\\'
            ET.SubElement(per_config_prop_group, 'NMakeBuildCommandLine').text = f'{nmake_base_meson_command} compile -C "{proj_to_build_dir_for_buildtype}"'
            ET.SubElement(per_config_prop_group, 'NMakeOutput').text = f'$(OutDir){target.name}{target_ext}'
            captured_build_args = vslite_ctx[buildtype][target.get_id()]
            # 'captured_build_args' is a dictionary, mapping from each src file type to a list of compile args to use for that type.
            # Usually, there's just one but we could have multiple src types. However, since there's only one field for the makefile
            # project's NMake... preprocessor/include intellisense fields, we'll just use the first src type we have to fill in
            # these fields. Then, any src files in this VS project that aren't of this first src type will need to override
            # these intellisense fields themselves instead of simply referencing the values in the project.
            ET.SubElement(per_config_prop_group, 'NMakeReBuildCommandLine').text = f'{nmake_base_meson_command} compile -C "{proj_to_build_dir_for_buildtype}" --clean && {nmake_base_meson_command} compile -C "{proj_to_build_dir_for_buildtype}"'
            ET.SubElement(per_config_prop_group, 'NMakeCleanCommandLine').text = f'{nmake_base_meson_command} compile -C "{proj_to_build_dir_for_buildtype}" --clean'
            # Need to set the 'ExecutablePath' element for the above NMake... commands to be able to invoke the meson command.
            ET.SubElement(per_config_prop_group, 'ExecutablePath').text = exe_search_paths
            # We may not have any src files and so won't have a primary src language. In which case, we've nothing to fill in for this target's intellisense fields -
            if primary_src_lang:
                primary_src_type_build_args = captured_build_args[primary_src_lang]
                ET.SubElement(per_config_prop_group, 'NMakePreprocessorDefinitions').text = Vs2010Backend.extract_nmake_preprocessor_defs(primary_src_type_build_args)
                ET.SubElement(per_config_prop_group, 'NMakeIncludeSearchPath').text = Vs2010Backend.extract_nmake_include_paths(primary_src_type_build_args)
                ET.SubElement(per_config_prop_group, 'AdditionalOptions').text = Vs2010Backend.extract_intellisense_additional_compiler_options(primary_src_type_build_args)

            # Unless we explicitly specify the following empty path elements, the project is assigned a load of nasty defaults that fill these
            # with values like -
            #    $(VC_IncludePath);$(WindowsSDK_IncludePath);
            # which are all based on the current install environment (a recipe for non-reproducibility problems), not the paths that will be used by
            # the actual meson compile jobs. Although these elements look like they're only for MSBuild operations, they're not needed with our simple,
            # lite/makefile-style projects so let's just remove them in case they do get used/confused by intellisense.
            ET.SubElement(per_config_prop_group, 'IncludePath')
            ET.SubElement(per_config_prop_group, 'ExternalIncludePath')
            ET.SubElement(per_config_prop_group, 'ReferencePath')
            ET.SubElement(per_config_prop_group, 'LibraryPath')
            ET.SubElement(per_config_prop_group, 'LibraryWPath')
            ET.SubElement(per_config_prop_group, 'SourcePath')
            ET.SubElement(per_config_prop_group, 'ExcludePath')

    def add_non_makefile_vcxproj_elements(
            self,
            root: ET.Element,
            type_config: ET.Element,
            target,
            platform: str,
            subsystem,
            build_args,
            target_args,
            target_defines,
            target_inc_dirs,
            file_args
            ) -> None:
        compiler = self._get_cl_compiler(target)
        buildtype_link_args = compiler.get_buildtype_linker_args(self.buildtype)

        # Prefix to use to access the build root from the vcxproj dir
        down = self.target_to_build_root(target)

        # FIXME: Should the following just be set in create_basic_project(), even if
        # irrelevant for current target?

        # FIXME: Meson's LTO support needs to be integrated here
        ET.SubElement(type_config, 'WholeProgramOptimization').text = 'false'
        # Let VS auto-set the RTC level
        ET.SubElement(type_config, 'BasicRuntimeChecks').text = 'Default'
        # Incremental linking increases code size
        if '/INCREMENTAL:NO' in buildtype_link_args:
            ET.SubElement(type_config, 'LinkIncremental').text = 'false'

        # Build information
        compiles = ET.SubElement(root, 'ItemDefinitionGroup')
        clconf = ET.SubElement(compiles, 'ClCompile')
        if True in ((dep.name == 'openmp') for dep in target.get_external_deps()):
            ET.SubElement(clconf, 'OpenMPSupport').text = 'true'
        # CRT type; debug or release
        vscrt_type = target.get_option(OptionKey('b_vscrt'))
        if vscrt_type == 'from_buildtype':
            if self.buildtype == 'debug':
                ET.SubElement(type_config, 'UseDebugLibraries').text = 'true'
                ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebugDLL'
            else:
                ET.SubElement(type_config, 'UseDebugLibraries').text = 'false'
                ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDLL'
        elif vscrt_type == 'static_from_buildtype':
            if self.buildtype == 'debug':
                ET.SubElement(type_config, 'UseDebugLibraries').text = 'true'
                ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebug'
            else:
                ET.SubElement(type_config, 'UseDebugLibraries').text = 'false'
                ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreaded'
        elif vscrt_type == 'mdd':
            ET.SubElement(type_config, 'UseDebugLibraries').text = 'true'
            ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebugDLL'
        elif vscrt_type == 'mt':
            # FIXME, wrong
            ET.SubElement(type_config, 'UseDebugLibraries').text = 'false'
            ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreaded'
        elif vscrt_type == 'mtd':
            # FIXME, wrong
            ET.SubElement(type_config, 'UseDebugLibraries').text = 'true'
            ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebug'
        else:
            ET.SubElement(type_config, 'UseDebugLibraries').text = 'false'
            ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDLL'
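        # Summary of the b_vscrt mapping just applied (UseDebugLibraries / RuntimeLibrary):
        #   from_buildtype        -> debug: true/MultiThreadedDebugDLL, otherwise: false/MultiThreadedDLL
        #   static_from_buildtype -> debug: true/MultiThreadedDebug,    otherwise: false/MultiThreaded
        #   mdd -> true/MultiThreadedDebugDLL,  mt -> false/MultiThreaded,  mtd -> true/MultiThreadedDebug
        #   anything else         -> false/MultiThreadedDLL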
        # Sanitizers
        if '/fsanitize=address' in build_args:
            ET.SubElement(type_config, 'EnableASAN').text = 'true'
        # Debug format
        if '/ZI' in build_args:
            ET.SubElement(clconf, 'DebugInformationFormat').text = 'EditAndContinue'
        elif '/Zi' in build_args:
            ET.SubElement(clconf, 'DebugInformationFormat').text = 'ProgramDatabase'
        elif '/Z7' in build_args:
            ET.SubElement(clconf, 'DebugInformationFormat').text = 'OldStyle'
        else:
            ET.SubElement(clconf, 'DebugInformationFormat').text = 'None'
        # Runtime checks
        if '/RTC1' in build_args:
            ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'EnableFastChecks'
        elif '/RTCu' in build_args:
            ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'UninitializedLocalUsageCheck'
        elif '/RTCs' in build_args:
            ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'StackFrameRuntimeCheck'
        # Exception handling has to be set in the xml in addition to the "AdditionalOptions" because otherwise
        # cl will give warning D9025: overriding '/Ehs' with cpp_eh value
        if 'cpp' in target.compilers:
            eh = target.get_option(OptionKey('eh', machine=target.for_machine, lang='cpp'))
            if eh == 'a':
                ET.SubElement(clconf, 'ExceptionHandling').text = 'Async'
            elif eh == 's':
                ET.SubElement(clconf, 'ExceptionHandling').text = 'SyncCThrow'
            elif eh == 'none':
                ET.SubElement(clconf, 'ExceptionHandling').text = 'false'
            else:  # 'sc' or 'default'
                ET.SubElement(clconf, 'ExceptionHandling').text = 'Sync'

        if len(target_args) > 0:
            target_args.append('%(AdditionalOptions)')
            ET.SubElement(clconf, "AdditionalOptions").text = ' '.join(target_args)
        ET.SubElement(clconf, 'AdditionalIncludeDirectories').text = ';'.join(target_inc_dirs)
        target_defines.append('%(PreprocessorDefinitions)')
        ET.SubElement(clconf, 'PreprocessorDefinitions').text = ';'.join(target_defines)
        ET.SubElement(clconf, 'FunctionLevelLinking').text = 'true'
        # Warning level
        warning_level = T.cast('str', target.get_option(OptionKey('warning_level')))
        ET.SubElement(clconf, 'WarningLevel').text = 'Level' + str(1 + int(warning_level))
        if target.get_option(OptionKey('werror')):
            ET.SubElement(clconf, 'TreatWarningAsError').text = 'true'
        # Optimization flags
        o_flags = split_o_flags_args(build_args)
        if '/Ox' in o_flags:
            ET.SubElement(clconf, 'Optimization').text = 'Full'
        elif '/O2' in o_flags:
            ET.SubElement(clconf, 'Optimization').text = 'MaxSpeed'
        elif '/O1' in o_flags:
            ET.SubElement(clconf, 'Optimization').text = 'MinSpace'
        elif '/Od' in o_flags:
            ET.SubElement(clconf, 'Optimization').text = 'Disabled'
        if '/Oi' in o_flags:
            ET.SubElement(clconf, 'IntrinsicFunctions').text = 'true'
        if '/Ob1' in o_flags:
            ET.SubElement(clconf, 'InlineFunctionExpansion').text = 'OnlyExplicitInline'
        elif '/Ob2' in o_flags:
            ET.SubElement(clconf, 'InlineFunctionExpansion').text = 'AnySuitable'
        # Size-preserving flags
        if '/Os' in o_flags:
            ET.SubElement(clconf, 'FavorSizeOrSpeed').text = 'Size'
        # Note: setting FavorSizeOrSpeed with clang-cl conflicts with /Od and can make debugging difficult, so don't.
        elif '/Od' not in o_flags:
            ET.SubElement(clconf, 'FavorSizeOrSpeed').text = 'Speed'
        # Note: SuppressStartupBanner is /NOLOGO and is 'true' by default
        self.generate_lang_standard_info(file_args, clconf)

        resourcecompile = ET.SubElement(compiles, 'ResourceCompile')
        ET.SubElement(resourcecompile, 'PreprocessorDefinitions')

        # Linker options
        link = ET.SubElement(compiles, 'Link')
        extra_link_args = compiler.compiler_args()
        # FIXME: Can these buildtype linker args be added as tags in the
        # vcxproj file (similar to buildtype compiler args) instead of in
        # AdditionalOptions?
        extra_link_args += compiler.get_buildtype_linker_args(self.buildtype)
        # BEN: always generate PDBs actually.
        self.generate_debug_information(link)
        # Generate Debug info
        #if self.debug:
        #    self.generate_debug_information(link)
        #else:
        #    ET.SubElement(link, 'GenerateDebugInformation').text = 'false'
        if not isinstance(target, build.StaticLibrary):
            if isinstance(target, build.SharedModule):
                extra_link_args += compiler.get_std_shared_module_link_args(target.get_options())
            # Add link args added using add_project_link_arguments()
            extra_link_args += self.build.get_project_link_args(compiler, target.subproject, target.for_machine)
            # Add link args added using add_global_link_arguments()
            # These override per-project link arguments
            extra_link_args += self.build.get_global_link_args(compiler, target.for_machine)
            # Link args added from the env: LDFLAGS, or the cross file. We want
            # these to override all the defaults but not the per-target link
            # args.
            extra_link_args += self.environment.coredata.get_external_link_args(
                target.for_machine, compiler.get_language())
            # Only non-static built targets need link args and link dependencies
            extra_link_args += target.link_args
            # External deps must be last because target link libraries may depend on them.
            for dep in target.get_external_deps():
                # Extend without reordering or de-dup to preserve `-L -l` sets
                # https://github.com/mesonbuild/meson/issues/1718
                if dep.name == 'openmp':
                    ET.SubElement(clconf, 'OpenMPSupport').text = 'true'
                else:
                    extra_link_args.extend_direct(dep.get_link_args())
            for d in target.get_dependencies():
                if isinstance(d, build.StaticLibrary):
                    for dep in d.get_external_deps():
                        if dep.name == 'openmp':
                            ET.SubElement(clconf, 'OpenMPSupport').text = 'true'
                        else:
                            extra_link_args.extend_direct(dep.get_link_args())
        # Add link args for c_* or cpp_* build options. Currently this only
        # adds c_winlibs and cpp_winlibs when building for Windows. This needs
        # to be after all internal and external libraries so that unresolved
        # symbols from those can be found here. This is needed when the
        # *_winlibs that we want to link to are static mingw64 libraries.
        extra_link_args += compiler.get_option_link_args(target.get_options())
        (additional_libpaths, additional_links, extra_link_args) = self.split_link_args(extra_link_args.to_native())

        # Add more libraries to be linked if needed
        for t in target.get_dependencies():
            if isinstance(t, build.CustomTargetIndex):
                # We don't need the actual project here, just the library name
                lobj = t
            else:
                lobj = self.build.targets[t.get_id()]
            linkname = os.path.join(down, self.get_target_filename_for_linking(lobj))
            if t in target.link_whole_targets:
                if compiler.id == 'msvc' and version_compare(compiler.version, '<19.00.23918'):
                    # Expand our object lists manually if we are on pre-Visual Studio 2015 Update 2
                    l = t.extract_all_objects(False)

                    # Unfortunately, we can't use self.object_filename_from_source()
                    for gen in l.genlist:
                        for src in gen.get_outputs():
                            if self.environment.is_source(src):
                                path = self.get_target_generated_dir(t, gen, src)
                                gen_src_ext = '.' + os.path.splitext(path)[1][1:]
                                extra_link_args.append(path[:-len(gen_src_ext)] + '.obj')

                    for src in l.srclist:
                        obj_basename = None
                        if self.environment.is_source(src):
                            obj_basename = self.object_filename_from_source(t, src)
                            target_private_dir = self.relpath(self.get_target_private_dir(t),
                                                              self.get_target_dir(t))
                            rel_obj = os.path.join(target_private_dir, obj_basename)
                            extra_link_args.append(rel_obj)

                    extra_link_args.extend(self.flatten_object_list(t))
                else:
                    # /WHOLEARCHIVE:foo must go into AdditionalOptions
                    extra_link_args += compiler.get_link_whole_for(linkname)
                # To force Visual Studio to build this project even though it
                # has no sources, we include a reference to the vcxproj file
                # that builds this target. Technically we should add this only
                # if the current target has no sources, but it doesn't hurt to
                # have 'extra' references.
                trelpath = self.get_target_dir_relative_to(t, target)
                tvcxproj = os.path.join(trelpath, t.get_id() + '.vcxproj')
                tid = self.environment.coredata.target_guids[t.get_id()]
                self.add_project_reference(root, tvcxproj, tid, link_outputs=True)
                # Mark the dependency as already handled to not have
                # multiple references to the same target.
                self.handled_target_deps[target.get_id()].append(t.get_id())
            else:
                # Other libraries go into AdditionalDependencies
                if linkname not in additional_links:
                    additional_links.append(linkname)
        for lib in self.get_custom_target_provided_libraries(target):
            additional_links.append(self.relpath(lib, self.get_target_dir(target)))

        if len(extra_link_args) > 0:
            extra_link_args.append('%(AdditionalOptions)')
            ET.SubElement(link, "AdditionalOptions").text = ' '.join(extra_link_args)
        if len(additional_libpaths) > 0:
            additional_libpaths.insert(0, '%(AdditionalLibraryDirectories)')
            ET.SubElement(link, 'AdditionalLibraryDirectories').text = ';'.join(additional_libpaths)
        if len(additional_links) > 0:
            additional_links.append('%(AdditionalDependencies)')
            ET.SubElement(link, 'AdditionalDependencies').text = ';'.join(additional_links)
        ofile = ET.SubElement(link, 'OutputFile')
        ofile.text = f'$(OutDir){target.get_filename()}'
        subsys = ET.SubElement(link, 'SubSystem')
        subsys.text = subsystem
        if isinstance(target, (build.SharedLibrary, build.Executable)) and target.get_import_filename():
            # DLLs built with MSVC always have an import library except when
            # they're data-only DLLs, but we don't support those yet.
            ET.SubElement(link, 'ImportLibrary').text = target.get_import_filename()
        if isinstance(target, build.SharedLibrary):
            # Add module definitions file, if provided
            if target.vs_module_defs:
                relpath = os.path.join(down, target.vs_module_defs.rel_to_builddir(self.build_to_src))
                ET.SubElement(link, 'ModuleDefinitionFile').text = relpath
        # BEN: always generate PDBs actually.
        #if self.debug:
        pdb = ET.SubElement(link, 'ProgramDataBaseFileName')
        pdb.text = f'$(OutDir){target.name}.pdb'
        targetmachine = ET.SubElement(link, 'TargetMachine')
        if target.for_machine is MachineChoice.BUILD:
            targetplatform = platform.lower()
        else:
            targetplatform = self.platform.lower()
        if targetplatform == 'win32':
            targetmachine.text = 'MachineX86'
        elif targetplatform == 'x64' or detect_microsoft_gdk(targetplatform):
            targetmachine.text = 'MachineX64'
        elif targetplatform == 'arm':
            targetmachine.text = 'MachineARM'
        elif targetplatform == 'arm64':
            targetmachine.text = 'MachineARM64'
        elif targetplatform == 'arm64ec':
            targetmachine.text = 'MachineARM64EC'
        else:
            raise MesonException('Unsupported Visual Studio target machine: ' + targetplatform)
        # /nologo
        ET.SubElement(link, 'SuppressStartupBanner').text = 'true'
        # /release
        if not target.get_option(OptionKey('debug')):
            ET.SubElement(link, 'SetChecksum').text = 'true'

    # Visual Studio doesn't allow the src files of a project to be added with a 'Condition=...' attribute,
    # which would let us point at the different debug/debugoptimized/release sets of generated src files for
    # each of the solution's configurations. Similarly, 'ItemGroup' also doesn't support 'Condition'. So,
    # without knowing a better (simple) alternative, for now, we'll repoint these generated sources (which
    # would otherwise incorrectly point at non-existent files under our '[builddir]_vs' directory) to the
    # appropriate location under one of our buildtype build directories (e.g. '[builddir]_debug').
    # This will at least allow the user to open the files of generated sources listed in the solution explorer,
    # once a build/compile has generated these sources.
    #
    # This modifies the paths in 'gen_files' in place, as opposed to returning a new list of modified paths.
    def relocate_generated_file_paths_to_concrete_build_dir(self, gen_files: T.List[str], target: T.Union[build.Target, build.CustomTargetIndex]) -> None:
        (_, build_dir_tail) = os.path.split(self.src_to_build)
        meson_build_dir_for_buildtype = build_dir_tail[:-2] + coredata.get_genvs_default_buildtype_list()[0]  # Get the first buildtype suffixed dir (i.e. '[builddir]_debug') from '[builddir]_vs'
        # Relative path from this .vcxproj to the directory containing the set of '..._[debug/debugoptimized/release]' setup meson build dirs.
        proj_to_build_root = self.target_to_build_root(target)
        proj_to_multiconfigured_builds_parent_dir = os.path.join(proj_to_build_root, '..')
        proj_to_build_dir_for_buildtype = str(os.path.join(proj_to_multiconfigured_builds_parent_dir, meson_build_dir_for_buildtype))
        relocate_to_concrete_builddir_target = os.path.normpath(os.path.join(proj_to_build_dir_for_buildtype, self.get_target_dir(target)))
        for idx, file_path in enumerate(gen_files):
            gen_files[idx] = os.path.normpath(os.path.join(relocate_to_concrete_builddir_target, file_path))
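
    # For example (hypothetical paths): with src_to_build ending in 'builddir_vs' and 'debug' as the
    # first genvslite buildtype, calling
    #   self.relocate_generated_file_paths_to_concrete_build_dir(['foo.gen.c'], target)
    # rewrites the entry in place to something like
    #   '<proj_to_build_root>\..\builddir_debug\<target dir>\foo.gen.c' (normalized),
    # i.e. it points at the concrete ninja build dir that will actually contain the generated source.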

    # Returns bool indicating whether the .vcxproj has been generated.
    # Under some circumstances, it's unnecessary to create some .vcxprojs, so, when generating the .sln,
    # we need to respect that not all targets will have generated a project.
    def gen_vcxproj(self, target: build.BuildTarget, ofname: str, guid: str, vslite_ctx: dict = None) -> bool:
        mlog.debug(f'Generating vcxproj {target.name}.')
        subsystem = 'Windows'
        self.handled_target_deps[target.get_id()] = []

        if self.gen_lite:
            if not isinstance(target, build.BuildTarget):
                # Since we're going to delegate all building to the one true meson build command, we don't need
                # to generate .vcxprojs for targets that don't add any source files or just perform custom build
                # commands. These are targets of types CustomTarget or RunTarget. So let's just skip generating
                # these otherwise insubstantial non-BuildTarget targets.
                return False
            conftype = 'Makefile'
        elif isinstance(target, build.Executable):
            conftype = 'Application'
            if target.gui_app is not None:
                if not target.gui_app:
                    subsystem = 'Console'
            else:
                # If someone knows how to set the version properly,
                # please send a patch.
                subsystem = target.win_subsystem.split(',')[0]
        elif isinstance(target, build.StaticLibrary):
            conftype = 'StaticLibrary'
        elif isinstance(target, build.SharedLibrary):
            conftype = 'DynamicLibrary'
        elif isinstance(target, build.CustomTarget):
            self.gen_custom_target_vcxproj(target, ofname, guid)
            return True
        elif isinstance(target, build.RunTarget):
            self.gen_run_target_vcxproj(target, ofname, guid)
            return True
        elif isinstance(target, build.CompileTarget):
            self.gen_compile_target_vcxproj(target, ofname, guid)
            return True
        else:
            raise MesonException(f'Unknown target type for {target.get_basename()}')

        (sources, headers, objects, _languages) = self.split_sources(target.sources)
        if target.is_unity:
            sources = self.generate_unity_files(target, sources)
        if target.for_machine is MachineChoice.BUILD:
            platform = self.build_platform
        else:
            platform = self.platform

        tfilename = os.path.splitext(target.get_filename())

        (root, type_config) = self.create_basic_project(tfilename[0],
                                                        temp_dir=target.get_id(),
                                                        guid=guid,
                                                        conftype=conftype,
                                                        target_ext=tfilename[1],
                                                        target_platform=platform)

        # vcxproj.filters file
        root_filter = self.create_basic_project_filters()

        generated_files, custom_target_output_files, generated_files_include_dirs = self.generate_custom_generator_commands(
            target, root)
        (gen_src, gen_hdrs, gen_objs, _gen_langs) = self.split_sources(generated_files)
        (custom_src, custom_hdrs, custom_objs, _custom_langs) = self.split_sources(custom_target_output_files)
        gen_src += custom_src
        gen_hdrs += custom_hdrs

        compiler = self._get_cl_compiler(target)
        build_args = Vs2010Backend.get_build_args(compiler, self.buildtype, self.optimization, self.debug, self.sanitize)

        assert isinstance(target, (build.Executable, build.SharedLibrary, build.StaticLibrary, build.SharedModule)), 'for mypy'
        # Prefix to use to access the build root from the vcxproj dir
        proj_to_build_root = self.target_to_build_root(target)
        # Prefix to use to access the source tree's root from the vcxproj dir
        proj_to_src_root = os.path.join(proj_to_build_root, self.build_to_src)
        # Prefix to use to access the source tree's subdir from the vcxproj dir
        proj_to_src_dir = os.path.join(proj_to_src_root, self.get_target_dir(target))

        (target_args, file_args), (target_defines, file_defines), (target_inc_dirs, file_inc_dirs) = self.get_args_defines_and_inc_dirs(
            target, compiler, generated_files_include_dirs, proj_to_src_root, proj_to_src_dir, build_args)

        if self.gen_lite:
            assert vslite_ctx is not None
            primary_src_lang = get_primary_source_lang(target.sources, custom_src)
            self.add_gen_lite_makefile_vcxproj_elements(root, platform, tfilename[1], vslite_ctx, target, proj_to_build_root, primary_src_lang)
        else:
            self.add_non_makefile_vcxproj_elements(root, type_config, target, platform, subsystem, build_args, target_args, target_defines, target_inc_dirs, file_args)

        meson_file_group = ET.SubElement(root, 'ItemGroup')
        ET.SubElement(meson_file_group, 'None', Include=os.path.join(proj_to_src_dir, build_filename))

        # Visual Studio can't load projects that present duplicated items. Filter them out
        # by keeping track of already added paths.
        def path_normalize_add(path, lis):
            normalized = os.path.normcase(os.path.normpath(path))
            if normalized not in lis:
                lis.append(normalized)
                return True
            else:
                return False
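        # Illustrative behaviour of the helper above: on Windows, os.path.normcase lower-cases and
        # normalises separators, so for an initially empty 'seen' list -
        #   path_normalize_add('..\\Foo\\bar.h', seen)  -> True  (first occurrence, now tracked)
        #   path_normalize_add('../foo/BAR.H', seen)    -> False (normalises to the same entry)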

        pch_sources = {}
        if self.target_uses_pch(target):
            for lang in ['c', 'cpp']:
                pch = target.get_pch(lang)
                if not pch:
                    continue
                if compiler.id == 'msvc':
                    if len(pch) == 1:
                        # Auto generate PCH.
                        src = os.path.join(proj_to_build_root, self.create_msvc_pch_implementation(target, lang, pch[0]))
                        pch_header_dir = os.path.dirname(os.path.join(proj_to_src_dir, pch[0]))
                    else:
                        src = os.path.join(proj_to_src_dir, pch[1])
                        pch_header_dir = None
                    pch_sources[lang] = [pch[0], src, lang, pch_header_dir]
                else:
                    # I don't know whether it's relevant, but let's handle other compilers
                    # used with a vs backend
                    pch_sources[lang] = [pch[0], None, lang, None]

        list_filters_path = set()

        previous_includes = []
        if len(headers) + len(gen_hdrs) + len(target.extra_files) + len(pch_sources) > 0:
            if self.gen_lite and gen_hdrs:
                # Although we're constructing our .vcxproj under our '..._vs' directory, we want to reference generated files
                # in our concrete build directories (e.g. '..._debug'), where generated files will exist after building.
                self.relocate_generated_file_paths_to_concrete_build_dir(gen_hdrs, target)

            # Filter information
            filter_group_include = ET.SubElement(root_filter, 'ItemGroup')

            inc_hdrs = ET.SubElement(root, 'ItemGroup')
            for h in headers:
                relpath = os.path.join(proj_to_build_root, h.rel_to_builddir(self.build_to_src))
                if path_normalize_add(relpath, previous_includes):
                    self.add_filter_info(list_filters_path, filter_group_include, 'ClInclude', relpath, h.subdir)
                    ET.SubElement(inc_hdrs, 'CLInclude', Include=relpath)
            for h in gen_hdrs:
                if path_normalize_add(h, previous_includes):
                    self.add_filter_info(list_filters_path, filter_group_include, 'ClInclude', h)
                    ET.SubElement(inc_hdrs, 'CLInclude', Include=h)
            for h in target.extra_files:
                relpath = os.path.join(proj_to_build_root, h.rel_to_builddir(self.build_to_src))
                if path_normalize_add(relpath, previous_includes):
                    self.add_filter_info(list_filters_path, filter_group_include, 'ClInclude', relpath, h.subdir)
                    ET.SubElement(inc_hdrs, 'CLInclude', Include=relpath)
            for headers in pch_sources.values():
                path = os.path.join(proj_to_src_dir, headers[0])
                if path_normalize_add(path, previous_includes):
                    self.add_filter_info(list_filters_path, filter_group_include, 'ClInclude', path, 'pch')
                    ET.SubElement(inc_hdrs, 'CLInclude', Include=path)

        previous_sources = []
        if len(sources) + len(gen_src) + len(pch_sources) > 0:
            if self.gen_lite:
                # Get data to fill in intellisense fields for sources that can't reference the project-wide values
                defs_paths_opts_per_lang_and_buildtype = get_non_primary_lang_intellisense_fields(
                    vslite_ctx,
                    target.get_id(),
                    primary_src_lang)
                if gen_src:
                    # Although we're constructing our .vcxproj under our '..._vs' directory, we want to reference generated files
                    # in our concrete build directories (e.g. '..._debug'), where generated files will exist after building.
                    self.relocate_generated_file_paths_to_concrete_build_dir(gen_src, target)

            # Filter information
            filter_group_compile = ET.SubElement(root_filter, 'ItemGroup')

            inc_src = ET.SubElement(root, 'ItemGroup')
            for s in sources:
                relpath = os.path.join(proj_to_build_root, s.rel_to_builddir(self.build_to_src))
                if path_normalize_add(relpath, previous_sources):
                    self.add_filter_info(list_filters_path, filter_group_compile, 'CLCompile', relpath, s.subdir)
                    inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=relpath)
                    if self.gen_lite:
                        self.add_project_nmake_defs_incs_and_opts(inc_cl, relpath, defs_paths_opts_per_lang_and_buildtype, platform)
                    else:
                        lang = Vs2010Backend.lang_from_source_file(s)
                        self.add_pch(pch_sources, lang, inc_cl)
                        self.add_additional_options(lang, inc_cl, file_args)
                        self.add_preprocessor_defines(lang, inc_cl, file_defines)
                        self.add_include_dirs(lang, inc_cl, file_inc_dirs)
                        ET.SubElement(inc_cl, 'ObjectFileName').text = "$(IntDir)" + \
                            self.object_filename_from_source(target, s)
            for s in gen_src:
                if path_normalize_add(s, previous_sources):
                    self.add_filter_info(list_filters_path, filter_group_compile, 'CLCompile', s)
                    inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=s)
                    if self.gen_lite:
                        self.add_project_nmake_defs_incs_and_opts(inc_cl, s, defs_paths_opts_per_lang_and_buildtype, platform)
                    else:
                        lang = Vs2010Backend.lang_from_source_file(s)
                        self.add_pch(pch_sources, lang, inc_cl)
                        self.add_additional_options(lang, inc_cl, file_args)
                        self.add_preprocessor_defines(lang, inc_cl, file_defines)
                        self.add_include_dirs(lang, inc_cl, file_inc_dirs)
                        s = File.from_built_file(target.get_subdir(), s)
                        ET.SubElement(inc_cl, 'ObjectFileName').text = "$(IntDir)" + \
                            self.object_filename_from_source(target, s)
            for lang, headers in pch_sources.items():
                impl = headers[1]
                if impl and path_normalize_add(impl, previous_sources):
                    self.add_filter_info(list_filters_path, filter_group_compile, 'CLCompile', impl, 'pch')
                    inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=impl)
                    self.create_pch(pch_sources, lang, inc_cl)
                    if self.gen_lite:
                        self.add_project_nmake_defs_incs_and_opts(inc_cl, impl, defs_paths_opts_per_lang_and_buildtype, platform)
                    else:
                        self.add_additional_options(lang, inc_cl, file_args)
                        self.add_preprocessor_defines(lang, inc_cl, file_defines)
                        pch_header_dir = pch_sources[lang][3]
                        if pch_header_dir:
                            inc_dirs = copy.deepcopy(file_inc_dirs)
                            inc_dirs[lang] = [pch_header_dir] + inc_dirs[lang]
                        else:
                            inc_dirs = file_inc_dirs
                        self.add_include_dirs(lang, inc_cl, inc_dirs)
                        # XXX: Do we need to set the object file name here too?

        # Filter information
        filter_group = ET.SubElement(root_filter, 'ItemGroup')
        for filter_dir in list_filters_path:
            filter = ET.SubElement(filter_group, 'Filter', Include=filter_dir)
            ET.SubElement(filter, 'UniqueIdentifier').text = '{' + str(uuid.uuid4()) + '}'

        additional_objects = []
        for o in self.flatten_object_list(target, proj_to_build_root)[0]:
            assert isinstance(o, str)
            additional_objects.append(o)
        for o in custom_objs:
            additional_objects.append(o)

        previous_objects = []
        if self.has_objects(objects, additional_objects, gen_objs):
            inc_objs = ET.SubElement(root, 'ItemGroup')
            for s in objects:
                relpath = os.path.join(proj_to_build_root, s.rel_to_builddir(self.build_to_src))
                if path_normalize_add(relpath, previous_objects):
                    ET.SubElement(inc_objs, 'Object', Include=relpath)
            for s in additional_objects:
                if path_normalize_add(s, previous_objects):
                    ET.SubElement(inc_objs, 'Object', Include=s)
            self.add_generated_objects(inc_objs, gen_objs)

        ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
        self.add_regen_dependency(root)
        if not self.gen_lite:
            # Injecting further target dependencies into this vcxproj implies and forces a Visual Studio BUILD dependency,
            # which we don't want when using 'genvslite'. A gen_lite build has as little involvement with Visual Studio's
            # build system as possible.
            self.add_target_deps(root, target)
        self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
        self._prettyprint_vcxproj_xml(ET.ElementTree(root_filter), ofname + '.filters')
        return True

    def gen_regenproj(self):
        # Fully adapting the REGEN machinery to a 'genvslite' solution - checking timestamps and settings,
        # regenerating the '[builddir]_vs' solution/vcxprojs, as well as regenerating the accompanying
        # buildtype-suffixed ninja build directories (from which we'd first need to collect correct, updated
        # preprocessor defs and compiler options in order to fill in the regenerated solution's intellisense
        # settings) - would require some non-trivial intrusion into the 'meson --internal regencheck
        # ./meson-private' execution path (and perhaps also the '--internal regenerate' and even
        # 'meson setup --reconfigure' code). So, for now, we'll instead give the user a simpler 'reconfigure'
        # utility project that just runs 'meson setup --reconfigure [builddir]_[buildtype] [srcdir]' on each
        # of the ninja build dirs.
        #
        # FIXME: That will keep the building and compiling correctly configured but obviously won't update the
        # solution and vcxprojs, which may allow solution src files and intellisense options to go out-of-date; the
        # user would still have to manually 'meson setup --genvslite [vsxxxx] [builddir] [srcdir]' to fully regenerate
        # a complete and correct solution.
        if self.gen_lite:
            project_name = 'RECONFIGURE'
            ofname = os.path.join(self.environment.get_build_dir(), 'RECONFIGURE.vcxproj')
            conftype = 'Makefile'
            # I find the REGEN project doesn't work; it fails to invoke the appropriate -
            #   python meson.py --internal regencheck builddir\meson-private
            # command, despite the fact that manually running such a command in a shell runs just fine.
            # Running/building the regen project produces the error -
            #   ...Microsoft.CppBuild.targets(460,5): error MSB8020: The build tools for ClangCL (Platform Toolset = 'ClangCL') cannot be found. To build using the ClangCL build tools, please install ...
            # Not sure why, but a simple makefile-style project that executes the full '...regencheck...' command actually works (and seems a little simpler).
            # Although I've limited this change to only happen under '--genvslite', perhaps ...
            # FIXME: Should all utility projects use the simpler and less problematic makefile-style project?
        else:
            project_name = 'REGEN'
            ofname = os.path.join(self.environment.get_build_dir(), 'REGEN.vcxproj')
            conftype = 'Utility'

        guid = self.environment.coredata.regen_guid
        (root, type_config) = self.create_basic_project(project_name,
                                                        temp_dir='regen-temp',
                                                        guid=guid,
                                                        conftype=conftype
                                                        )

        if self.gen_lite:
            (nmake_base_meson_command, exe_search_paths) = Vs2010Backend.get_nmake_base_meson_command_and_exe_search_paths()
            all_configs_prop_group = ET.SubElement(root, 'PropertyGroup')

            # Multi-line command to reconfigure all buildtype-suffixed build dirs
            multi_config_buildtype_list = coredata.get_genvs_default_buildtype_list()
            (_, build_dir_tail) = os.path.split(self.src_to_build)
            proj_to_multiconfigured_builds_parent_dir = '..'  # We know this RECONFIGURE.vcxproj will always be in the '[builddir]_vs' dir.
            proj_to_src_dir = self.build_to_src
            reconfigure_all_cmd = ''
            for buildtype in multi_config_buildtype_list:
                meson_build_dir_for_buildtype = build_dir_tail[:-2] + buildtype  # Get the buildtype suffixed 'builddir_[debug/release/etc]' from 'builddir_vs', for example.
                proj_to_build_dir_for_buildtype = str(os.path.join(proj_to_multiconfigured_builds_parent_dir, meson_build_dir_for_buildtype))
                reconfigure_all_cmd += f'{nmake_base_meson_command} setup --reconfigure "{proj_to_build_dir_for_buildtype}" "{proj_to_src_dir}"\n'
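            # For example, with the default genvslite buildtypes (debug/debugoptimized/release, as
            # referenced elsewhere in this file) this builds up a multi-line command along the lines
            # of (paths and meson command are illustrative) -
            #   meson setup --reconfigure "..\builddir_debug" "..\path\to\src"
            #   meson setup --reconfigure "..\builddir_debugoptimized" "..\path\to\src"
            #   meson setup --reconfigure "..\builddir_release" "..\path\to\src"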
            ET.SubElement(all_configs_prop_group, 'NMakeBuildCommandLine').text = reconfigure_all_cmd
            ET.SubElement(all_configs_prop_group, 'NMakeReBuildCommandLine').text = reconfigure_all_cmd
            ET.SubElement(all_configs_prop_group, 'NMakeCleanCommandLine').text = ''

            # Need to set the 'ExecutablePath' element for the above NMake... commands to be able to execute
            ET.SubElement(all_configs_prop_group, 'ExecutablePath').text = exe_search_paths
        else:
            action = ET.SubElement(root, 'ItemDefinitionGroup')
            midl = ET.SubElement(action, 'Midl')
            ET.SubElement(midl, "AdditionalIncludeDirectories").text = '%(AdditionalIncludeDirectories)'
            ET.SubElement(midl, "OutputDirectory").text = '$(IntDir)'
            ET.SubElement(midl, 'HeaderFileName').text = '%(Filename).h'
            ET.SubElement(midl, 'TypeLibraryName').text = '%(Filename).tlb'
            ET.SubElement(midl, 'InterfaceIdentifierFilename').text = '%(Filename)_i.c'
            ET.SubElement(midl, 'ProxyFileName').text = '%(Filename)_p.c'
            regen_command = self.environment.get_build_command() + ['--internal', 'regencheck']
            cmd_templ = '''call %s > NUL
"%s" "%s"'''
            regen_command = cmd_templ % \
                (self.get_vcvars_command(), '" "'.join(regen_command), self.environment.get_scratch_dir())
            self.add_custom_build(root, 'regen', regen_command, deps=self.get_regen_filelist(),
                                  outputs=[Vs2010Backend.get_regen_stampfile(self.environment.get_build_dir())],
                                  msg='Checking whether solution needs to be regenerated.')

        ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
        ET.SubElement(root, 'ImportGroup', Label='ExtensionTargets')
        self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)

    def gen_testproj(self):
        project_name = 'RUN_TESTS'
        ofname = os.path.join(self.environment.get_build_dir(), f'{project_name}.vcxproj')
        guid = self.environment.coredata.test_guid
        if self.gen_lite:
            (root, type_config) = self.create_basic_project(project_name,
                                                            temp_dir='install-temp',
                                                            guid=guid,
                                                            conftype='Makefile'
                                                            )
            (nmake_base_meson_command, exe_search_paths) = Vs2010Backend.get_nmake_base_meson_command_and_exe_search_paths()
            multi_config_buildtype_list = coredata.get_genvs_default_buildtype_list()
            (_, build_dir_tail) = os.path.split(self.src_to_build)
            proj_to_multiconfigured_builds_parent_dir = '..'  # We know this .vcxproj will always be in the '[builddir]_vs' dir.
            # Add appropriate 'test' commands for the 'build' action of this project, for all buildtypes
            for buildtype in multi_config_buildtype_list:
                meson_build_dir_for_buildtype = build_dir_tail[:-2] + buildtype  # Get the buildtype suffixed 'builddir_[debug/release/etc]' from 'builddir_vs', for example.
                proj_to_build_dir_for_buildtype = str(os.path.join(proj_to_multiconfigured_builds_parent_dir, meson_build_dir_for_buildtype))
                test_cmd = f'{nmake_base_meson_command} test -C "{proj_to_build_dir_for_buildtype}" --no-rebuild'
                if not self.environment.coredata.get_option(OptionKey('stdsplit')):
                    test_cmd += ' --no-stdsplit'
                if self.environment.coredata.get_option(OptionKey('errorlogs')):
                    test_cmd += ' --print-errorlogs'
                condition = f'\'$(Configuration)|$(Platform)\'==\'{buildtype}|{self.platform}\''
                prop_group = ET.SubElement(root, 'PropertyGroup', Condition=condition)
                ET.SubElement(prop_group, 'NMakeBuildCommandLine').text = test_cmd
                # Need to set the 'ExecutablePath' element for the NMake... commands to be able to execute
                ET.SubElement(prop_group, 'ExecutablePath').text = exe_search_paths
        else:
            (root, type_config) = self.create_basic_project(project_name,
                                                            temp_dir='test-temp',
                                                            guid=guid)

            action = ET.SubElement(root, 'ItemDefinitionGroup')
            midl = ET.SubElement(action, 'Midl')
            ET.SubElement(midl, "AdditionalIncludeDirectories").text = '%(AdditionalIncludeDirectories)'
            ET.SubElement(midl, "OutputDirectory").text = '$(IntDir)'
            ET.SubElement(midl, 'HeaderFileName').text = '%(Filename).h'
            ET.SubElement(midl, 'TypeLibraryName').text = '%(Filename).tlb'
            ET.SubElement(midl, 'InterfaceIdentifierFilename').text = '%(Filename)_i.c'
            ET.SubElement(midl, 'ProxyFileName').text = '%(Filename)_p.c'
            # FIXME: No benchmarks?
            test_command = self.environment.get_build_command() + ['test', '--no-rebuild']
            if not self.environment.coredata.get_option(OptionKey('stdsplit')):
                test_command += ['--no-stdsplit']
            if self.environment.coredata.get_option(OptionKey('errorlogs')):
                test_command += ['--print-errorlogs']
            self.serialize_tests()
            self.add_custom_build(root, 'run_tests', '"%s"' % ('" "'.join(test_command)))

        ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
        self.add_regen_dependency(root)
        self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)

    def gen_installproj(self):
        project_name = 'RUN_INSTALL'
        ofname = os.path.join(self.environment.get_build_dir(), f'{project_name}.vcxproj')
        guid = self.environment.coredata.install_guid
        if self.gen_lite:
            (root, type_config) = self.create_basic_project(project_name,
                                                            temp_dir='install-temp',
                                                            guid=guid,
                                                            conftype='Makefile'
                                                            )
            (nmake_base_meson_command, exe_search_paths) = Vs2010Backend.get_nmake_base_meson_command_and_exe_search_paths()
            multi_config_buildtype_list = coredata.get_genvs_default_buildtype_list()
            (_, build_dir_tail) = os.path.split(self.src_to_build)
            proj_to_multiconfigured_builds_parent_dir = '..'  # We know this .vcxproj will always be in the '[builddir]_vs' dir.
            # Add appropriate 'install' commands for the 'build' action of this project, for all buildtypes
            for buildtype in multi_config_buildtype_list:
                meson_build_dir_for_buildtype = build_dir_tail[:-2] + buildtype  # Get the buildtype suffixed 'builddir_[debug/release/etc]' from 'builddir_vs', for example.
                proj_to_build_dir_for_buildtype = str(os.path.join(proj_to_multiconfigured_builds_parent_dir, meson_build_dir_for_buildtype))
                install_cmd = f'{nmake_base_meson_command} install -C "{proj_to_build_dir_for_buildtype}" --no-rebuild'
                condition = f'\'$(Configuration)|$(Platform)\'==\'{buildtype}|{self.platform}\''
                prop_group = ET.SubElement(root, 'PropertyGroup', Condition=condition)
                ET.SubElement(prop_group, 'NMakeBuildCommandLine').text = install_cmd
                # Need to set the 'ExecutablePath' element for the NMake... commands to be able to execute
                ET.SubElement(prop_group, 'ExecutablePath').text = exe_search_paths
        else:
            self.create_install_data_files()

            (root, type_config) = self.create_basic_project(project_name,
                                                            temp_dir='install-temp',
                                                            guid=guid)

            action = ET.SubElement(root, 'ItemDefinitionGroup')
            midl = ET.SubElement(action, 'Midl')
            ET.SubElement(midl, "AdditionalIncludeDirectories").text = '%(AdditionalIncludeDirectories)'
            ET.SubElement(midl, "OutputDirectory").text = '$(IntDir)'
            ET.SubElement(midl, 'HeaderFileName').text = '%(Filename).h'
            ET.SubElement(midl, 'TypeLibraryName').text = '%(Filename).tlb'
            ET.SubElement(midl, 'InterfaceIdentifierFilename').text = '%(Filename)_i.c'
            ET.SubElement(midl, 'ProxyFileName').text = '%(Filename)_p.c'
            install_command = self.environment.get_build_command() + ['install', '--no-rebuild']
            self.add_custom_build(root, 'run_install', '"%s"' % ('" "'.join(install_command)))

        ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
        self.add_regen_dependency(root)
        self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)

    def add_custom_build(self, node: ET.Element, rulename: str, command: str, deps: T.Optional[T.List[str]] = None,
                         outputs: T.Optional[T.List[str]] = None, msg: T.Optional[str] = None, verify_files: bool = True) -> None:
        igroup = ET.SubElement(node, 'ItemGroup')
        rulefile = os.path.join(self.environment.get_scratch_dir(), rulename + '.rule')
        if not os.path.exists(rulefile):
            with open(rulefile, 'w', encoding='utf-8') as f:
                f.write("# Meson regen file.")
        custombuild = ET.SubElement(igroup, 'CustomBuild', Include=rulefile)
        if msg:
            message = ET.SubElement(custombuild, 'Message')
            message.text = msg
        if not verify_files:
            ET.SubElement(custombuild, 'VerifyInputsAndOutputsExist').text = 'false'

        # If a command ever were to change the current directory or set local
        # variables this would need to be more complicated, as msbuild by
        # default executes all CustomBuilds in a project using the same
        # shell. Right now such tasks are all done inside the meson_exe
        # wrapper. The trailing newline appears to be necessary to allow
        # parallel custom builds to work.
        ET.SubElement(custombuild, 'Command').text = f"{command}\n"

        if not outputs:
            # Use a nonexistent file to always consider the target out-of-date.
            outputs = [self.nonexistent_file(os.path.join(self.environment.get_scratch_dir(),
                                                          'outofdate.file'))]
        ET.SubElement(custombuild, 'Outputs').text = ';'.join(outputs)
        if deps:
            ET.SubElement(custombuild, 'AdditionalInputs').text = ';'.join(deps)
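
    # Typical uses of add_custom_build() elsewhere in this backend (shown here for reference):
    #   self.add_custom_build(root, 'run_tests', '"%s"' % ('" "'.join(test_command)))
    #   self.add_custom_build(root, 'regen', regen_command, deps=self.get_regen_filelist(),
    #                         outputs=[Vs2010Backend.get_regen_stampfile(self.environment.get_build_dir())],
    #                         msg='Checking whether solution needs to be regenerated.')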

    @staticmethod
    def nonexistent_file(prefix: str) -> str:
        i = 0
        file = prefix
        while os.path.exists(file):
            file = '%s%d' % (prefix, i)
            i += 1
        return file

    def generate_debug_information(self, link: ET.Element) -> None:
        # valid values for vs2015 are 'false', 'true', 'DebugFastLink'
        ET.SubElement(link, 'GenerateDebugInformation').text = 'true'

    def add_regen_dependency(self, root: ET.Element) -> None:
        # For now, with 'genvslite' solutions, REGEN is replaced by the lighter-weight RECONFIGURE utility that is
        # no longer a forced build dependency. See comment in 'gen_regenproj'.
        if not self.gen_lite:
            regen_vcxproj = os.path.join(self.environment.get_build_dir(), 'REGEN.vcxproj')
            self.add_project_reference(root, regen_vcxproj, self.environment.coredata.regen_guid)

    def generate_lang_standard_info(self, file_args: T.Dict[str, CompilerArgs], clconf: ET.Element) -> None:
        pass