1 # -*- mode:python -*-
2
3 # Copyright (c) 2013, 2015-2017 ARM Limited
4 # All rights reserved.
5 #
6 # The license below extends only to copyright in the software and shall
7 # not be construed as granting a license to any other intellectual
8 # property including but not limited to intellectual property relating
9 # to a hardware implementation of the functionality of the software
10 # licensed hereunder. You may use the software subject to the license
11 # terms below provided that you ensure that this notice is replicated
12 # unmodified and in its entirety in all distributions of the software,
13 # modified or unmodified, in source code or in binary form.
14 #
15 # Copyright (c) 2011 Advanced Micro Devices, Inc.
16 # Copyright (c) 2009 The Hewlett-Packard Development Company
17 # Copyright (c) 2004-2005 The Regents of The University of Michigan
18 # All rights reserved.
19 #
20 # Redistribution and use in source and binary forms, with or without
21 # modification, are permitted provided that the following conditions are
22 # met: redistributions of source code must retain the above copyright
23 # notice, this list of conditions and the following disclaimer;
24 # redistributions in binary form must reproduce the above copyright
25 # notice, this list of conditions and the following disclaimer in the
26 # documentation and/or other materials provided with the distribution;
27 # neither the name of the copyright holders nor the names of its
28 # contributors may be used to endorse or promote products derived from
29 # this software without specific prior written permission.
30 #
31 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
32 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
33 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
34 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
35 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
37 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
38 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
39 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
40 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
41 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
42 #
43 # Authors: Steve Reinhardt
44 # Nathan Binkert
45
46 ###################################################
47 #
48 # SCons top-level build description (SConstruct) file.
49 #
50 # While in this directory ('gem5'), just type 'scons' to build the default
51 # configuration (see below), or type 'scons build/<CONFIG>/<binary>'
52 # to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
53 # the optimized full-system version).
54 #
55 # You can build gem5 in a different directory as long as there is a
56 # 'build/<CONFIG>' somewhere along the target path. The build system
57 # expects that all configs under the same build directory are being
58 # built for the same host system.
59 #
60 # Examples:
61 #
62 # The following two commands are equivalent. The '-u' option tells
63 # scons to search up the directory tree for this SConstruct file.
64 # % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
65 # % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
66 #
67 # The following two commands are equivalent and demonstrate building
68 # in a directory outside of the source tree. The '-C' option tells
69 # scons to chdir to the specified directory to find this SConstruct
70 # file.
71 # % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
72 # % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
73 #
74 # You can use 'scons -H' to print scons options. If you're in this
75 # 'gem5' directory (or use -u or -C to tell scons where to find this
76 # file), you can use 'scons -h' to print all the gem5-specific build
77 # options as well.
78 #
79 ###################################################
80
81 from __future__ import print_function
82
83 # Global Python includes
84 import itertools
85 import os
86 import re
87 import shutil
88 import subprocess
89 import sys
90
91 from os import mkdir, environ
92 from os.path import abspath, basename, dirname, expanduser, normpath
93 from os.path import exists, isdir, isfile
94 from os.path import join as joinpath, split as splitpath
95 from re import match
96
97 # SCons includes
98 import SCons
99 import SCons.Node
100
101 from m5.util import compareVersions, readCommand
102
103 help_texts = {
104 "options" : "",
105 "global_vars" : "",
106 "local_vars" : ""
107 }
108
109 Export("help_texts")
110
111
112 # There's a bug in scons in that (1) by default, the help texts from
113 # AddOption() are supposed to be displayed when you type 'scons -h'
114 # and (2) you can override the help displayed by 'scons -h' using the
115 # Help() function, but these two features are incompatible: once
116 # you've overridden the help text using Help(), there's no way to get
117 # at the help texts from AddOptions. See:
118 # http://scons.tigris.org/issues/show_bug.cgi?id=2356
119 # http://scons.tigris.org/issues/show_bug.cgi?id=2611
120 # This hack lets us extract the help text from AddOptions and
121 # re-inject it via Help(). Ideally someday this bug will be fixed and
122 # we can just use AddOption directly.
123 def AddLocalOption(*args, **kwargs):
124 col_width = 30
125
126 help = " " + ", ".join(args)
127 if "help" in kwargs:
128 length = len(help)
129 if length >= col_width:
130 help += "\n" + " " * col_width
131 else:
132 help += " " * (col_width - length)
133 help += kwargs["help"]
134 help_texts["options"] += help + "\n"
135
136 AddOption(*args, **kwargs)
137
138 AddLocalOption('--colors', dest='use_colors', action='store_true',
139 help="Add color to abbreviated scons output")
140 AddLocalOption('--no-colors', dest='use_colors', action='store_false',
141 help="Don't add color to abbreviated scons output")
142 AddLocalOption('--with-cxx-config', dest='with_cxx_config',
143 action='store_true',
144 help="Build with support for C++-based configuration")
145 AddLocalOption('--default', dest='default', type='string', action='store',
146 help='Override which build_opts file to use for defaults')
147 AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
148 help='Disable style checking hooks')
149 AddLocalOption('--no-lto', dest='no_lto', action='store_true',
150 help='Disable Link-Time Optimization for fast')
151 AddLocalOption('--force-lto', dest='force_lto', action='store_true',
152 help='Use Link-Time Optimization instead of partial linking' +
153 ' when the compiler doesn\'t support using them together.')
154 AddLocalOption('--update-ref', dest='update_ref', action='store_true',
155 help='Update test reference outputs')
156 AddLocalOption('--verbose', dest='verbose', action='store_true',
157 help='Print full tool command lines')
158 AddLocalOption('--without-python', dest='without_python',
159 action='store_true',
160 help='Build without Python configuration support')
161 AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
162 action='store_true',
163 help='Disable linking against tcmalloc')
164 AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
165 help='Build with Undefined Behavior Sanitizer if available')
166 AddLocalOption('--with-asan', dest='with_asan', action='store_true',
167 help='Build with Address Sanitizer if available')
168
169 if GetOption('no_lto') and GetOption('force_lto'):
170 print('--no-lto and --force-lto are mutually exclusive')
171 Exit(1)
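
# For instance (illustrative command lines, reusing the ALPHA config from
# the examples in the header comment above), these options are passed
# straight to scons:
# % scons --with-asan --with-ubsan build/ALPHA/gem5.debug
# % scons --verbose --no-lto build/ALPHA/gem5.fast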
172
173 ########################################################################
174 #
175 # Set up the main build environment.
176 #
177 ########################################################################
178
179 main = Environment()
180
181 from gem5_scons import Transform
182 from gem5_scons.util import get_termcap
183 termcap = get_termcap()
184
185 main_dict_keys = main.Dictionary().keys()
186
187 # Check that we have a C/C++ compiler
188 if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
189 print("No C++ compiler installed (package g++ on Ubuntu and RedHat)")
190 Exit(1)
191
192 ###################################################
193 #
194 # Figure out which configurations to set up based on the path(s) of
195 # the target(s).
196 #
197 ###################################################
198
199 # Find default configuration & binary.
200 Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
201
202 # helper function: find last occurrence of element in list
203 def rfind(l, elt, offs = -1):
204 for i in range(len(l)+offs, 0, -1):
205 if l[i] == elt:
206 return i
207 raise ValueError("element not found")
208
209 # Take a list of paths (or SCons Nodes) and return a list with all
210 # paths made absolute and ~-expanded. Paths will be interpreted
211 # relative to the launch directory unless a different root is provided
212 def makePathListAbsolute(path_list, root=GetLaunchDir()):
213 return [abspath(joinpath(root, expanduser(str(p))))
214 for p in path_list]
215
216 # Each target must have 'build' in the interior of the path; the
217 # directory below this will determine the build parameters. For
218 # example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
219 # recognize that ALPHA_SE specifies the configuration because it
220 # follows 'build' in the build path.
221
222 # The funky assignment to "[:]" is needed to replace the list contents
223 # in place rather than reassign the symbol to a new list, which
224 # doesn't work (obviously!).
225 BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)
226
227 # Generate a list of the unique build roots and configs that the
228 # collected targets reference.
229 variant_paths = []
230 build_root = None
231 for t in BUILD_TARGETS:
232 path_dirs = t.split('/')
233 try:
234 build_top = rfind(path_dirs, 'build', -2)
235 except:
236 print("Error: no non-leaf 'build' dir found on target path", t)
237 Exit(1)
238 this_build_root = joinpath('/',*path_dirs[:build_top+1])
239 if not build_root:
240 build_root = this_build_root
241 else:
242 if this_build_root != build_root:
243 print("Error: build targets not under same build root\n"
244 " %s\n %s" % (build_root, this_build_root))
245 Exit(1)
246 variant_path = joinpath('/',*path_dirs[:build_top+2])
247 if variant_path not in variant_paths:
248 variant_paths.append(variant_path)
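
# As a worked illustration (hypothetical path, mirroring the /local/foo
# example in the header comment above):
# t            = '/local/foo/build/ALPHA/gem5.debug'
# path_dirs    = ['', 'local', 'foo', 'build', 'ALPHA', 'gem5.debug']
# build_top    = rfind(path_dirs, 'build', -2)   # == 3
# build_root   = '/local/foo/build'
# variant_path = '/local/foo/build/ALPHA'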
249
250 # Make sure build_root exists (might not if this is the first build there)
251 if not isdir(build_root):
252 mkdir(build_root)
253 main['BUILDROOT'] = build_root
254
255 Export('main')
256
257 main.SConsignFile(joinpath(build_root, "sconsign"))
258
259 # Default duplicate option is to use hard links, but this messes up
260 # when you use emacs to edit a file in the target dir, as emacs moves
261 # the file to file~ and then copies it back, breaking the link. Symbolic
262 # (soft) links work better.
263 main.SetOption('duplicate', 'soft-copy')
264
265 #
266 # Set up global sticky variables... these are common to an entire build
267 # tree (not specific to a particular build like ALPHA_SE)
268 #
269
270 global_vars_file = joinpath(build_root, 'variables.global')
271
272 global_vars = Variables(global_vars_file, args=ARGUMENTS)
273
274 global_vars.AddVariables(
275 ('CC', 'C compiler', environ.get('CC', main['CC'])),
276 ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
277 ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
278 ('BATCH', 'Use batch pool for build and tests', False),
279 ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
280 ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
281 ('EXTRAS', 'Add extra directories to the compilation', '')
282 )
283
284 # Update main environment with values from ARGUMENTS & global_vars_file
285 global_vars.Update(main)
286 help_texts["global_vars"] += global_vars.GenerateHelpText(main)
287
288 # Save sticky variable settings back to current variables file
289 global_vars.Save(global_vars_file, main)
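
# These global sticky variables can be set on the scons command line and
# are then remembered in <build_root>/variables.global, e.g.
# (illustrative values):
# % scons CC=gcc CXX=g++ EXTRAS=/path/to/extras build/ALPHA/gem5.debug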
290
291 # Parse EXTRAS variable to build a list of all directories where we
292 # look for sources etc. This list is exported as extras_dir_list.
293 base_dir = main.srcdir.abspath
294 if main['EXTRAS']:
295 extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
296 else:
297 extras_dir_list = []
298
299 Export('base_dir')
300 Export('extras_dir_list')
301
302 # the ext directory should be on the #includes path
303 main.Append(CPPPATH=[Dir('ext')])
304
305 # Add shared top-level headers
306 main.Prepend(CPPPATH=Dir('include'))
307
308 if GetOption('verbose'):
309 def MakeAction(action, string, *args, **kwargs):
310 return Action(action, *args, **kwargs)
311 else:
312 MakeAction = Action
313 main['CCCOMSTR'] = Transform("CC")
314 main['CXXCOMSTR'] = Transform("CXX")
315 main['ASCOMSTR'] = Transform("AS")
316 main['ARCOMSTR'] = Transform("AR", 0)
317 main['LINKCOMSTR'] = Transform("LINK", 0)
318 main['SHLINKCOMSTR'] = Transform("SHLINK", 0)
319 main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
320 main['M4COMSTR'] = Transform("M4")
321 main['SHCCCOMSTR'] = Transform("SHCC")
322 main['SHCXXCOMSTR'] = Transform("SHCXX")
323 Export('MakeAction')
324
325 # Initialize the Link-Time Optimization (LTO) flags
326 main['LTO_CCFLAGS'] = []
327 main['LTO_LDFLAGS'] = []
328
329 # According to the readme, tcmalloc works best if the compiler doesn't
330 # assume that we're using the builtin malloc and friends. These flags
331 # are compiler-specific, so we need to set them after we detect which
332 # compiler we're using.
333 main['TCMALLOC_CCFLAGS'] = []
334
335 CXX_version = readCommand([main['CXX'],'--version'], exception=False)
336 CXX_V = readCommand([main['CXX'],'-V'], exception=False)
337
338 main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
339 main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
340 if main['GCC'] + main['CLANG'] > 1:
341 print('Error: How can we have two at the same time?')
342 Exit(1)
343
344 # Set up default C++ compiler flags
345 if main['GCC'] or main['CLANG']:
346 # As gcc and clang share many flags, do the common parts here
347 main.Append(CCFLAGS=['-pipe'])
348 main.Append(CCFLAGS=['-fno-strict-aliasing'])
349 # Enable -Wall and -Wextra and then disable the few warnings that
350 # we consistently violate
351 main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
352 '-Wno-sign-compare', '-Wno-unused-parameter'])
353 # We always compile using C++11
354 main.Append(CXXFLAGS=['-std=c++11'])
355 if sys.platform.startswith('freebsd'):
356 main.Append(CCFLAGS=['-I/usr/local/include'])
357 main.Append(CXXFLAGS=['-I/usr/local/include'])
358
359 main['FILTER_PSHLINKFLAGS'] = lambda x: str(x).replace(' -shared', '')
360 main['PSHLINKFLAGS'] = main.subst('${FILTER_PSHLINKFLAGS(SHLINKFLAGS)}')
361 main['PLINKFLAGS'] = main.subst('${LINKFLAGS}')
362 shared_partial_flags = ['-r', '-nostdlib']
363 main.Append(PSHLINKFLAGS=shared_partial_flags)
364 main.Append(PLINKFLAGS=shared_partial_flags)
365
366 # Treat warnings as errors, but whitelist some warnings that we
367 # want to allow (e.g., deprecation warnings).
368 main.Append(CCFLAGS=['-Werror',
369 '-Wno-error=deprecated-declarations',
370 '-Wno-error=deprecated',
371 '-Wno-ignored-qualifiers',
372 '-Wno-cast-function-type',
373 '-Wno-error=class-memaccess',
374 '-Wno-error=catch-value',
375 '-Wno-error=deprecated-copy',
376 '-Wno-error=address-of-packed-member',
377 '-Wno-error=array-bounds',
378 ])
379 else:
380 print(termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal, end=' ')
381 print("Don't know what compiler options to use for your compiler.")
382 print(termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX'])
383 print(termcap.Yellow + ' version:' + termcap.Normal, end = ' ')
384 if not CXX_version:
385 print(termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +
386 termcap.Normal)
387 else:
388 print(CXX_version.replace('\n', '<nl>'))
389 print(" If you're trying to use a compiler other than GCC")
390 print(" or clang, there appears to be something wrong with your")
391 print(" environment.")
392 print(" ")
393 print(" If you are trying to use a compiler other than those listed")
394 print(" above you will need to fix SConstruct and ")
395 print(" src/SConscript to support that compiler.")
396 Exit(1)
397
398 if main['GCC']:
399 # Check for a supported version of gcc. >= 4.8 is chosen for its
400 # level of c++11 support. See
401 # http://gcc.gnu.org/projects/cxx0x.html for details.
402 gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
403 if compareVersions(gcc_version, "4.8") < 0:
404 print('Error: gcc version 4.8 or newer required.')
405 print(' Installed version: ', gcc_version)
406 Exit(1)
407
408 main['GCC_VERSION'] = gcc_version
409
410 if compareVersions(gcc_version, '4.9') >= 0:
411 # Incremental linking with LTO is currently broken in gcc versions
412 # 4.9 and above. A version where everything works completely hasn't
413 # yet been identified.
414 #
415 # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67548
416 main['BROKEN_INCREMENTAL_LTO'] = True
417 if compareVersions(gcc_version, '6.0') >= 0:
418 # gcc versions 6.0 and greater accept an -flinker-output flag which
419 # selects what type of output the linker should generate. This is
420 # necessary for incremental lto to work, but is also broken in
421 # current versions of gcc. It may not be necessary in future
422 # versions. We add it here since it might be, and as a reminder that
423 # it exists. It's excluded if lto is being forced.
424 #
425 # https://gcc.gnu.org/gcc-6/changes.html
426 # https://gcc.gnu.org/ml/gcc-patches/2015-11/msg03161.html
427 # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=69866
428 if not GetOption('force_lto'):
429 main.Append(PSHLINKFLAGS='-flinker-output=rel')
430 main.Append(PLINKFLAGS='-flinker-output=rel')
431
432 # Make sure we warn if the user has requested to compile with the
433 # Undefined Behavior Sanitizer and this version of gcc does not
434 # support it.
435 if GetOption('with_ubsan') and \
436 compareVersions(gcc_version, '4.9') < 0:
437 print(termcap.Yellow + termcap.Bold +
438 'Warning: UBSan is only supported using gcc 4.9 and later.' +
439 termcap.Normal)
440
441 disable_lto = GetOption('no_lto')
442 if not disable_lto and main.get('BROKEN_INCREMENTAL_LTO', False) and \
443 not GetOption('force_lto'):
444 print(termcap.Yellow + termcap.Bold +
445 'Warning: Your compiler doesn\'t support incremental linking' +
446 ' and lto at the same time, so lto is being disabled. To force' +
447 ' lto on anyway, use the --force-lto option. That will disable' +
448 ' partial linking.' +
449 termcap.Normal)
450 disable_lto = True
451
452 # Add the appropriate Link-Time Optimization (LTO) flags
453 # unless LTO is explicitly turned off. Note that these flags
454 # are only used by the fast target.
455 if not disable_lto:
456 # Pass the LTO flag when compiling to produce GIMPLE
457 # output, we merely create the flags here and only append
458 # them later
459 main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
460
461 # Use the same amount of jobs for LTO as we are running
462 # scons with
463 main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
464
465 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
466 '-fno-builtin-realloc', '-fno-builtin-free'])
467
468 # The address sanitizer is available for gcc >= 4.8
469 if GetOption('with_asan'):
470 if GetOption('with_ubsan') and \
471 compareVersions(main['GCC_VERSION'], '4.9') >= 0:
472 main.Append(CCFLAGS=['-fsanitize=address,undefined',
473 '-fno-omit-frame-pointer'],
474 LINKFLAGS='-fsanitize=address,undefined')
475 else:
476 main.Append(CCFLAGS=['-fsanitize=address',
477 '-fno-omit-frame-pointer'],
478 LINKFLAGS='-fsanitize=address')
479 # Only gcc >= 4.9 supports UBSan, so check both the version
480 # and the command-line option before adding the compiler and
481 # linker flags.
482 elif GetOption('with_ubsan') and \
483 compareVersions(main['GCC_VERSION'], '4.9') >= 0:
484 main.Append(CCFLAGS='-fsanitize=undefined')
485 main.Append(LINKFLAGS='-fsanitize=undefined')
486
487 elif main['CLANG']:
488 # Check for a supported version of clang, >= 3.1 is needed to
489 # support similar features as gcc 4.8. See
490 # http://clang.llvm.org/cxx_status.html for details
491 clang_version_re = re.compile(".* version (\d+\.\d+)")
492 clang_version_match = clang_version_re.search(CXX_version)
493 if (clang_version_match):
494 clang_version = clang_version_match.groups()[0]
495 if compareVersions(clang_version, "3.1") < 0:
496 print('Error: clang version 3.1 or newer required.')
497 print(' Installed version:', clang_version)
498 Exit(1)
499 else:
500 print('Error: Unable to determine clang version.')
501 Exit(1)
502
503 # clang has a few additional warnings that we disable: extraneous
504 # parentheses are allowed due to Ruby's printing of the AST, and
505 # self-assignments are allowed as the generated CPU code
506 # relies on this.
507 main.Append(CCFLAGS=['-Wno-parentheses',
508 '-Wno-self-assign',
509 # Some versions of libstdc++ (4.8?) seem to
510 # use struct hash and class hash
511 # interchangeably.
512 '-Wno-mismatched-tags',
513 ])
514
515 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])
516
517 # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
518 # opposed to libstdc++, as the latter is dated.
519 if sys.platform == "darwin":
520 main.Append(CXXFLAGS=['-stdlib=libc++'])
521 main.Append(LIBS=['c++'])
522
523 # On FreeBSD we need libthr.
524 if sys.platform.startswith('freebsd'):
525 main.Append(LIBS=['thr'])
526
527 # We require clang >= 3.1, so there is no need to check any
528 # versions here.
529 if GetOption('with_ubsan'):
530 if GetOption('with_asan'):
531 main.Append(CCFLAGS=['-fsanitize=address,undefined',
532 '-fno-omit-frame-pointer'],
533 LINKFLAGS='-fsanitize=address,undefined')
534 else:
535 main.Append(CCFLAGS='-fsanitize=undefined',
536 LINKFLAGS='-fsanitize=undefined')
537
538 elif GetOption('with_asan'):
539 main.Append(CCFLAGS=['-fsanitize=address',
540 '-fno-omit-frame-pointer'],
541 LINKFLAGS='-fsanitize=address')
542
543 else:
544 print(termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal, end=' ')
545 print("Don't know what compiler options to use for your compiler.")
546 print(termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX'])
547 print(termcap.Yellow + ' version:' + termcap.Normal, end=' ')
548 if not CXX_version:
549 print(termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +
550 termcap.Normal)
551 else:
552 print(CXX_version.replace('\n', '<nl>'))
553 print(" If you're trying to use a compiler other than GCC")
554 print(" or clang, there appears to be something wrong with your")
555 print(" environment.")
556 print(" ")
557 print(" If you are trying to use a compiler other than those listed")
558 print(" above you will need to fix SConstruct and ")
559 print(" src/SConscript to support that compiler.")
560 Exit(1)
561
562 # Set up common yacc/bison flags (needed for Ruby)
563 main['YACCFLAGS'] = '-d'
564 main['YACCHXXFILESUFFIX'] = '.hh'
565
566 # Do this after we save settings back, or else we'll tack on an
567 # extra 'qdo' every time we run scons.
568 if main['BATCH']:
569 main['CC'] = main['BATCH_CMD'] + ' ' + main['CC']
570 main['CXX'] = main['BATCH_CMD'] + ' ' + main['CXX']
571 main['AS'] = main['BATCH_CMD'] + ' ' + main['AS']
572 main['AR'] = main['BATCH_CMD'] + ' ' + main['AR']
573 main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']
574
575 if sys.platform == 'cygwin':
576 # cygwin has some header file issues...
577 main.Append(CCFLAGS=["-Wno-uninitialized"])
578
579 # Check for the protobuf compiler
580 protoc_version = readCommand([main['PROTOC'], '--version'],
581 exception='').split()
582
583 # First two words should be "libprotoc x.y.z"
584 if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
585 print(termcap.Yellow + termcap.Bold +
586 'Warning: Protocol buffer compiler (protoc) not found.\n' +
587 ' Please install protobuf-compiler for tracing support.' +
588 termcap.Normal)
589 main['PROTOC'] = False
590 else:
591 # Based on the availability of the compress stream wrappers,
592 # require 2.1.0
593 min_protoc_version = '2.1.0'
594 if compareVersions(protoc_version[1], min_protoc_version) < 0:
595 print(termcap.Yellow + termcap.Bold +
596 'Warning: protoc version', min_protoc_version,
597 'or newer required.\n' +
598 ' Installed version:', protoc_version[1],
599 termcap.Normal)
600 main['PROTOC'] = False
601 else:
602 # Attempt to determine the appropriate include path and
603 # library path using pkg-config, which means we also need to
604 # check for pkg-config. Note that it is possible to use
605 # protobuf without the involvement of pkg-config. Later on we
606 # do a library config check, and at that point the test
607 # will fail if libprotobuf cannot be found.
608 if readCommand(['pkg-config', '--version'], exception=''):
609 try:
610 # Attempt to establish what linking flags to add for protobuf
611 # using pkg-config
612 main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
613 except:
614 print(termcap.Yellow + termcap.Bold +
615 'Warning: pkg-config could not get protobuf flags.' +
616 termcap.Normal)
617
618
619 # Check for 'timeout' from GNU coreutils. If present, regressions will
620 # be run with a time limit. We require version 8.13 since we rely on
621 # support for the '--foreground' option.
622 if sys.platform.startswith('freebsd'):
623 timeout_lines = readCommand(['gtimeout', '--version'],
624 exception='').splitlines()
625 else:
626 timeout_lines = readCommand(['timeout', '--version'],
627 exception='').splitlines()
628 # Get the first line and tokenize it
629 timeout_version = timeout_lines[0].split() if timeout_lines else []
630 main['TIMEOUT'] = timeout_version and \
631 compareVersions(timeout_version[-1], '8.13') >= 0
632
633 # Add a custom Check function to test for structure members.
634 def CheckMember(context, include, decl, member, include_quotes="<>"):
635 context.Message("Checking for member %s in %s..." %
636 (member, decl))
637 text = """
638 #include %(header)s
639 int main(){
640 %(decl)s test;
641 (void)test.%(member)s;
642 return 0;
643 };
644 """ % { "header" : include_quotes[0] + include + include_quotes[1],
645 "decl" : decl,
646 "member" : member,
647 }
648
649 ret = context.TryCompile(text, extension=".cc")
650 context.Result(ret)
651 return ret
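
# For example, the HAVE_PERF_ATTR_EXCLUDE_HOST check further down expands
# (roughly) to compiling this test program:
#
# #include <linux/perf_event.h>
# int main(){
#     struct perf_event_attr test;
#     (void)test.exclude_host;
#     return 0;
# };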
652
653 # Platform-specific configuration. Note again that we assume that all
654 # builds under a given build root run on the same host platform.
655 conf = Configure(main,
656 conf_dir = joinpath(build_root, '.scons_config'),
657 log_file = joinpath(build_root, 'scons_config.log'),
658 custom_tests = {
659 'CheckMember' : CheckMember,
660 })
661
662 # Check if we should compile a 64 bit binary on Mac OS X/Darwin
663 try:
664 import platform
665 uname = platform.uname()
666 if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
667 if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
668 main.Append(CCFLAGS=['-arch', 'x86_64'])
669 main.Append(CFLAGS=['-arch', 'x86_64'])
670 main.Append(LINKFLAGS=['-arch', 'x86_64'])
671 main.Append(ASFLAGS=['-arch', 'x86_64'])
672 except:
673 pass
674
675 # Recent versions of scons substitute a "Null" object for Configure()
676 # when configuration isn't necessary, e.g., if the "--help" option is
677 # present. Unfortunately this Null object always returns false,
678 # breaking all our configuration checks. We replace it with our own
679 # more optimistic null object that returns True instead.
680 if not conf:
681 def NullCheck(*args, **kwargs):
682 return True
683
684 class NullConf:
685 def __init__(self, env):
686 self.env = env
687 def Finish(self):
688 return self.env
689 def __getattr__(self, mname):
690 return NullCheck
691
692 conf = NullConf(main)
693
694 # Cache build files in the supplied directory.
695 if main['M5_BUILD_CACHE']:
696 print('Using build cache located at', main['M5_BUILD_CACHE'])
697 CacheDir(main['M5_BUILD_CACHE'])
698
699 main['USE_PYTHON'] = not GetOption('without_python')
700 if main['USE_PYTHON']:
701 # Find Python include and library directories for embedding the
702 # interpreter. We rely on python-config to resolve the appropriate
703 # includes and linker flags. ParseConfig does not seem to understand
704 # the more exotic linker flags such as -Xlinker and -export-dynamic so
705 # we add them explicitly below. If you want to link in an alternate
706 # version of python, see above for instructions on how to invoke
707 # scons with the appropriate PATH set.
708 #
709 # First we check if python2-config exists, else we use python-config
710 python_config = readCommand(['which', 'python2-config'],
711 exception='').strip()
712 if not os.path.exists(python_config):
713 python_config = readCommand(['which', 'python-config'],
714 exception='').strip()
715 py_includes = readCommand([python_config, '--includes'],
716 exception='').split()
717 py_includes = filter(lambda s: match(r'.*\/include\/.*',s), py_includes)
718 # Strip the -I from the include folders before adding them to the
719 # CPPPATH
720 py_includes = map(lambda s: s[2:] if s.startswith('-I') else s, py_includes)
721 main.Append(CPPPATH=py_includes)
722
723 # Read the linker flags and split them into libraries and other link
724 # flags. The libraries are added later through the call to CheckLib.
725 py_ld_flags = readCommand([python_config, '--ldflags'],
726 exception='').split()
727 py_libs = []
728 for lib in py_ld_flags:
729 if not lib.startswith('-l'):
730 main.Append(LINKFLAGS=[lib])
731 else:
732 lib = lib[2:]
733 if lib not in py_libs:
734 py_libs.append(lib)
735
736 # verify that this stuff works
737 if not conf.CheckHeader('Python.h', '<>'):
738 print("Error: can't find Python.h header in", py_includes)
739 print("Install Python headers (package python-dev on " +
740 "Ubuntu and RedHat)")
741 Exit(1)
742
743 for lib in py_libs:
744 if not conf.CheckLib(lib):
745 print("Error: can't find library %s required by python" % lib)
746 Exit(1)
747
748 # On Solaris you need to use libsocket for socket ops
749 if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
750 if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
751 print("Can't find library with socket calls (e.g. accept())")
752 Exit(1)
753
754 # Check for zlib. If the check passes, libz will be automatically
755 # added to the LIBS environment variable.
756 if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
757 print('Error: did not find needed zlib compression library '
758 'and/or zlib.h header file.')
759 print(' Please install zlib and try again.')
760 Exit(1)
761
762 # If we have the protobuf compiler, also make sure we have the
763 # development libraries. If the check passes, libprotobuf will be
764 # automatically added to the LIBS environment variable. After
765 # this, we can use the HAVE_PROTOBUF flag to determine if we have
766 # got both protoc and libprotobuf available.
767 main['HAVE_PROTOBUF'] = main['PROTOC'] and \
768 conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
769 'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
770
771 # If we have the compiler but not the library, print another warning.
772 if main['PROTOC'] and not main['HAVE_PROTOBUF']:
773 print(termcap.Yellow + termcap.Bold +
774 'Warning: did not find protocol buffer library and/or headers.\n' +
775 ' Please install libprotobuf-dev for tracing support.' +
776 termcap.Normal)
777
778 # Check for librt.
779 have_posix_clock = \
780 conf.CheckLibWithHeader(None, 'time.h', 'C',
781 'clock_nanosleep(0,0,NULL,NULL);') or \
782 conf.CheckLibWithHeader('rt', 'time.h', 'C',
783 'clock_nanosleep(0,0,NULL,NULL);')
784
785 have_posix_timers = \
786 conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
787 'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
788
789 if not GetOption('without_tcmalloc'):
790 if conf.CheckLib('tcmalloc'):
791 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
792 elif conf.CheckLib('tcmalloc_minimal'):
793 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
794 else:
795 print(termcap.Yellow + termcap.Bold +
796 "You can get a 12% performance improvement by "
797 "installing tcmalloc (libgoogle-perftools-dev package "
798 "on Ubuntu or RedHat)." + termcap.Normal)
799
800
801 # Detect back trace implementations. The last implementation in the
802 # list will be used by default.
803 backtrace_impls = [ "none" ]
804
805 backtrace_checker = 'char temp;' + \
806 ' backtrace_symbols_fd((void*)&temp, 0, 0);'
807 if conf.CheckLibWithHeader(None, 'execinfo.h', 'C', backtrace_checker):
808 backtrace_impls.append("glibc")
809 elif conf.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
810 backtrace_checker):
811 # NetBSD and FreeBSD need libexecinfo.
812 backtrace_impls.append("glibc")
813 main.Append(LIBS=['execinfo'])
814
815 if backtrace_impls[-1] == "none":
816 default_backtrace_impl = "none"
817 print(termcap.Yellow + termcap.Bold +
818 "No suitable back trace implementation found." +
819 termcap.Normal)
820
821 if not have_posix_clock:
822 print("Can't find library for POSIX clocks.")
823
824 # Check for <fenv.h> (C99 FP environment control)
825 have_fenv = conf.CheckHeader('fenv.h', '<>')
826 if not have_fenv:
827 print("Warning: Header file <fenv.h> not found.")
828 print(" This host has no IEEE FP rounding mode control.")
829
830 # Check for <png.h> (libpng library needed if wanting to dump
831 # frame buffer image in png format)
832 have_png = conf.CheckHeader('png.h', '<>')
833 if not have_png:
834 print("Warning: Header file <png.h> not found.")
835 print(" This host has no libpng library.")
836 print(" Disabling support for PNG framebuffers.")
837
838 # Check if we should enable KVM-based hardware virtualization. The API
839 # we rely on exists since version 2.6.36 of the kernel, but somehow
840 # the KVM_API_VERSION does not reflect the change. We test for one of
841 # the types as a fall back.
842 have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
843 if not have_kvm:
844 print("Info: Compatible header file <linux/kvm.h> not found, "
845 "disabling KVM support.")
846
847 # Check if the TUN/TAP driver is available.
848 have_tuntap = conf.CheckHeader('linux/if_tun.h', '<>')
849 if not have_tuntap:
850 print("Info: Compatible header file <linux/if_tun.h> not found.")
851
852 # x86 needs support for xsave. We test for the structure here since we
853 # won't be able to run new tests by the time we know which ISA we're
854 # targeting.
855 have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
856 '#include <linux/kvm.h>') != 0
857
858 # Check if the requested target ISA is compatible with the host
859 def is_isa_kvm_compatible(isa):
860 try:
861 import platform
862 host_isa = platform.machine()
863 except:
864 print("Warning: Failed to determine host ISA.")
865 return False
866
867 if not have_posix_timers:
868 print("Warning: Can not enable KVM, host seems to lack support "
869 "for POSIX timers")
870 return False
871
872 if isa == "arm":
873 return host_isa in ( "armv7l", "aarch64" )
874 elif isa == "x86":
875 if host_isa != "x86_64":
876 return False
877
878 if not have_kvm_xsave:
879 print("KVM on x86 requires xsave support in kernel headers.")
880 return False
881
882 return True
883 else:
884 return False
885
886
887 # Check if the exclude_host attribute is available. We want this to
888 # get accurate instruction counts in KVM.
889 main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
890 'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
891
892
893 ######################################################################
894 #
895 # Finish the configuration
896 #
897 main = conf.Finish()
898
899 ######################################################################
900 #
901 # Collect all non-global variables
902 #
903
904 # Define the universe of supported ISAs
905 all_isa_list = [ ]
906 all_gpu_isa_list = [ ]
907 Export('all_isa_list')
908 Export('all_gpu_isa_list')
909
910 class CpuModel(object):
911 '''The CpuModel class encapsulates everything the ISA parser needs to
912 know about a particular CPU model.'''
913
914 # Dict of available CPU model objects. Accessible as CpuModel.dict.
915 dict = {}
916
917 # Constructor. Automatically adds models to CpuModel.dict.
918 def __init__(self, name, default=False):
919 self.name = name # name of model
920
921 # This cpu is enabled by default
922 self.default = default
923
924 # Add self to dict
925 if name in CpuModel.dict:
926 raise AttributeError("CpuModel '%s' already registered" % name)
927 CpuModel.dict[name] = self
928
929 Export('CpuModel')
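
# CPU models register themselves from SConsopts/SConscript files in the
# source tree, along the lines of (illustrative):
# CpuModel('AtomicSimpleCPU', default=True)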
930
931 # Sticky variables get saved in the variables file so they persist from
932 # one invocation to the next (unless overridden, in which case the new
933 # value becomes sticky).
934 sticky_vars = Variables(args=ARGUMENTS)
935 Export('sticky_vars')
936
937 # Sticky variables that should be exported
938 export_vars = []
939 Export('export_vars')
940
941 # For Ruby
942 all_protocols = []
943 Export('all_protocols')
944 protocol_dirs = []
945 Export('protocol_dirs')
946 slicc_includes = []
947 Export('slicc_includes')
948
949 # Walk the tree and execute all SConsopts scripts that will add to the
950 # above variables
951 if GetOption('verbose'):
952 print("Reading SConsopts")
953 for bdir in [ base_dir ] + extras_dir_list:
954 if not isdir(bdir):
955 print("Error: directory '%s' does not exist" % bdir)
956 Exit(1)
957 for root, dirs, files in os.walk(bdir):
958 if 'SConsopts' in files:
959 if GetOption('verbose'):
960 print("Reading", joinpath(root, 'SConsopts'))
961 SConscript(joinpath(root, 'SConsopts'))
962
963 all_isa_list.sort()
964 all_gpu_isa_list.sort()
965
966 sticky_vars.AddVariables(
967 EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
968 EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
969 ListVariable('CPU_MODELS', 'CPU models',
970 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
971 sorted(CpuModel.dict.keys())),
972 BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
973 False),
974 BoolVariable('SS_COMPATIBLE_FP',
975 'Make floating-point results compatible with SimpleScalar',
976 False),
977 BoolVariable('USE_SSE2',
978 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
979 False),
980 BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
981 BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
982 BoolVariable('USE_PNG', 'Enable support for PNG images', have_png),
983 BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability',
984 False),
985 BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models',
986 have_kvm),
987 BoolVariable('USE_TUNTAP',
988 'Enable using a tap device to bridge to the host network',
989 have_tuntap),
990 BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
991 EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
992 all_protocols),
993 EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
994 backtrace_impls[-1], backtrace_impls)
995 )
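
# Like the global variables above, these can be given on the scons command
# line and are then saved as sticky settings for the build directory, e.g.
# (illustrative values):
# % scons build/ALPHA/gem5.opt USE_SSE2=True CPU_MODELS=AtomicSimpleCPU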
996
997 # These variables get exported to #defines in config/*.hh (see src/SConscript).
998 export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
999 'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'USE_TUNTAP',
1000 'PROTOCOL', 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST',
1001 'USE_PNG']
1002
1003 ###################################################
1004 #
1005 # Define a SCons builder for configuration flag headers.
1006 #
1007 ###################################################
1008
1009 # This function generates a config header file that #defines the
1010 # variable symbol to the current variable setting (0 or 1). The source
1011 # operands are the name of the variable and a Value node containing the
1012 # value of the variable.
1013 def build_config_file(target, source, env):
1014 (variable, value) = [s.get_contents() for s in source]
1015 f = open(str(target[0]), 'w')
1016 print('#define', variable, value, file=f)
1017 f.close()
1018 return None
1019
1020 # Combine the two functions into a scons Action object.
1021 config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))
1022
1023 # The emitter munges the source & target node lists to reflect what
1024 # we're really doing.
1025 def config_emitter(target, source, env):
1026 # extract variable name from Builder arg
1027 variable = str(target[0])
1028 # True target is config header file
1029 target = joinpath('config', variable.lower() + '.hh')
1030 val = env[variable]
1031 if isinstance(val, bool):
1032 # Force value to 0/1
1033 val = int(val)
1034 elif isinstance(val, str):
1035 val = '"' + val + '"'
1036
1037 # Sources are variable name & value (packaged in SCons Value nodes)
1038 return ([target], [Value(variable), Value(val)])
1039
1040 config_builder = Builder(emitter = config_emitter, action = config_action)
1041
1042 main.Append(BUILDERS = { 'ConfigFile' : config_builder })
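
# For instance, with USE_KVM set in the environment, env.ConfigFile('USE_KVM')
# would (roughly) produce config/use_kvm.hh containing a single line:
# #define USE_KVM 1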
1043
1044 ###################################################
1045 #
1046 # Builders for static and shared partially linked object files.
1047 #
1048 ###################################################
1049
1050 partial_static_builder = Builder(action=SCons.Defaults.LinkAction,
1051 src_suffix='$OBJSUFFIX',
1052 src_builder=['StaticObject', 'Object'],
1053 LINKFLAGS='$PLINKFLAGS',
1054 LIBS='')
1055
1056 def partial_shared_emitter(target, source, env):
1057 for tgt in target:
1058 tgt.attributes.shared = 1
1059 return (target, source)
1060 partial_shared_builder = Builder(action=SCons.Defaults.ShLinkAction,
1061 emitter=partial_shared_emitter,
1062 src_suffix='$SHOBJSUFFIX',
1063 src_builder='SharedObject',
1064 SHLINKFLAGS='$PSHLINKFLAGS',
1065 LIBS='')
1066
1067 main.Append(BUILDERS = { 'PartialShared' : partial_shared_builder,
1068 'PartialStatic' : partial_static_builder })
1069
1070 # builds in ext are shared across all configs in the build root.
1071 ext_dir = abspath(joinpath(str(main.root), 'ext'))
1072 ext_build_dirs = []
1073 for root, dirs, files in os.walk(ext_dir):
1074 if 'SConscript' in files:
1075 build_dir = os.path.relpath(root, ext_dir)
1076 ext_build_dirs.append(build_dir)
1077 main.SConscript(joinpath(root, 'SConscript'),
1078 variant_dir=joinpath(build_root, build_dir))
1079
1080 main.Prepend(CPPPATH=Dir('ext/pybind11/include/'))
1081
1082 ###################################################
1083 #
1084 # This builder and wrapper method are used to set up a directory with
1085 # switching headers. Those are headers that live in a generic location and
1086 # include more specific headers from a directory chosen at build time
1087 # based on the current build settings.
1088 #
1089 ###################################################
1090
1091 def build_switching_header(target, source, env):
1092 path = str(target[0])
1093 subdir = str(source[0])
1094 dp, fp = os.path.split(path)
1095 dp = os.path.relpath(os.path.realpath(dp),
1096 os.path.realpath(env['BUILDDIR']))
1097 with open(path, 'w') as hdr:
1098 print('#include "%s/%s/%s"' % (dp, subdir, fp), file=hdr)
1099
1100 switching_header_action = MakeAction(build_switching_header,
1101 Transform('GENERATE'))
1102
1103 switching_header_builder = Builder(action=switching_header_action,
1104 source_factory=Value,
1105 single_source=True)
1106
1107 main.Append(BUILDERS = { 'SwitchingHeader': switching_header_builder })
1108
1109 def switching_headers(self, headers, source):
1110 for header in headers:
1111 self.SwitchingHeader(header, source)
1112
1113 main.AddMethod(switching_headers, 'SwitchingHeaders')
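
# A sketch of what this generates (hypothetical header name and subdirectory):
# env.SwitchingHeaders(['arch/isa.hh'], 'arm') would emit a header whose only
# content is roughly
# #include "arch/arm/isa.hh"
# with the directory part made relative to the build directory.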
1114
1115 ###################################################
1116 #
1117 # Define build environments for selected configurations.
1118 #
1119 ###################################################
1120
1121 for variant_path in variant_paths:
1122 if not GetOption('silent'):
1123 print("Building in", variant_path)
1124
1125 # Make a copy of the build-root environment to use for this config.
1126 env = main.Clone()
1127 env['BUILDDIR'] = variant_path
1128
1129 # variant_dir is the tail component of build path, and is used to
1130 # determine the build parameters (e.g., 'ALPHA_SE')
1131 (build_root, variant_dir) = splitpath(variant_path)
1132
1133 # Set env variables according to the build directory config.
1134 sticky_vars.files = []
1135 # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
1136 # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
1137 # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
1138 current_vars_file = joinpath(build_root, 'variables', variant_dir)
1139 if isfile(current_vars_file):
1140 sticky_vars.files.append(current_vars_file)
1141 if not GetOption('silent'):
1142 print("Using saved variables file %s" % current_vars_file)
1143 elif variant_dir in ext_build_dirs:
1144 # Things in ext are built without a variant directory.
1145 continue
1146 else:
1147 # Build dir-specific variables file doesn't exist.
1148
1149 # Make sure the directory is there so we can create it later
1150 opt_dir = dirname(current_vars_file)
1151 if not isdir(opt_dir):
1152 mkdir(opt_dir)
1153
1154 # Get default build variables from source tree. Variables are
1155 # normally determined by the name of $VARIANT_DIR, but can be
1156 # overridden by '--default=' arg on command line.
1157 default = GetOption('default')
1158 opts_dir = joinpath(main.root.abspath, 'build_opts')
1159 if default:
1160 default_vars_files = [joinpath(build_root, 'variables', default),
1161 joinpath(opts_dir, default)]
1162 else:
1163 default_vars_files = [joinpath(opts_dir, variant_dir)]
1164 existing_files = filter(isfile, default_vars_files)
1165 if existing_files:
1166 default_vars_file = existing_files[0]
1167 sticky_vars.files.append(default_vars_file)
1168 print("Variables file %s not found,\n using defaults in %s"
1169 % (current_vars_file, default_vars_file))
1170 else:
1171 print("Error: cannot find variables file %s or "
1172 "default file(s) %s"
1173 % (current_vars_file, ' or '.join(default_vars_files)))
1174 Exit(1)
1175
1176 # Apply current variable settings to env
1177 sticky_vars.Update(env)
1178
1179 help_texts["local_vars"] += \
1180 "Build variables for %s:\n" % variant_dir \
1181 + sticky_vars.GenerateHelpText(env)
1182
1183 # Process variable settings.
1184
1185 if not have_fenv and env['USE_FENV']:
1186 print("Warning: <fenv.h> not available; "
1187 "forcing USE_FENV to False in", variant_dir + ".")
1188 env['USE_FENV'] = False
1189
1190 if not env['USE_FENV']:
1191 print("Warning: No IEEE FP rounding mode control in",
1192 variant_dir + ".")
1193 print(" FP results may deviate slightly from other platforms.")
1194
1195 if not have_png and env['USE_PNG']:
1196 print("Warning: <png.h> not available; "
1197 "forcing USE_PNG to False in", variant_dir + ".")
1198 env['USE_PNG'] = False
1199
1200 if env['USE_PNG']:
1201 env.Append(LIBS=['png'])
1202
1203 if env['EFENCE']:
1204 env.Append(LIBS=['efence'])
1205
1206 if env['USE_KVM']:
1207 if not have_kvm:
1208 print("Warning: Can not enable KVM, host seems to "
1209 "lack KVM support")
1210 env['USE_KVM'] = False
1211 elif not is_isa_kvm_compatible(env['TARGET_ISA']):
1212 print("Info: KVM support disabled due to unsupported host and "
1213 "target ISA combination")
1214 env['USE_KVM'] = False
1215
1216 if env['USE_TUNTAP']:
1217 if not have_tuntap:
1218 print("Warning: Can't connect EtherTap with a tap device.")
1219 env['USE_TUNTAP'] = False
1220
1221 if env['BUILD_GPU']:
1222 env.Append(CPPDEFINES=['BUILD_GPU'])
1223
1224 # Warn about missing optional functionality
1225 if env['USE_KVM']:
1226 if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
1227 print("Warning: perf_event headers lack support for the "
1228 "exclude_host attribute. KVM instruction counts will "
1229 "be inaccurate.")
1230
1231 # Save sticky variable settings back to current variables file
1232 sticky_vars.Save(current_vars_file, env)
1233
1234 if env['USE_SSE2']:
1235 env.Append(CCFLAGS=['-msse2'])
1236
1237 # The src/SConscript file sets up the build rules in 'env' according
1238 # to the configured variables. It returns a list of environments,
1239 # one for each variant build (debug, opt, etc.)
1240 SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')
1241
1242 # base help text
1243 Help('''
1244 Usage: scons [scons options] [build variables] [target(s)]
1245
1246 Extra scons options:
1247 %(options)s
1248
1249 Global build variables:
1250 %(global_vars)s
1251
1252 %(local_vars)s
1253 ''' % help_texts)