# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
| 5 | +"""This modules defines a class for describing LLVM autotuners.""" |
import tempfile
from pathlib import Path
from typing import Any, Dict

from llvm_autotuning.autotuners.greedy import greedy  # noqa autotuner
from llvm_autotuning.autotuners.nevergrad_ import nevergrad  # noqa autotuner
from llvm_autotuning.autotuners.opentuner_ import opentuner_ga  # noqa autotuner
from llvm_autotuning.autotuners.random_ import random  # noqa autotuner
from llvm_autotuning.optimization_target import OptimizationTarget
from pydantic import BaseModel, validator

from compiler_gym.compiler_env_state import CompilerEnvState
from compiler_gym.envs import CompilerEnv
from compiler_gym.util.capture_output import capture_output
from compiler_gym.util.runfiles_path import transient_cache_path
from compiler_gym.util.temporary_working_directory import temporary_working_directory
from compiler_gym.util.timer import Timer


class Autotuner(BaseModel):
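    """Describes a single autotuning run: the algorithm to use, the
    optimization target, and the search budget.

    A rough usage sketch (the enum member, environment name, and benchmark URI
    below are illustrative assumptions, not taken from this module):

        import compiler_gym

        tuner = Autotuner(
            algorithm="random",
            optimization_target=OptimizationTarget.CODESIZE,
            search_time_seconds=60,
        )
        with compiler_gym.make("llvm-v0", benchmark="cbench-v1/crc32") as env:
            state = tuner(env)
        print(state.reward)
    """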

    algorithm: str
    """The name of the autotuner algorithm."""

    optimization_target: OptimizationTarget
    """The target that the autotuner is optimizing for."""

    search_time_seconds: int
    """The search budget of the autotuner, in seconds."""

    algorithm_config: Dict[str, Any] = {}
    """An optional dictionary of keyword arguments for the autotuner function."""

    @property
    def autotune(self):
        """Return the autotuner function for this algorithm.

        An autotuner function takes a single CompilerEnv argument and optional
        keyword configuration arguments (determined by algorithm_config) and
        tunes the environment, returning nothing.
        """
        try:
            return globals()[self.algorithm]
        except KeyError as e:
            raise ValueError(
                f"Unknown autotuner: {self.algorithm}.\n"
                f"Make sure the {self.algorithm}() function definition is available "
                f"in the global namespace of {__file__}."
            ) from e
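
    # For reference, each autotuner registered above is expected to be a callable
    # along these lines (a sketch inferred from the docstring above and the call
    # site in __call__(); the real implementations live in
    # llvm_autotuning.autotuners.*, and "my_autotuner" is a hypothetical name):
    #
    #     def my_autotuner(
    #         env: CompilerEnv,
    #         optimization_target: OptimizationTarget,
    #         search_time_seconds: int,
    #         seed: int,
    #         **kwargs,
    #     ) -> None:
    #         ...  # Mutate `env` in place; the caller reads the final state back.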

    @property
    def autotune_kwargs(self) -> Dict[str, Any]:
        """Get the keyword arguments dictionary for the autotuner."""
        kwargs = {
            "optimization_target": self.optimization_target,
            "search_time_seconds": self.search_time_seconds,
        }
        kwargs.update(self.algorithm_config)
        return kwargs

    def __call__(self, env: CompilerEnv, seed: int = 0xCC) -> CompilerEnvState:
        """Autotune the given environment.

        :param env: The environment to autotune.

        :param seed: The random seed for the autotuner.

        :returns: A CompilerEnvState describing the result of the autotuning run.
        """
        # Run the autotuner in a temporary working directory and capture the
        # stdout/stderr.
        with tempfile.TemporaryDirectory(
            dir=transient_cache_path("."), prefix="autotune-"
        ) as tmpdir:
            with temporary_working_directory(Path(tmpdir)):
                with capture_output():
                    with Timer() as timer:
                        self.autotune(env, seed=seed, **self.autotune_kwargs)

                return CompilerEnvState(
                    benchmark=env.benchmark.uri,
                    commandline=env.commandline(),
                    walltime=timer.time,
                    reward=self.optimization_target.final_reward(env),
                )

    # === Start of implementation details. ===

    @validator("algorithm_config", pre=True)
    def validate_algorithm_config(cls, value) -> Dict[str, Any]:
        # Allow algorithm_config to be omitted or null by coercing falsy values
        # to an empty dict.
        return value or {}