diff --git a/tools/engine_tool/test/build_command_test.dart b/tools/engine_tool/test/build_command_test.dart index 47f803d9a0abb..bc2551a91c6cb 100644 --- a/tools/engine_tool/test/build_command_test.dart +++ b/tools/engine_tool/test/build_command_test.dart @@ -8,7 +8,6 @@ import 'dart:convert' as convert; import 'package:engine_build_configs/engine_build_configs.dart'; import 'package:engine_tool/src/build_utils.dart'; import 'package:engine_tool/src/commands/command_runner.dart'; -import 'package:engine_tool/src/environment.dart'; import 'package:engine_tool/src/logger.dart'; import 'package:path/path.dart' as path; import 'package:platform/platform.dart'; @@ -18,494 +17,457 @@ import 'fixtures.dart' as fixtures; import 'utils.dart'; void main() { - final BuilderConfig linuxTestConfig = BuilderConfig.fromJson( + final linuxTestConfig = BuilderConfig.fromJson( path: 'ci/builders/linux_test_config.json', map: convert.jsonDecode(fixtures.testConfig('Linux', Platform.linux)) as Map, ); - final BuilderConfig macTestConfig = BuilderConfig.fromJson( + final macTestConfig = BuilderConfig.fromJson( path: 'ci/builders/mac_test_config.json', map: convert.jsonDecode(fixtures.testConfig('Mac-12', Platform.macOS)) as Map, ); - final BuilderConfig winTestConfig = BuilderConfig.fromJson( + final winTestConfig = BuilderConfig.fromJson( path: 'ci/builders/win_test_config.json', map: convert.jsonDecode(fixtures.testConfig('Windows-11', Platform.windows)) as Map, ); - final Map configs = { + final configs = { 'linux_test_config': linuxTestConfig, 'mac_test_config': macTestConfig, 'win_test_config': winTestConfig, }; - final List cannedProcesses = [ - CannedProcess((List command) => command.contains('desc'), - stdout: fixtures.gnDescOutput()), + final cannedProcesses = [ + CannedProcess( + (command) => command.contains('desc'), + stdout: fixtures.gnDescOutput(), + ), ]; test('can find host runnable build', () async { - final TestEnvironment testEnv = TestEnvironment.withTestEngine( + final testEnv = TestEnvironment.withTestEngine( cannedProcesses: cannedProcesses, ); - try { - final List result = - runnableBuilds(testEnv.environment, configs, true); - expect(result.length, equals(4)); - expect(result[0].name, equals('ci/build_name')); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final result = runnableBuilds(testEnv.environment, configs, true); + expect(result.length, equals(4)); + expect(result[0].name, equals('ci/build_name')); }); test('build command invokes gn', () async { - final TestEnvironment testEnv = TestEnvironment.withTestEngine( + final testEnv = TestEnvironment.withTestEngine( cannedProcesses: cannedProcesses, ); - try { - final ToolCommandRunner runner = ToolCommandRunner( - environment: testEnv.environment, - configs: configs, - ); - final int result = await runner.run([ - 'build', - '--config', - 'ci/build_name', - ]); - expect(result, equals(0)); - expect(testEnv.processHistory.length, greaterThanOrEqualTo(1)); - expect(testEnv.processHistory[0].command[0], contains('gn')); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final runner = ToolCommandRunner( + environment: testEnv.environment, + configs: configs, + ); + final result = await runner.run([ + 'build', + '--config', + 'ci/build_name', + ]); + expect(result, equals(0)); + expect(testEnv.processHistory.length, greaterThanOrEqualTo(1)); + expect(testEnv.processHistory[0].command[0], contains('gn')); }); test('build command invokes ninja', () async { - final TestEnvironment testEnv 
= TestEnvironment.withTestEngine( + final testEnv = TestEnvironment.withTestEngine( cannedProcesses: cannedProcesses, ); - try { - final ToolCommandRunner runner = ToolCommandRunner( - environment: testEnv.environment, - configs: configs, - ); - final int result = await runner.run([ - 'build', - '--config', - 'ci/build_name', - ]); - expect(result, equals(0)); - expect(testEnv.processHistory.length, greaterThanOrEqualTo(2)); - expect(testEnv.processHistory[1].command[0], contains('ninja')); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final runner = ToolCommandRunner( + environment: testEnv.environment, + configs: configs, + ); + final result = await runner.run([ + 'build', + '--config', + 'ci/build_name', + ]); + expect(result, equals(0)); + expect(testEnv.processHistory.length, greaterThanOrEqualTo(2)); + expect(testEnv.processHistory[1].command[0], contains('ninja')); }); test('build command invokes generator', () async { - final TestEnvironment testEnv = TestEnvironment.withTestEngine( + final testEnv = TestEnvironment.withTestEngine( cannedProcesses: cannedProcesses, ); - try { - final ToolCommandRunner runner = ToolCommandRunner( - environment: testEnv.environment, - configs: configs, - ); - final int result = await runner.run([ - 'build', - '--config', - 'ci/build_name', - ]); - expect(result, equals(0)); - expect(testEnv.processHistory.length, greaterThanOrEqualTo(3)); - expect( - testEnv.processHistory[2].command, - containsAllInOrder(['python3', 'gen/script.py']), - ); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final runner = ToolCommandRunner( + environment: testEnv.environment, + configs: configs, + ); + final result = await runner.run([ + 'build', + '--config', + 'ci/build_name', + ]); + expect(result, equals(0)); + expect(testEnv.processHistory.length, greaterThanOrEqualTo(3)); + expect( + testEnv.processHistory[2].command, + containsAllInOrder(['python3', 'gen/script.py']), + ); }); test('build command does not invoke tests', () async { - final TestEnvironment testEnv = TestEnvironment.withTestEngine( + final testEnv = TestEnvironment.withTestEngine( cannedProcesses: cannedProcesses, ); - try { - final ToolCommandRunner runner = ToolCommandRunner( - environment: testEnv.environment, - configs: configs, - ); - final int result = await runner.run([ - 'build', - '--config', - 'ci/build_name', - ]); - expect(result, equals(0)); - expect(testEnv.processHistory.length, lessThanOrEqualTo(4)); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final runner = ToolCommandRunner( + environment: testEnv.environment, + configs: configs, + ); + final result = await runner.run([ + 'build', + '--config', + 'ci/build_name', + ]); + expect(result, equals(0)); + expect(testEnv.processHistory.length, lessThanOrEqualTo(4)); }); test('build command runs rbe on an rbe build', () async { - final TestEnvironment testEnv = TestEnvironment.withTestEngine( + final testEnv = TestEnvironment.withTestEngine( withRbe: true, cannedProcesses: cannedProcesses, ); - try { - final ToolCommandRunner runner = ToolCommandRunner( - environment: testEnv.environment, - configs: configs, - ); - final int result = await runner.run([ - 'build', - '--config', - 'ci/android_debug_rbe_arm64', - ]); - expect(result, equals(0)); - expect(testEnv.processHistory[0].command[0], - contains(path.join('tools', 'gn'))); - expect(testEnv.processHistory[0].command[2], equals('--rbe')); - expect(testEnv.processHistory[1].command[0], - 
contains(path.join('reclient', 'bootstrap'))); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final runner = ToolCommandRunner( + environment: testEnv.environment, + configs: configs, + ); + final result = await runner.run([ + 'build', + '--config', + 'ci/android_debug_rbe_arm64', + ]); + expect(result, equals(0)); + expect(testEnv.processHistory[0].command[0], + contains(path.join('tools', 'gn'))); + expect(testEnv.processHistory[0].command[2], equals('--rbe')); + expect(testEnv.processHistory[1].command[0], + contains(path.join('reclient', 'bootstrap'))); }); test('build command plumbs -j to ninja', () async { - final TestEnvironment testEnv = TestEnvironment.withTestEngine( + final testEnv = TestEnvironment.withTestEngine( withRbe: true, cannedProcesses: cannedProcesses, ); - try { - final ToolCommandRunner runner = ToolCommandRunner( - environment: testEnv.environment, - configs: configs, - ); - final int result = await runner.run([ - 'build', - '--config', - 'ci/android_debug_rbe_arm64', - '-j', - '500', - ]); - expect(result, equals(0)); - expect(testEnv.processHistory[0].command[0], - contains(path.join('tools', 'gn'))); - expect(testEnv.processHistory[0].command[2], equals('--rbe')); - expect(testEnv.processHistory[2].command.contains('500'), isTrue); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final runner = ToolCommandRunner( + environment: testEnv.environment, + configs: configs, + ); + final result = await runner.run([ + 'build', + '--config', + 'ci/android_debug_rbe_arm64', + '-j', + '500', + ]); + expect(result, equals(0)); + expect(testEnv.processHistory[0].command[0], + contains(path.join('tools', 'gn'))); + expect(testEnv.processHistory[0].command[2], equals('--rbe')); + expect(testEnv.processHistory[2].command.contains('500'), isTrue); }); test('build command fails when rbe is enabled but not supported', () async { - final TestEnvironment testEnv = TestEnvironment.withTestEngine( + final testEnv = TestEnvironment.withTestEngine( cannedProcesses: cannedProcesses, // Intentionally omit withRbe: true. // That means the //flutter/build/rbe directory will not be created. 
); - try { - final ToolCommandRunner runner = ToolCommandRunner( - environment: testEnv.environment, - configs: configs, - ); - final int result = await runner.run([ - 'build', - '--config', - 'ci/android_debug_rbe_arm64', - '--rbe', - ]); - expect(result, equals(1)); - expect( - testEnv.testLogs.map((LogRecord r) => r.message).join(), - contains('RBE was requested but no RBE config was found'), - ); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final runner = ToolCommandRunner( + environment: testEnv.environment, + configs: configs, + ); + final result = await runner.run([ + 'build', + '--config', + 'ci/android_debug_rbe_arm64', + '--rbe', + ]); + expect(result, equals(1)); + expect( + testEnv.testLogs.map((LogRecord r) => r.message).join(), + contains('RBE was requested but no RBE config was found'), + ); }); test('build command does not run rbe when disabled', () async { - final TestEnvironment testEnv = TestEnvironment.withTestEngine( + final testEnv = TestEnvironment.withTestEngine( withRbe: true, cannedProcesses: cannedProcesses, ); - try { - final ToolCommandRunner runner = ToolCommandRunner( - environment: testEnv.environment, - configs: configs, - ); - final int result = await runner.run([ - 'build', - '--config', - 'ci/android_debug_rbe_arm64', - '--no-rbe', - ]); - expect(result, equals(0)); - expect(testEnv.processHistory[0].command[0], - contains(path.join('tools', 'gn'))); - expect(testEnv.processHistory[0].command, - isNot(contains(['--rbe']))); - expect(testEnv.processHistory[1].command[0], - contains(path.join('ninja', 'ninja'))); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final runner = ToolCommandRunner( + environment: testEnv.environment, + configs: configs, + ); + final result = await runner.run([ + 'build', + '--config', + 'ci/android_debug_rbe_arm64', + '--no-rbe', + ]); + expect(result, equals(0)); + expect(testEnv.processHistory[0].command[0], + contains(path.join('tools', 'gn'))); + expect(testEnv.processHistory[0].command, isNot(contains(['--rbe']))); + expect(testEnv.processHistory[1].command[0], + contains(path.join('ninja', 'ninja'))); }); test('build command does not run rbe when rbe configs do not exist', () async { - final TestEnvironment testEnv = TestEnvironment.withTestEngine( + final testEnv = TestEnvironment.withTestEngine( cannedProcesses: cannedProcesses, ); - try { - final ToolCommandRunner runner = ToolCommandRunner( - environment: testEnv.environment, - configs: configs, - ); - final int result = await runner.run([ - 'build', - '--config', - 'ci/android_debug_rbe_arm64', - ]); - expect(result, equals(0)); - expect(testEnv.processHistory[0].command[0], - contains(path.join('tools', 'gn'))); - expect(testEnv.processHistory[0].command, - isNot(contains(['--rbe']))); - expect(testEnv.processHistory[1].command[0], - contains(path.join('ninja', 'ninja'))); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final runner = ToolCommandRunner( + environment: testEnv.environment, + configs: configs, + ); + final result = await runner.run([ + 'build', + '--config', + 'ci/android_debug_rbe_arm64', + ]); + expect(result, equals(0)); + expect(testEnv.processHistory[0].command[0], + contains(path.join('tools', 'gn'))); + expect(testEnv.processHistory[0].command, isNot(contains(['--rbe']))); + expect(testEnv.processHistory[1].command[0], + contains(path.join('ninja', 'ninja'))); }); test('mangleConfigName removes the OS and adds ci/ as needed', () { - final TestEnvironment testEnv 
= TestEnvironment.withTestEngine( + final testEnv = TestEnvironment.withTestEngine( cannedProcesses: cannedProcesses, ); - try { - final Environment env = testEnv.environment; - expect(mangleConfigName(env, 'linux/build'), equals('build')); - expect(mangleConfigName(env, 'ci/build'), equals('ci/build')); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final env = testEnv.environment; + expect(mangleConfigName(env, 'linux/build'), equals('build')); + expect(mangleConfigName(env, 'ci/build'), equals('ci/build')); }); test('mangleConfigName throws when the input config name is malformed', () { - final TestEnvironment testEnv = TestEnvironment.withTestEngine( + final testEnv = TestEnvironment.withTestEngine( cannedProcesses: cannedProcesses, ); - try { - final Environment env = testEnv.environment; - expect( - () => mangleConfigName(env, 'build'), - throwsArgumentError, - ); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final env = testEnv.environment; + expect( + () => mangleConfigName(env, 'build'), + throwsArgumentError, + ); }); test('demangleConfigName adds the OS and removes ci/ as needed', () { - final TestEnvironment testEnv = TestEnvironment.withTestEngine( + final testEnv = TestEnvironment.withTestEngine( cannedProcesses: cannedProcesses, ); - try { - final Environment env = testEnv.environment; - expect(demangleConfigName(env, 'build'), equals('linux/build')); - expect(demangleConfigName(env, 'ci/build'), equals('ci/build')); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final env = testEnv.environment; + expect(demangleConfigName(env, 'build'), equals('linux/build')); + expect(demangleConfigName(env, 'ci/build'), equals('ci/build')); }); test('local config name on the command line is correctly translated', () async { - final BuilderConfig namespaceTestConfigs = BuilderConfig.fromJson( + final namespaceTestConfigs = BuilderConfig.fromJson( path: 'ci/builders/namespace_test_config.json', map: convert.jsonDecode(fixtures.configsToTestNamespacing) as Map, ); - final Map configs = { + final configs = { 'namespace_test_config': namespaceTestConfigs, }; - final TestEnvironment testEnv = TestEnvironment.withTestEngine( + final testEnv = TestEnvironment.withTestEngine( cannedProcesses: cannedProcesses, ); - try { - final ToolCommandRunner runner = ToolCommandRunner( - environment: testEnv.environment, - configs: configs, - ); - final int result = await runner.run([ - 'build', - '--config', - 'host_debug', - ]); - expect(result, equals(0)); - expect(testEnv.processHistory[1].command[0], - contains(path.join('ninja', 'ninja'))); - expect( - testEnv.processHistory[1].command[2], contains('local_host_debug')); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final runner = ToolCommandRunner( + environment: testEnv.environment, + configs: configs, + ); + final result = await runner.run([ + 'build', + '--config', + 'host_debug', + ]); + expect(result, equals(0)); + expect(testEnv.processHistory[1].command[0], + contains(path.join('ninja', 'ninja'))); + expect(testEnv.processHistory[1].command[2], contains('local_host_debug')); }); test('ci config name on the command line is correctly translated', () async { - final BuilderConfig namespaceTestConfigs = BuilderConfig.fromJson( + final namespaceTestConfigs = BuilderConfig.fromJson( path: 'ci/builders/namespace_test_config.json', map: convert.jsonDecode(fixtures.configsToTestNamespacing) as Map, ); - final Map configs = { + final configs = { 
'namespace_test_config': namespaceTestConfigs, }; - final TestEnvironment testEnv = TestEnvironment.withTestEngine( + final testEnv = TestEnvironment.withTestEngine( cannedProcesses: cannedProcesses, ); - final Environment env = testEnv.environment; - try { - final ToolCommandRunner runner = ToolCommandRunner( - environment: env, - configs: configs, - ); - final int result = await runner.run([ - 'build', - '--config', - 'ci/host_debug', - ]); - expect(result, equals(0)); - expect(testEnv.processHistory[1].command[0], - contains(path.join('ninja', 'ninja'))); - expect(testEnv.processHistory[1].command[2], contains('ci/host_debug')); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final runner = ToolCommandRunner( + environment: testEnv.environment, + configs: configs, + ); + final result = await runner.run([ + 'build', + '--config', + 'ci/host_debug', + ]); + expect(result, equals(0)); + expect(testEnv.processHistory[1].command[0], + contains(path.join('ninja', 'ninja'))); + expect(testEnv.processHistory[1].command[2], contains('ci/host_debug')); }); test('build command invokes ninja with the specified target', () async { - final TestEnvironment testEnv = TestEnvironment.withTestEngine( + final testEnv = TestEnvironment.withTestEngine( cannedProcesses: cannedProcesses, ); - try { - final ToolCommandRunner runner = ToolCommandRunner( - environment: testEnv.environment, - configs: configs, - ); - final int result = await runner.run([ - 'build', - '--config', - 'host_debug', - '//flutter/fml:fml_arc_unittests', - ]); - expect(result, equals(0)); - expect(testEnv.processHistory, containsCommand((List command) { - return command.length > 3 && - command[0].contains('ninja') && - command[1].contains('-C') && - command[2].endsWith('/host_debug') && - // TODO(matanlurey): Tighten this up to be more specific. - // The reason we need a broad check is because the test fixture - // always returns multiple targets for gn desc, even though that is - // not the actual behavior. - command.sublist(3).contains('flutter/fml:fml_arc_unittests'); - })); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final runner = ToolCommandRunner( + environment: testEnv.environment, + configs: configs, + ); + final result = await runner.run([ + 'build', + '--config', + 'host_debug', + '//flutter/fml:fml_arc_unittests', + ]); + expect(result, equals(0)); + expect(testEnv.processHistory, containsCommand((command) { + return command.length > 3 && + command[0].contains('ninja') && + command[1].contains('-C') && + command[2].endsWith('/host_debug') && + // TODO(matanlurey): Tighten this up to be more specific. + // The reason we need a broad check is because the test fixture + // always returns multiple targets for gn desc, even though that is + // not the actual behavior. 
+ command.sublist(3).contains('flutter/fml:fml_arc_unittests'); + })); }); test('build command invokes ninja with all matched targets', () async { - final TestEnvironment testEnv = TestEnvironment.withTestEngine( + final testEnv = TestEnvironment.withTestEngine( cannedProcesses: cannedProcesses, ); - try { - final ToolCommandRunner runner = ToolCommandRunner( - environment: testEnv.environment, - configs: configs, - ); - final int result = await runner.run([ - 'build', - '--config', - 'host_debug', - '//flutter/...', - ]); - expect(result, equals(0)); - expect(testEnv.processHistory, containsCommand((List command) { - return command.length > 5 && - command[0].contains('ninja') && - command[1].contains('-C') && - command[2].endsWith('/host_debug') && - command[3] == 'flutter/display_list:display_list_unittests' && - command[4] == 'flutter/flow:flow_unittests' && - command[5] == 'flutter/fml:fml_arc_unittests'; - })); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final runner = ToolCommandRunner( + environment: testEnv.environment, + configs: configs, + ); + final result = await runner.run([ + 'build', + '--config', + 'host_debug', + '//flutter/...', + ]); + expect(result, equals(0)); + expect(testEnv.processHistory, containsCommand((command) { + return command.length > 5 && + command[0].contains('ninja') && + command[1].contains('-C') && + command[2].endsWith('/host_debug') && + command[3] == 'flutter/display_list:display_list_unittests' && + command[4] == 'flutter/flow:flow_unittests' && + command[5] == 'flutter/fml:fml_arc_unittests'; + })); }); test('build command gracefully handles no matched targets', () async { - final List cannedProcesses = [ - CannedProcess((List command) => command.contains('desc'), - stdout: fixtures.gnDescOutputEmpty( - gnPattern: 'testing/scenario_app:sceario_app'), - exitCode: 1), + final cannedProcesses = [ + CannedProcess( + (command) => command.contains('desc'), + stdout: fixtures.gnDescOutputEmpty( + gnPattern: 'testing/scenario_app:sceario_app'), + exitCode: 1, + ), ]; - final TestEnvironment testEnv = TestEnvironment.withTestEngine( + final testEnv = TestEnvironment.withTestEngine( cannedProcesses: cannedProcesses, ); - try { - final ToolCommandRunner runner = ToolCommandRunner( - environment: testEnv.environment, - configs: configs, - ); - final int result = await runner.run([ - 'build', - '--config', - 'host_debug', - // Intentionally omit the prefix '//flutter/' to trigger the warning. - '//testing/scenario_app', - ]); - expect(result, equals(0)); - expect(testEnv.testLogs.map((LogRecord r) => r.message).join(), - contains('No targets matched the pattern `testing/scenario_app')); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final runner = ToolCommandRunner( + environment: testEnv.environment, + configs: configs, + ); + final result = await runner.run([ + 'build', + '--config', + 'host_debug', + // Intentionally omit the prefix '//flutter/' to trigger the warning. 
+ '//testing/scenario_app', + ]); + expect(result, equals(0)); + expect( + testEnv.testLogs.map((LogRecord r) => r.message).join(), + contains('No targets matched the pattern `testing/scenario_app'), + ); }); test('et help build line length is not too big', () async { - final List prints = []; + final prints = []; await runZoned( () async { - final TestEnvironment testEnv = TestEnvironment.withTestEngine( + final testEnv = TestEnvironment.withTestEngine( cannedProcesses: cannedProcesses, verbose: true, ); - try { - final ToolCommandRunner runner = ToolCommandRunner( - environment: testEnv.environment, - configs: configs, - help: true, - ); - final int result = await runner.run([ - 'help', - 'build', - ]); - expect(result, equals(0)); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final runner = ToolCommandRunner( + environment: testEnv.environment, + configs: configs, + help: true, + ); + final result = await runner.run([ + 'help', + 'build', + ]); + expect(result, equals(0)); }, zoneSpecification: ZoneSpecification( print: (Zone self, ZoneDelegate parent, Zone zone, String line) { @@ -513,32 +475,30 @@ void main() { }, ), ); - for (final String line in prints) { + for (final line in prints) { expect(line.length, lessThanOrEqualTo(100)); } }); test('non-verbose "et help build" does not contain ci builds', () async { - final List prints = []; + final prints = []; await runZoned( () async { - final TestEnvironment testEnv = TestEnvironment.withTestEngine( + final testEnv = TestEnvironment.withTestEngine( cannedProcesses: cannedProcesses, ); - try { - final ToolCommandRunner runner = ToolCommandRunner( - environment: testEnv.environment, - configs: configs, - help: true, - ); - final int result = await runner.run([ - 'help', - 'build', - ]); - expect(result, equals(0)); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final runner = ToolCommandRunner( + environment: testEnv.environment, + configs: configs, + help: true, + ); + final result = await runner.run([ + 'help', + 'build', + ]); + expect(result, equals(0)); }, zoneSpecification: ZoneSpecification( print: (Zone self, ZoneDelegate parent, Zone zone, String line) { @@ -546,7 +506,7 @@ void main() { }, ), ); - for (final String line in prints) { + for (final line in prints) { expect(line.contains('[ci/'), isFalse); } }); diff --git a/tools/engine_tool/test/gn_test.dart b/tools/engine_tool/test/gn_test.dart index cfa7e160b1aff..192b66cdc35a8 100644 --- a/tools/engine_tool/test/gn_test.dart +++ b/tools/engine_tool/test/gn_test.dart @@ -4,14 +4,15 @@ import 'package:engine_tool/src/gn.dart'; import 'package:engine_tool/src/label.dart'; +import 'package:engine_tool/src/logger.dart'; import 'package:test/test.dart'; import 'utils.dart'; void main() { test('gn.desc handles a non-zero exit code', () async { - final TestEnvironment testEnv = TestEnvironment.withTestEngine( - cannedProcesses: [ + final testEnv = TestEnvironment.withTestEngine( + cannedProcesses: [ CannedProcess( (List command) => command.contains('desc'), exitCode: 1, @@ -20,46 +21,56 @@ void main() { ), ], ); - try { - final Gn gn = Gn.fromEnvironment(testEnv.environment); - await gn.desc('out/Release', TargetPattern('//foo', 'bar')); - fail('Expected an exception'); - } catch (e) { - final String message = '$e'; - expect(message, contains('Failed to run')); - expect(message, contains('exit code 1')); - expect(message, contains('STDOUT:\nstdout')); - expect(message, contains('STDERR:\nstderr')); - } finally { - 
testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final gn = Gn.fromEnvironment(testEnv.environment); + expect( + () => gn.desc('out/Release', TargetPattern('//foo', 'bar')), + throwsA( + isA().having( + (a) => a.toString(), + 'toString()', + allOf([ + contains('Failed to run'), + contains('exit code 1'), + contains('STDOUT:\nstdout'), + contains('STDERR:\nstderr'), + ]), + ), + ), + ); }); test('gn.desc handles unparseable stdout', () async { - final TestEnvironment testEnv = TestEnvironment.withTestEngine( - cannedProcesses: [ + final testEnv = TestEnvironment.withTestEngine( + cannedProcesses: [ CannedProcess( (List command) => command.contains('desc'), stdout: 'not json', ), ], ); - try { - final Gn gn = Gn.fromEnvironment(testEnv.environment); - await gn.desc('out/Release', TargetPattern('//foo', 'bar')); - fail('Expected an exception'); - } catch (e) { - final String message = '$e'; - expect(message, contains('Failed to parse JSON')); - expect(message, contains('not json')); - } finally { - testEnv.cleanup(); - } + addTearDown(testEnv.cleanup); + + final gn = Gn.fromEnvironment(testEnv.environment); + expect( + () => gn.desc('out/Release', TargetPattern('//foo', 'bar')), + throwsA( + isA().having( + (a) => a.toString(), + 'toString()', + allOf([ + contains('Failed to parse JSON'), + contains('not json'), + ]), + ), + ), + ); }); test('gn.desc parses build targets', () async { - final TestEnvironment testEnv = TestEnvironment.withTestEngine( - cannedProcesses: [ + final testEnv = TestEnvironment.withTestEngine( + cannedProcesses: [ CannedProcess( (List command) => command.contains('desc'), stdout: ''' @@ -82,31 +93,37 @@ void main() { ), ], ); - try { - final Gn gn = Gn.fromEnvironment(testEnv.environment); - final List targets = await gn.desc('out/Release', TargetPattern('//foo', 'bar')); - expect(targets, hasLength(3)); + addTearDown(testEnv.cleanup); + + final gn = Gn.fromEnvironment(testEnv.environment); + final targets = await gn.desc('out/Release', TargetPattern('//foo', 'bar')); + expect(targets, hasLength(3)); - // There should be exactly one binary test target and two library targets. - final ExecutableBuildTarget testTarget = targets.whereType().single; - expect(testTarget, ExecutableBuildTarget( - label: Label('//foo/bar', 'baz_test'), - testOnly: true, - executable: 'out/host_debug/foo/bar/baz_test', - )); + // There should be exactly one binary test target and two library targets. 
+ final testTarget = targets.whereType().single; + expect( + testTarget, + ExecutableBuildTarget( + label: Label('//foo/bar', 'baz_test'), + testOnly: true, + executable: 'out/host_debug/foo/bar/baz_test', + )); - final List libraryTargets = targets.whereType().toList(); - expect(libraryTargets, hasLength(2)); - expect(libraryTargets.contains(LibraryBuildTarget( + final libraryTargets = targets.whereType().toList(); + expect(libraryTargets, hasLength(2)); + expect( + libraryTargets.contains(LibraryBuildTarget( label: Label('//foo/bar', 'baz_shared_library'), testOnly: false, - )), isTrue); - expect(libraryTargets.contains(LibraryBuildTarget( + )), + isTrue, + ); + expect( + libraryTargets.contains(LibraryBuildTarget( label: Label('//foo/bar', 'baz_static_library'), testOnly: false, - )), isTrue); - } finally { - testEnv.cleanup(); - } + )), + isTrue, + ); }); } diff --git a/tools/engine_tool/test/query_command_test.dart b/tools/engine_tool/test/query_command_test.dart index aef566489e46e..29ae27d3e16f6 100644 --- a/tools/engine_tool/test/query_command_test.dart +++ b/tools/engine_tool/test/query_command_test.dart @@ -6,7 +6,6 @@ import 'dart:convert' as convert; import 'package:engine_build_configs/engine_build_configs.dart'; import 'package:engine_tool/src/commands/command_runner.dart'; -import 'package:engine_tool/src/environment.dart'; import 'package:logging/logging.dart' as log; import 'package:platform/platform.dart'; import 'package:test/test.dart'; @@ -15,31 +14,32 @@ import 'fixtures.dart' as fixtures; import 'utils.dart'; void main() { - final BuilderConfig linuxTestConfig = BuilderConfig.fromJson( + final linuxTestConfig = BuilderConfig.fromJson( path: 'ci/builders/linux_test_config.json', map: convert.jsonDecode(fixtures.testConfig('Linux', Platform.linux)) as Map, ); - final BuilderConfig linuxTestConfig2 = BuilderConfig.fromJson( + final linuxTestConfig2 = BuilderConfig.fromJson( path: 'ci/builders/linux_test_config2.json', - map: convert.jsonDecode(fixtures.testConfig('Linux', Platform.linux, suffix: '2')) + map: convert.jsonDecode( + fixtures.testConfig('Linux', Platform.linux, suffix: '2')) as Map, ); - final BuilderConfig macTestConfig = BuilderConfig.fromJson( + final macTestConfig = BuilderConfig.fromJson( path: 'ci/builders/mac_test_config.json', map: convert.jsonDecode(fixtures.testConfig('Mac-12', Platform.macOS)) as Map, ); - final BuilderConfig winTestConfig = BuilderConfig.fromJson( + final winTestConfig = BuilderConfig.fromJson( path: 'ci/builders/win_test_config.json', map: convert.jsonDecode(fixtures.testConfig('Windows-11', Platform.windows)) as Map, ); - final Map configs = { + final configs = { 'linux_test_config': linuxTestConfig, 'linux_test_config2': linuxTestConfig2, 'mac_test_config': macTestConfig, @@ -50,27 +50,66 @@ void main() { return logs.map((log.LogRecord r) => r.message).toList(); } - final List cannedProcesses = [ - CannedProcess((List command) => command.contains('desc'), - stdout: fixtures.gnDescOutput()), + final cannedProcesses = [ + CannedProcess( + (command) => command.contains('desc'), + stdout: fixtures.gnDescOutput(), + ), ]; test('query command returns builds for the host platform.', () async { - final TestEnvironment testEnvironment = TestEnvironment.withTestEngine( + final testEnvironment = TestEnvironment.withTestEngine( cannedProcesses: cannedProcesses, ); - try { - final Environment env = testEnvironment.environment; - final ToolCommandRunner runner = ToolCommandRunner( - environment: env, - configs: configs, - ); - final int 
result = await runner.run([ - 'query', - 'builders', - ]); - expect(result, equals(0)); - expect( + addTearDown(testEnvironment.cleanup); + + final runner = ToolCommandRunner( + environment: testEnvironment.environment, + configs: configs, + ); + final result = await runner.run([ + 'query', + 'builders', + ]); + expect(result, equals(0)); + expect( + stringsFromLogs(testEnvironment.testLogs), + equals([ + 'Add --verbose to see detailed information about each builder\n', + '\n', + '"linux_test_config" builder:\n', + ' "ci/build_name" config\n', + ' "linux/host_debug" config\n', + ' "linux/android_debug_arm64" config\n', + ' "ci/android_debug_rbe_arm64" config\n', + '"linux_test_config2" builder:\n', + ' "ci/build_name2" config\n', + ' "linux/host_debug2" config\n', + ' "linux/android_debug2_arm64" config\n', + ' "ci/android_debug2_rbe_arm64" config\n', + ]), + ); + }); + + test('query command with --builder returns only from the named builder.', + () async { + final testEnvironment = TestEnvironment.withTestEngine( + cannedProcesses: cannedProcesses, + ); + addTearDown(testEnvironment.cleanup); + + final runner = ToolCommandRunner( + environment: testEnvironment.environment, + configs: configs, + ); + final result = await runner.run([ + 'query', + 'builders', + '--builder', + 'linux_test_config', + ]); + expect(result, equals(0)); + expect( stringsFromLogs(testEnvironment.testLogs), equals([ 'Add --verbose to see detailed information about each builder\n', @@ -80,118 +119,69 @@ void main() { ' "linux/host_debug" config\n', ' "linux/android_debug_arm64" config\n', ' "ci/android_debug_rbe_arm64" config\n', - '"linux_test_config2" builder:\n', - ' "ci/build_name2" config\n', - ' "linux/host_debug2" config\n', - ' "linux/android_debug2_arm64" config\n', - ' "ci/android_debug2_rbe_arm64" config\n', - ]), - ); - } finally { - testEnvironment.cleanup(); - } + ])); }); - test('query command with --builder returns only from the named builder.', - () async { - final TestEnvironment testEnvironment = TestEnvironment.withTestEngine( + test('query command with --all returns all builds.', () async { + final testEnvironment = TestEnvironment.withTestEngine( cannedProcesses: cannedProcesses, ); - try { - final Environment env = testEnvironment.environment; - final ToolCommandRunner runner = ToolCommandRunner( - environment: env, - configs: configs, - ); - final int result = await runner.run([ - 'query', - 'builders', - '--builder', - 'linux_test_config', - ]); - expect(result, equals(0)); - expect( - stringsFromLogs(testEnvironment.testLogs), - equals([ - 'Add --verbose to see detailed information about each builder\n', - '\n', - '"linux_test_config" builder:\n', - ' "ci/build_name" config\n', - ' "linux/host_debug" config\n', - ' "linux/android_debug_arm64" config\n', - ' "ci/android_debug_rbe_arm64" config\n', - ])); - } finally { - testEnvironment.cleanup(); - } - }); + addTearDown(testEnvironment.cleanup); - test('query command with --all returns all builds.', () async { - final TestEnvironment testEnvironment = TestEnvironment.withTestEngine( - cannedProcesses: cannedProcesses, + final runner = ToolCommandRunner( + environment: testEnvironment.environment, + configs: configs, + ); + final result = await runner.run([ + 'query', + 'builders', + '--all', + ]); + expect(result, equals(0)); + expect( + testEnvironment.testLogs.length, + equals(30), ); - try { - final Environment env = testEnvironment.environment; - final ToolCommandRunner runner = ToolCommandRunner( - environment: env, - configs: configs, 
- ); - final int result = await runner.run([ - 'query', - 'builders', - '--all', - ]); - expect(result, equals(0)); - expect( - testEnvironment.testLogs.length, - equals(30), - ); - } finally { - testEnvironment.cleanup(); - } }); test('query targets', () async { - final TestEnvironment testEnvironment = TestEnvironment.withTestEngine( + final testEnvironment = TestEnvironment.withTestEngine( cannedProcesses: cannedProcesses, ); - try { - final Environment env = testEnvironment.environment; - final ToolCommandRunner runner = ToolCommandRunner( - environment: env, - configs: configs, - ); - final int result = await runner.run([ - 'query', - 'targets', - ]); - expect(result, equals(0)); - - final List expected = [ - '//flutter/display_list:display_list_unittests', - '//flutter/flow:flow_unittest', - '//flutter/fml:fml_arc_unittests', - ]; - - final List testLogs = stringsFromLogs(testEnvironment.testLogs); - for (final String testLog in testLogs) { - // Expect one of the expected targets to be in the output. - // Then remove it from the list of expected targets. - for (final String target in expected) { - if (testLog.contains(target)) { - expected.remove(target); - break; - } + addTearDown(testEnvironment.cleanup); + + final runner = ToolCommandRunner( + environment: testEnvironment.environment, + configs: configs, + ); + final result = await runner.run([ + 'query', + 'targets', + ]); + expect(result, equals(0)); + + final expected = [ + '//flutter/display_list:display_list_unittests', + '//flutter/flow:flow_unittest', + '//flutter/fml:fml_arc_unittests', + ]; + + final testLogs = stringsFromLogs(testEnvironment.testLogs); + for (final testLog in testLogs) { + // Expect one of the expected targets to be in the output. + // Then remove it from the list of expected targets. 
+        for (final target in expected) {
+          if (testLog.contains(target)) {
+            expected.remove(target);
+            break;
           }
         }
       }
-
-      expect(
-        expected.isEmpty,
-        isTrue,
-        reason: 'All expected targets were found',
-      );
-    } finally {
-      testEnvironment.cleanup();
     }
+
+    expect(
+      expected.isEmpty,
+      isTrue,
+      reason: 'All expected targets were found',
+    );
   });
 }
diff --git a/tools/engine_tool/test/test_command_test.dart b/tools/engine_tool/test/test_command_test.dart
index 792ecfb8be051..72e3219b459c5 100644
--- a/tools/engine_tool/test/test_command_test.dart
+++ b/tools/engine_tool/test/test_command_test.dart
@@ -6,7 +6,6 @@ import 'dart:convert' as convert;
 
 import 'package:engine_build_configs/engine_build_configs.dart';
 import 'package:engine_tool/src/commands/command_runner.dart';
-import 'package:engine_tool/src/environment.dart';
 import 'package:platform/platform.dart';
 import 'package:test/test.dart';
 
@@ -14,38 +13,38 @@ import 'fixtures.dart' as fixtures;
 import 'utils.dart';
 
 void main() {
-  final BuilderConfig linuxTestConfig = BuilderConfig.fromJson(
+  final linuxTestConfig = BuilderConfig.fromJson(
     path: 'ci/builders/linux_test_config.json',
     map: convert.jsonDecode(fixtures.testConfig('Linux', Platform.linux))
         as Map,
   );
-  final BuilderConfig macTestConfig = BuilderConfig.fromJson(
+  final macTestConfig = BuilderConfig.fromJson(
     path: 'ci/builders/mac_test_config.json',
     map: convert.jsonDecode(fixtures.testConfig('Mac-12', Platform.macOS))
         as Map,
   );
-  final BuilderConfig winTestConfig = BuilderConfig.fromJson(
+  final winTestConfig = BuilderConfig.fromJson(
     path: 'ci/builders/win_test_config.json',
     map: convert.jsonDecode(fixtures.testConfig('Windows-11', Platform.windows))
        as Map,
   );
-  final Map configs = {
+  final configs = {
     'linux_test_config': linuxTestConfig,
     'mac_test_config': macTestConfig,
     'win_test_config': winTestConfig,
   };
 
   test('test command executes test', () async {
-    final TestEnvironment testEnvironment = TestEnvironment.withTestEngine(
+    final testEnvironment = TestEnvironment.withTestEngine(
       // This test needs specific instrumentation. Ideally all tests should
       // use per-test environments and not rely on global state, but that is a
       // larger change (https://github.com/flutter/flutter/issues/148420).
-      cannedProcesses: [
+      cannedProcesses: [
         CannedProcess(
-          (List command) => command.contains('desc'),
+          (command) => command.contains('desc'),
           stdout: '''
 {
   "//flutter/fml:display_list_unittests": {
@@ -58,34 +57,33 @@ void main() {
         ),
       ],
     );
-    try {
-      final Environment env = testEnvironment.environment;
-      final ToolCommandRunner runner = ToolCommandRunner(
-        environment: env,
-        configs: configs,
-      );
-      final int result = await runner.run([
-        'test',
-        '//flutter/display_list:display_list_unittests',
-      ]);
-      expect(result, equals(0));
-      expect(testEnvironment.processHistory.length, greaterThan(3));
-      final int offset = testEnvironment.processHistory.length - 1;
-      expect(testEnvironment.processHistory[offset].command[0],
-          endsWith('display_list_unittests'));
-    } finally {
-      testEnvironment.cleanup();
-    }
+    addTearDown(testEnvironment.cleanup);
+
+    final runner = ToolCommandRunner(
+      environment: testEnvironment.environment,
+      configs: configs,
+    );
+    final result = await runner.run([
+      'test',
+      '//flutter/display_list:display_list_unittests',
+    ]);
+    expect(result, equals(0));
+    expect(testEnvironment.processHistory.length, greaterThan(3));
+    final offset = testEnvironment.processHistory.length - 1;
+    expect(
+      testEnvironment.processHistory[offset].command[0],
+      endsWith('display_list_unittests'),
+    );
   });
 
   test('test command skips non-testonly executables', () async {
-    final TestEnvironment testEnvironment = TestEnvironment.withTestEngine(
+    final testEnvironment = TestEnvironment.withTestEngine(
       // This test needs specific instrumentation. Ideally all tests should
       // use per-test environments and not rely on global state, but that is a
       // larger change (https://github.com/flutter/flutter/issues/148420).
-      cannedProcesses: [
+      cannedProcesses: [
         CannedProcess(
-          (List command) => command.contains('desc'),
+          (command) => command.contains('desc'),
           stdout: '''
 {
   "//flutter/fml:display_list_unittests": {
@@ -103,22 +101,20 @@ void main() {
         ),
       ],
     );
-    try {
-      final Environment env = testEnvironment.environment;
-      final ToolCommandRunner runner = ToolCommandRunner(
-        environment: env,
-        configs: configs,
-      );
-      final int result = await runner.run([
-        'test',
-        '//...',
-      ]);
-      expect(result, equals(0));
-      expect(testEnvironment.processHistory.where((ExecutedProcess process) {
-        return process.command[0].contains('protoc');
-      }), isEmpty);
-    } finally {
-      testEnvironment.cleanup();
-    }
+    addTearDown(testEnvironment.cleanup);
+
+    final env = testEnvironment.environment;
+    final runner = ToolCommandRunner(
+      environment: env,
+      configs: configs,
+    );
+    final result = await runner.run([
+      'test',
+      '//...',
+    ]);
+    expect(result, equals(0));
+    expect(testEnvironment.processHistory.where((ExecutedProcess process) {
+      return process.command[0].contains('protoc');
+    }), isEmpty);
   });
 }
diff --git a/tools/engine_tool/test/utils_test.dart b/tools/engine_tool/test/utils_test.dart
index e376f7917f9e9..84bae34edbd4f 100644
--- a/tools/engine_tool/test/utils_test.dart
+++ b/tools/engine_tool/test/utils_test.dart
@@ -7,61 +7,69 @@ import 'package:test/test.dart';
 
 import 'utils.dart';
 
 void main() async {
-  final List cannedProcesses = [
-    CannedProcess((List command) => command.contains('ulfuls'),
-        stdout: 'Ashita ga aru sa'),
-    CannedProcess((List command) => command.contains('quruli'),
-        stdout: 'Tokyo'),
-    CannedProcess((List command) => command.contains('elizaveta'),
-        stdout: 'Moshimo ano toki'),
-    CannedProcess((List command) => command.contains('scott_murphy'),
-        stdout: 'Donna toki mo'),
+  final cannedProcesses = [
+    CannedProcess(
+      (command) => command.contains('ulfuls'),
+      stdout: 'Ashita ga aru sa',
+    ),
+    CannedProcess(
+      (command) => command.contains('quruli'),
+      stdout: 'Tokyo',
+    ),
+    CannedProcess(
+      (command) => command.contains('elizaveta'),
+      stdout: 'Moshimo ano toki',
+    ),
+    CannedProcess(
+      (command) => command.contains('scott_murphy'),
+      stdout: 'Donna toki mo',
+    ),
   ];
 
   test('containsCommand passes if command matched', () async {
-    final TestEnvironment testEnvironment = TestEnvironment.withTestEngine(
+    final testEnvironment = TestEnvironment.withTestEngine(
       cannedProcesses: cannedProcesses,
     );
-    try {
-      await testEnvironment.environment.processRunner.runProcess(
-          ['ulfuls', '--lyrics'],
-          workingDirectory: testEnvironment.environment.engine.srcDir,
-          failOk: true);
-      await testEnvironment.environment.processRunner.runProcess(
-          ['quruli', '--lyrics'],
-          workingDirectory: testEnvironment.environment.engine.srcDir,
-          failOk: true);
-      final List history = testEnvironment.processHistory;
-      expect(history, containsCommand((List command) {
-        return command.isNotEmpty && command[0] == 'quruli';
-      }));
-      expect(history, containsCommand((List command) {
-        return command.length > 1 && command[1] == '--lyrics';
-      }));
-    } finally {
-      testEnvironment.cleanup();
-    }
+    addTearDown(testEnvironment.cleanup);
+
+    await testEnvironment.environment.processRunner.runProcess(
+      ['ulfuls', '--lyrics'],
+      workingDirectory: testEnvironment.environment.engine.srcDir,
+      failOk: true,
+    );
+    await testEnvironment.environment.processRunner.runProcess(
+      ['quruli', '--lyrics'],
+      workingDirectory: testEnvironment.environment.engine.srcDir,
+      failOk: true,
+    );
+    final history = testEnvironment.processHistory;
+    expect(history, containsCommand((command) {
+      return command.isNotEmpty && command[0] == 'quruli';
+    }));
+    expect(history, containsCommand((command) {
+      return command.length > 1 && command[1] == '--lyrics';
+    }));
   });
 
   test('doesNotContainCommand passes if command not matched', () async {
-    final TestEnvironment testEnvironment = TestEnvironment.withTestEngine(
+    final testEnvironment = TestEnvironment.withTestEngine(
      cannedProcesses: cannedProcesses,
     );
-    try {
-      await testEnvironment.environment.processRunner.runProcess(
-          ['elizaveta', '--lyrics'],
-          workingDirectory: testEnvironment.environment.engine.srcDir,
-          failOk: true);
-      await testEnvironment.environment.processRunner.runProcess(
-          ['scott_murphy', '--lyrics'],
-          workingDirectory: testEnvironment.environment.engine.srcDir,
-          failOk: true);
-      final List history = testEnvironment.processHistory;
-      expect(history, doesNotContainCommand((List command) {
-        return command.length > 1 && command[1] == '--not-an-option';
-      }));
-    } finally {
-      testEnvironment.cleanup();
-    }
+    addTearDown(testEnvironment.cleanup);
+
+    await testEnvironment.environment.processRunner.runProcess(
+      ['elizaveta', '--lyrics'],
+      workingDirectory: testEnvironment.environment.engine.srcDir,
+      failOk: true,
+    );
+    await testEnvironment.environment.processRunner.runProcess(
+      ['scott_murphy', '--lyrics'],
+      workingDirectory: testEnvironment.environment.engine.srcDir,
+      failOk: true,
+    );
+    final history = testEnvironment.processHistory;
+    expect(history, doesNotContainCommand((command) {
+      return command.length > 1 && command[1] == '--not-an-option';
+    }));
   });
 }