From 0f9a6f8035439dcaf587ca6168a5e27c161f6252 Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Thu, 23 Oct 2025 09:54:45 +0100 Subject: [PATCH 01/38] Upgrade cc-rules plugin to v0.5.3 (#3433) --- docs/BUILD | 2 +- plugins/BUILD | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/BUILD b/docs/BUILD index 8cf72d2c1..3d07f0c2b 100644 --- a/docs/BUILD +++ b/docs/BUILD @@ -60,7 +60,7 @@ plugins = { "python": "v1.14.0", "java": "v0.4.5", "go": "v1.26.0", - "cc": "v0.5.2", + "cc": "v0.5.3", "shell": "v0.2.0", "go-proto": "v0.3.0", "python-proto": "v0.1.0", diff --git a/plugins/BUILD b/plugins/BUILD index 258ba758d..44ac64489 100644 --- a/plugins/BUILD +++ b/plugins/BUILD @@ -7,7 +7,7 @@ plugin_repo( plugin_repo( name = "cc", plugin = "cc-rules", - revision = "v0.5.2", + revision = "v0.5.3", ) plugin_repo( From f573307f1fad3b44f8b357078b5383df6af5f299 Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Tue, 28 Oct 2025 15:37:04 +0000 Subject: [PATCH 02/38] Bump arcat to v1.3.0 (#3441) This has no direct impact on Please, but the new `--include` option to the `zip` command in this version is necessary for a feature that will be added to the python-rules plugin imminently. 
--- src/parse/internal.tmpl | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/parse/internal.tmpl b/src/parse/internal.tmpl index be7a29888..c877f58cb 100644 --- a/src/parse/internal.tmpl +++ b/src/parse/internal.tmpl @@ -1,14 +1,14 @@ remote_file( name = "arcat", - url = f"https://github.com/please-build/arcat/releases/download/v1.2.1/arcat-1.2.1-{CONFIG.HOSTOS}_{CONFIG.HOSTARCH}", + url = f"https://github.com/please-build/arcat/releases/download/v1.3.0/arcat-1.3.0-{CONFIG.HOSTOS}_{CONFIG.HOSTARCH}", out = "arcat", binary = True, hashes = [ - "507958f2e44e5de7529cb85fa9137ddcca2293daf6ef30dbc1a1cfa22e86ee96", # darwin_amd64 - "717cb15f1237010740be50df3561027a976ac5bd2a5d8f5c30d2ea57ccbcad82", # darwin_arm64 - "190fbf9cdcbcf53a82b886076c45a07323eeb6b4f460485c645cf2f306927c0c", # freebsd_amd64 - "e5b23a127c093939f21bf2f8cb66635b7f0b88cf8a3d1fca1bc19a114f5c5a0c", # linux_amd64 - "019ee52b534a3c48028e9a7229990055432af24a23892619e1a0d28eb3265245", # linux_arm64 + "27fa940b2a1fd2c8beb84d1e29ed7d04ecfca489e021ed9bd7c8e975e5f43839", # darwin_amd64 + "3191b2896451a4f3fd6d592a8a5c6684f4a18bc74f00e91e2488c228a24c1d4e", # darwin_arm64 + "aeaf7be02fb25495f0c4e142a5adb20ce08a2bb988e7be6436562d03fbae83b0", # freebsd_amd64 + "e09a689cebe9d9b27836c184e4955e8d6731c9453fe48124a37b6a173c6b04d6", # linux_amd64 + "c3792853393ca692fd07bd2fbfdf1e1cf6e636090e4e622b8a77f03609c724a9", # linux_arm64 ], visibility = ["PUBLIC"], ) From b46e5de4e31fe6d252211981bf4afe85d31716d2 Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Tue, 28 Oct 2025 15:38:33 +0000 Subject: [PATCH 03/38] Tag v17.23.0 (#3442) --- ChangeLog | 4 ++++ VERSION | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/ChangeLog b/ChangeLog index e892077bd..4ffd56ebd 100644 --- a/ChangeLog +++ b/ChangeLog @@ -1,3 +1,7 @@ +Version 17.23.0 +--------------- + * Bump arcat to v1.3.0, adding the `--include` option to the `zip` command (#3441) + Version 17.22.0 --------------- * Add 
`--audit_log_dir` option for logging of Please invocations, build commands and remote file fetching (#3425) diff --git a/VERSION b/VERSION index 721d4f737..85496bc4c 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -17.22.0 +17.23.0 From 4fec6ab3ccee95d424658ed238f2eef9a66c1934 Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Wed, 29 Oct 2025 21:15:14 +0000 Subject: [PATCH 04/38] Define `PLZ_ENV` in build environments (#3444) It would be useful for executables to be able to tell whether they are running within a Please build environment, whether or not that environment is sandboxed. One definitive use case is when deciding whether or not resources should be loaded from `$TMP_DIR`, which has a special (and well-defined) meaning within a Please build environment but not outside of one, even though the variable may still be defined in the environment. The java-rules and python-rules plugins would both benefit from this, given that `java_binary` JARs and `python_binary` pexes have runtimes that are expected to be discovered dynamically at run time but will be located at different paths depending on whether they are executing inside a build environment. Define the `PLZ_ENV` variable in Please build environments. This of course does not guarantee that the environment was created by Please, but is a better indicator of that than heuristics such as testing for the presence of other variables that happen to be defined in certain build environments, such as `BUILD_CONFIG` and `_TEST_ID`. --- src/core/build_env.go | 1 + src/core/build_env_test.go | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/src/core/build_env.go b/src/core/build_env.go index 732b71b9f..b3c0bcfbb 100644 --- a/src/core/build_env.go +++ b/src/core/build_env.go @@ -22,6 +22,7 @@ type BuildEnv map[string]string // on any specific target etc. 
func GeneralBuildEnvironment(state *BuildState) BuildEnv { env := BuildEnv{ + "PLZ_ENV": "1", // Need this for certain tools, for example sass "LANG": state.Config.Build.Lang, // Need to know these for certain rules. diff --git a/src/core/build_env_test.go b/src/core/build_env_test.go index 0375e4483..ca2b22210 100644 --- a/src/core/build_env_test.go +++ b/src/core/build_env_test.go @@ -65,6 +65,7 @@ func TestExecEnvironment(t *testing.T) { env := ExecEnvironment(NewDefaultBuildState(), target, "/path/to/runtime/dir") + assert.Equal(t, env["PLZ_ENV"], "1") assert.Equal(t, env["DATA"], "pkg/data_file1") assert.Equal(t, env["TMP_DIR"], "/path/to/runtime/dir") assert.Equal(t, env["TMPDIR"], "/path/to/runtime/dir") @@ -104,6 +105,7 @@ func TestExecEnvironmentTestTarget(t *testing.T) { env := ExecEnvironment(state, testTarget, "/path/to/runtime/dir") + assert.Equal(t, env["PLZ_ENV"], "1") assert.Equal(t, env["DATA"], "pkg/data_file1 pkg/data_file2") assert.Equal(t, env["DATA_FILE2"], "pkg/data_file2") assert.Equal(t, env["TOOLS"], "plz-out/bin/tool1 plz-out/bin/tool2") @@ -138,6 +140,7 @@ func TestExecEnvironmentDebugTarget(t *testing.T) { env := ExecEnvironment(state, target, "/path/to/runtime/dir") + assert.Equal(t, env["PLZ_ENV"], "1") assert.Equal(t, env["DEBUG_DATA"], "pkg/data_file1") assert.Equal(t, env["DEBUG_TOOLS"], "plz-out/bin/tool1") assert.Equal(t, env["DEBUG_TOOLS_TOOL1"], "plz-out/bin/tool1") @@ -173,6 +176,7 @@ func TestExecEnvironmentDebugTestTarget(t *testing.T) { env := ExecEnvironment(state, testTarget, "/path/to/runtime/dir") + assert.Equal(t, env["PLZ_ENV"], "1") assert.Equal(t, env["DEBUG_DATA"], "pkg/data_file1") assert.Equal(t, env["DEBUG_TOOLS"], "plz-out/bin/tool1") assert.Equal(t, env["DEBUG_TOOLS_TOOL1"], "plz-out/bin/tool1") From c298f634dfb4f0fc3ff89f088f939b01d076d411 Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Thu, 30 Oct 2025 19:02:24 +0000 Subject: [PATCH 05/38] Use cc-rules v0.5.4 (#3445) --- docs/BUILD | 2 +- plugins/BUILD 
| 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/BUILD b/docs/BUILD index 3d07f0c2b..ce1186ca9 100644 --- a/docs/BUILD +++ b/docs/BUILD @@ -60,7 +60,7 @@ plugins = { "python": "v1.14.0", "java": "v0.4.5", "go": "v1.26.0", - "cc": "v0.5.3", + "cc": "v0.5.4", "shell": "v0.2.0", "go-proto": "v0.3.0", "python-proto": "v0.1.0", diff --git a/plugins/BUILD b/plugins/BUILD index 44ac64489..aa83d2ce6 100644 --- a/plugins/BUILD +++ b/plugins/BUILD @@ -7,7 +7,7 @@ plugin_repo( plugin_repo( name = "cc", plugin = "cc-rules", - revision = "v0.5.3", + revision = "v0.5.4", ) plugin_repo( From 8cebc1f40b5899ce674fbd0d2b35d42205b13078 Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Thu, 6 Nov 2025 17:11:49 +0000 Subject: [PATCH 06/38] Bump cc-rules plugin to v0.6.0 (#3450) The only notable (but minor) change caused by this is that symbols are now stripped from the please_sandbox binary. --- docs/BUILD | 2 +- plugins/BUILD | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/BUILD b/docs/BUILD index ce1186ca9..452347864 100644 --- a/docs/BUILD +++ b/docs/BUILD @@ -60,7 +60,7 @@ plugins = { "python": "v1.14.0", "java": "v0.4.5", "go": "v1.26.0", - "cc": "v0.5.4", + "cc": "v0.6.0", "shell": "v0.2.0", "go-proto": "v0.3.0", "python-proto": "v0.1.0", diff --git a/plugins/BUILD b/plugins/BUILD index aa83d2ce6..de391c094 100644 --- a/plugins/BUILD +++ b/plugins/BUILD @@ -7,7 +7,7 @@ plugin_repo( plugin_repo( name = "cc", plugin = "cc-rules", - revision = "v0.5.4", + revision = "v0.6.0", ) plugin_repo( From 45c3a272c667d3625f3148da0e9c13aa72fb5275 Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Thu, 13 Nov 2025 15:52:21 +0000 Subject: [PATCH 07/38] Implement run-time dependencies for binary targets (#3451) Add a `runtime_deps` parameter, allowing for the declaration of run-time dependencies, to `build_rule` and several of the built-in build definitions that permit binary outputs. 
The semantics of `runtime_deps` are a combination of the semantics of `deps` and `data`. If the output of a `build_rule` is marked as binary, targets listed in `runtime_deps` are: - guaranteed to have been built before the dependent target runs; - copied into build and test environments alongside the dependent target. Like build-time dependencies declared with `deps`, run-time dependencies are computed transitively; i.e., if target A has a run-time dependency on target B which in turn has a run-time dependency on target C, targets B and C will be built before `plz run`ning target A, and the outputs of targets B and C will be copied into build environments (or test environments, if target A is a test) alongside the outputs of target A. Unlike build-time dependencies, downward searches for transitive run-time dependencies are not blocked by the `output_is_complete` parameter. --- rules/builtins.build_defs | 12 +- rules/misc_rules.build_defs | 37 +++- src/build/incrementality_test.go | 7 +- src/core/build_target.go | 76 ++++++- src/core/build_target_test.go | 67 +++++- src/core/state.go | 26 +++ src/core/utils.go | 67 ++++++ src/parse/asp/builtins.go | 3 +- src/parse/asp/targets.go | 12 +- src/query/print.go | 3 + test/runtime_deps/BUILD | 68 ++++++ test/runtime_deps/repo/.plzconfig | 0 test/runtime_deps/repo/test/BUILD_FILE | 197 ++++++++++++++++++ tools/build_langserver/lsp/definition_test.go | 6 +- 14 files changed, 544 insertions(+), 37 deletions(-) create mode 100644 test/runtime_deps/BUILD create mode 100644 test/runtime_deps/repo/.plzconfig create mode 100644 test/runtime_deps/repo/test/BUILD_FILE diff --git a/rules/builtins.build_defs b/rules/builtins.build_defs index 5edca910d..b8e4ccafb 100644 --- a/rules/builtins.build_defs +++ b/rules/builtins.build_defs @@ -3,9 +3,9 @@ # Do not change the order of arguments to this function without updating the iota in targets.go to match it. 
def build_rule(name:str, cmd:str|dict='', test_cmd:str|dict='', debug_cmd:str='', srcs:list|dict=None, data:list|dict=None, - debug_data:list|dict=None, outs:list|dict=None, deps:list=None, exported_deps:list=None, secrets:list|dict=None, - tools:str|list|dict=None, test_tools:str|list|dict=None, debug_tools:str|list|dict=None, labels:list=None, - visibility:list=CONFIG.DEFAULT_VISIBILITY, hashes:list=None, binary:bool=False, test:bool=False, + debug_data:list|dict=None, outs:list|dict=None, deps:list=None, exported_deps:list=None, runtime_deps:list=None, + secrets:list|dict=None, tools:str|list|dict=None, test_tools:str|list|dict=None, debug_tools:str|list|dict=None, + labels:list=None, visibility:list=CONFIG.DEFAULT_VISIBILITY, hashes:list=None, binary:bool=False, test:bool=False, test_only:bool=CONFIG.DEFAULT_TESTONLY, building_description:str=None, needs_transitive_deps:bool=False, output_is_complete:bool=False, sandbox:bool=CONFIG.BUILD_SANDBOX, test_sandbox:bool=CONFIG.TEST_SANDBOX, no_test_output:bool=False, flaky:bool|int=0, build_timeout:int|str=0, test_timeout:int|str=0, pre_build:function=None, @@ -249,10 +249,12 @@ def has_label(name:str, prefix:str, all:bool=False) -> bool: return len(get_labels(name, prefix, all)) > 0 def add_label(name:str, label:str): pass -def add_dep(target:str, dep:str, exported:bool=False): +def add_dep(target:str, dep:str, exported:bool=False, runtime:bool=False): pass def add_exported_dep(target:str, dep:str): - add_dep(target, dep, True) + add_dep(target, dep, exported=True) +def add_runtime_dep(target:str, dep:str): + add_dep(target, dep, runtime=True) def add_data(target:str, datum:str|list|dict): pass def add_out(target:str, name:str, out:str=''): diff --git a/rules/misc_rules.build_defs b/rules/misc_rules.build_defs index 3d9a260eb..6992313db 100644 --- a/rules/misc_rules.build_defs +++ b/rules/misc_rules.build_defs @@ -2,7 +2,7 @@ def genrule(name:str, cmd:str|list|dict, srcs:list|dict=None, out:str=None, 
outs:list|dict=None, deps:list=None, - exported_deps:list=None, labels:list&features&tags=None, visibility:list=None, + exported_deps:list=None, runtime_deps:list=None, labels:list&features&tags=None, visibility:list=None, building_description:str='Building...', data:list|dict=None, hashes:list=None, timeout:int=0, binary:bool=False, sandbox:bool=None, needs_transitive_deps:bool=False, output_is_complete:bool=True, test_only:bool&testonly=False, secrets:list|dict=None, requires:list=None, provides:dict=None, @@ -34,6 +34,13 @@ def genrule(name:str, cmd:str|list|dict, srcs:list|dict=None, out:str=None, outs names to lists, with similar semantics to those of srcs. deps (list): Dependencies of this rule. exported_deps (list): Dependencies that will become visible to any rules that depend on this rule. + runtime_deps (list): Run-time dependencies of this rule. If this rule is run (i.e. with 'plz run'), + rules in this list, as well as those rules' transitive run-time dependencies, + are guaranteed to be built before this rule runs. If this rule is declared as + a dependency of another rule, the outputs of rules in this list, as well as + the outputs of those rules' transitive run-time dependencies, will exist in + the dependent rule's build environment. Requires the rule to produce a runnable + output (i.e. binary = True). tools (str | list | dict): Tools used to build this rule; similar to srcs but are not copied to the temporary build directory. Should be accessed via $(exe //path/to:tool) or similar. 
@@ -119,6 +126,7 @@ def genrule(name:str, cmd:str|list|dict, srcs:list|dict=None, out:str=None, outs cmd = ' && '.join(cmd) if isinstance(cmd, list) else cmd, deps = deps, exported_deps = exported_deps, + runtime_deps = runtime_deps, data = data, tools = tools, secrets = secrets, @@ -147,11 +155,12 @@ def genrule(name:str, cmd:str|list|dict, srcs:list|dict=None, out:str=None, outs def gentest(name:str, test_cmd:str|list|dict, labels:list&features&tags=None, cmd:str|list|dict=None, srcs:list|dict=None, - outs:list=None, deps:list=None, exported_deps:list=None, tools:str|list|dict=None, test_tools:str|list|dict=None, - data:list|dict=None, visibility:list=None, timeout:int=0, needs_transitive_deps:bool=False, - flaky:bool|int=0, secrets:list|dict=None, no_test_output:bool=False, test_outputs:list=None, - output_is_complete:bool=True, requires:list=None, sandbox:bool=None, size:str=None, local:bool=False, - pass_env:list=None, env:dict=None, exit_on_error:bool=CONFIG.EXIT_ON_ERROR, no_test_coverage:bool=False): + outs:list=None, deps:list=None, exported_deps:list=None, runtime_deps:list=None, tools:str|list|dict=None, + test_tools:str|list|dict=None, data:list|dict=None, visibility:list=None, timeout:int=0, + needs_transitive_deps:bool=False, flaky:bool|int=0, secrets:list|dict=None, no_test_output:bool=False, + test_outputs:list=None, output_is_complete:bool=True, requires:list=None, sandbox:bool=None, size:str=None, + local:bool=False, pass_env:list=None, env:dict=None, exit_on_error:bool=CONFIG.EXIT_ON_ERROR, + no_test_coverage:bool=False): """A rule which creates a test with an arbitrary command. The command must return zero on success and nonzero on failure. Test results are written @@ -172,6 +181,10 @@ def gentest(name:str, test_cmd:str|list|dict, labels:list&features&tags=None, cm outs (list): Output files of this rule. deps (list): Dependencies of this rule. exported_deps (list): Dependencies that will become visible to any rules that depend on this rule. 
+ runtime_deps (list): Run-time dependencies of this rule. When the test command runs, rules in this + list, as well as those rules' transitive run-time dependencies, will exist in + the test environment. Requires the rule to produce a runnable output (i.e. + binary = True). tools (str | list | dict): Tools used to build this rule; similar to srcs but are not copied to the temporary build directory. test_tools (str | list | dict): Like tools but available to test_cmd instead. @@ -210,6 +223,7 @@ def gentest(name:str, test_cmd:str|list|dict, labels:list&features&tags=None, cm outs = outs, deps = deps, exported_deps = exported_deps, + runtime_deps = runtime_deps, data = data, tools = tools, test_tools = test_tools, @@ -256,7 +270,7 @@ def export_file(name:str, src:str, visibility:list=None, binary:bool=False, test ) -def filegroup(name:str, tag:str='', srcs:list=None, deps:list=None, exported_deps:list=None, +def filegroup(name:str, tag:str='', srcs:list=None, deps:list=None, exported_deps:list=None, runtime_deps:list=None, visibility:list=None, labels:list&features&tags=None, binary:bool=False, output_is_complete:bool=True, requires:list=None, provides:dict=None, hashes:list=None, test_only:bool&testonly=False): """Defines a collection of files which other rules can depend on. @@ -271,6 +285,8 @@ def filegroup(name:str, tag:str='', srcs:list=None, deps:list=None, exported_dep srcs (list): Source files for the rule. deps (list): Dependencies of the rule. exported_deps (list): Dependencies that will become visible to any rules that depend on this rule. + runtime_deps (list): Run-time dependencies of this rule. Requires the rule to produce a runnable + output (i.e. binary = True). 
visibility (list): Visibility declaration labels (list): Labels to apply to this rule binary (bool): True to mark the rule outputs as binary @@ -289,6 +305,7 @@ def filegroup(name:str, tag:str='', srcs:list=None, deps:list=None, exported_dep srcs=srcs, deps=deps, exported_deps=exported_deps, + runtime_deps=runtime_deps, visibility=visibility, building_description='Copying...', output_is_complete=output_is_complete, @@ -373,7 +390,7 @@ def system_library(name:str, srcs:list, deps:list=None, hashes:list=None, def remote_file(name:str, url:str|list, hashes:list=None, out:str=None, binary:bool=False, visibility:list=None, licences:list=None, test_only:bool&testonly=False, - labels:list=[], deps:list=None, exported_deps:list=None, + labels:list=[], deps:list=None, exported_deps:list=None, runtime_deps:list=None, extract:bool=False, strip_prefix:str='', _tag:str='',exported_files=[], entry_points:dict={}, username:str=None, password_file:str=None, headers:dict={}, secret_headers:dict={}, pass_env:list=[]): @@ -392,6 +409,8 @@ def remote_file(name:str, url:str|list, hashes:list=None, out:str=None, binary:b labels (list): Labels to apply to this rule. deps (list): List of extra dependencies for this rule. exported_deps (list): Dependencies that will become visible to any rules that depend on this rule. + runtime_deps (list): Run-time dependencies of this rule. Requires the rule to produce a runnable + output (i.e. binary = True). extract (bool): Extracts the contents of the downloaded file. It must be either zip or tar format. strip_prefix (str): When extracting, strip this prefix from the extracted files. 
@@ -454,6 +473,7 @@ def remote_file(name:str, url:str|list, hashes:list=None, out:str=None, binary:b building_description = 'Extracting...', deps = deps, exported_deps = exported_deps, + runtime_deps = runtime_deps, entry_points = entry_points, ) @@ -484,6 +504,7 @@ def remote_file(name:str, url:str|list, hashes:list=None, out:str=None, binary:b building_description = 'Fetching...', deps = deps, exported_deps = exported_deps, + runtime_deps = runtime_deps, test_only = test_only, labels = labels, sandbox = False, diff --git a/src/build/incrementality_test.go b/src/build/incrementality_test.go index a2ce75cdf..e57cc709a 100644 --- a/src/build/incrementality_test.go +++ b/src/build/incrementality_test.go @@ -81,9 +81,10 @@ var KnownFields = map[string]bool{ "Debug.namedTools": true, // These only contribute to the runtime hash, not at build time. - "Data": true, - "NamedData": true, - "ContainerSettings": true, + "runtimeDependencies": true, + "Data": true, + "NamedData": true, + "ContainerSettings": true, // These would ideally not contribute to the hash, but we need that at present // because we don't have a good way to force a recheck of its reverse dependencies. diff --git a/src/core/build_target.go b/src/core/build_target.go index 18dc8ed2f..3e2b8b32b 100644 --- a/src/core/build_target.go +++ b/src/core/build_target.go @@ -115,6 +115,8 @@ type BuildTarget struct { // Maps the original declaration to whatever dependencies actually got attached, // which may be more than one in some cases. Also contains info about exporting etc. dependencies []depInfo `name:"deps"` + // The run-time dependencies of this target. + runtimeDependencies []BuildLabel `name:"runtime_deps"` // List of build target patterns that can use this build target. Visibility []BuildLabel // Source files of this rule. Can refer to build rules themselves. 
@@ -307,6 +309,7 @@ type depInfo struct { resolved bool // has the graph resolved it exported bool // is it an exported dependency internal bool // is it an internal dependency (that is not picked up implicitly by transitive searches) + runtime bool // is it a run-time (and therefore implicitly transitive) dependency source bool // is it implicit because it's a source (not true if it's a dependency too) data bool // is it a data item for a test } @@ -632,7 +635,7 @@ func (target *BuildTarget) DeclaredDependenciesStrict() []BuildLabel { defer target.mutex.RUnlock() ret := make(BuildLabels, 0, len(target.dependencies)) for _, dep := range target.dependencies { - if !dep.exported && !dep.source && !target.IsTool(*dep.declared) { + if !dep.runtime && !dep.exported && !dep.source && !target.IsTool(*dep.declared) { ret = append(ret, *dep.declared) } } @@ -672,13 +675,13 @@ func (target *BuildTarget) ExternalDependencies() []*BuildTarget { return ret } -// BuildDependencies returns the build-time dependencies of this target (i.e. not data, internal nor source). +// BuildDependencies returns the build-time dependencies of this target (i.e. not run-time dependencies, data, internal nor source). func (target *BuildTarget) BuildDependencies() []*BuildTarget { target.mutex.RLock() defer target.mutex.RUnlock() ret := make(BuildTargets, 0, len(target.dependencies)) for _, deps := range target.dependencies { - if !deps.data && !deps.internal && !deps.source { + if !deps.runtime && !deps.data && !deps.internal && !deps.source { for _, dep := range deps.deps { ret = append(ret, dep) } @@ -701,6 +704,52 @@ func (target *BuildTarget) ExportedDependencies() []BuildLabel { return ret } +// RuntimeDependencies returns any run-time dependencies of this target. +// +// Although run-time dependencies are transitive, RuntimeDependencies only returns this target's direct run-time +// dependencies. Use IterAllRuntimeDependencies to iterate over the target's run-time dependencies transitively. 
+func (target *BuildTarget) RuntimeDependencies() []BuildLabel { + target.mutex.RLock() + defer target.mutex.RUnlock() + ret := make(BuildLabels, 0, len(target.dependencies)) + for _, deps := range target.dependencies { + if deps.runtime { + ret = append(ret, *deps.declared) + } + } + return ret +} + +// IterAllRuntimeDependencies returns an iterator over the transitive run-time dependencies of this target. +// Require/provide relationships between pairs of targets are resolved as they are with build-time dependencies. +func (target *BuildTarget) IterAllRuntimeDependencies(graph *BuildGraph) iter.Seq[BuildLabel] { + var ( + push func(*BuildTarget, func(BuildLabel) bool) bool + done = make(map[string]bool) + ) + push = func(t *BuildTarget, yield func(BuildLabel) bool) bool { + if done[t.String()] { + return true + } + done[t.String()] = true + for _, runDep := range t.runtimeDependencies { + runDepLabel, _ := runDep.Label() + for _, providedDep := range graph.TargetOrDie(runDepLabel).ProvideFor(t) { + if !yield(providedDep) { + return false + } + if !push(graph.TargetOrDie(providedDep), yield) { + return false + } + } + } + return true + } + return func(yield func(BuildLabel) bool) { + push(target, yield) + } +} + // DependenciesFor returns the dependencies that relate to a given label. func (target *BuildTarget) DependenciesFor(label BuildLabel) []*BuildTarget { target.mutex.RLock() @@ -1287,7 +1336,7 @@ func (target *BuildTarget) addSource(sources []BuildInput, source BuildInput) [] } // Add a dependency if this is not just a file. if label, ok := source.Label(); ok { - target.AddMaybeExportedDependency(label, false, true, false) + target.AddMaybeExportedDependency(label, false, true, false, false) } return append(sources, source) } @@ -1653,7 +1702,7 @@ func (target *BuildTarget) AllNamedTools() map[string][]BuildInput { // AddDependency adds a dependency to this target. It deduplicates against any existing deps. 
func (target *BuildTarget) AddDependency(dep BuildLabel) { - target.AddMaybeExportedDependency(dep, false, false, false) + target.AddMaybeExportedDependency(dep, false, false, false, false) } // HintDependencies allocates space for at least the given number of dependencies without reallocating. @@ -1662,17 +1711,30 @@ func (target *BuildTarget) HintDependencies(n int) { } // AddMaybeExportedDependency adds a dependency to this target which may be exported. It deduplicates against any existing deps. -func (target *BuildTarget) AddMaybeExportedDependency(dep BuildLabel, exported, source, internal bool) { +func (target *BuildTarget) AddMaybeExportedDependency(dep BuildLabel, exported, source, internal, runtime bool) { if dep == target.Label { log.Fatalf("Attempted to add %s as a dependency of itself.\n", dep) } + if runtime { + if !target.IsBinary { + log.Fatalf("%s: output must be marked as binary to have run-time dependencies", target.String()) + } + target.runtimeDependencies = append(target.runtimeDependencies, dep) + } info := target.dependencyInfo(dep) if info == nil { - target.dependencies = append(target.dependencies, depInfo{declared: &dep, exported: exported, source: source, internal: internal}) + target.dependencies = append(target.dependencies, depInfo{ + declared: &dep, + exported: exported, + source: source, + internal: internal, + runtime: runtime, + }) } else { info.exported = info.exported || exported info.source = info.source && source info.internal = info.internal && internal + info.runtime = info.runtime && runtime info.data = false // It's not *only* data any more. 
} } diff --git a/src/core/build_target_test.go b/src/core/build_target_test.go index d9a6cea38..4360be131 100644 --- a/src/core/build_target_test.go +++ b/src/core/build_target_test.go @@ -4,6 +4,7 @@ package core import ( "fmt" "os" + "slices" "testing" "github.com/stretchr/testify/assert" @@ -263,7 +264,7 @@ func TestAddDatum(t *testing.T) { assert.Equal(t, target1.Data, []BuildInput{target2.Label}) assert.True(t, target1.dependencies[0].data) // Now we add it as a dependency too, which unsets the data label - target1.AddMaybeExportedDependency(target2.Label, false, false, false) + target1.AddMaybeExportedDependency(target2.Label, false, false, false, false) assert.False(t, target1.dependencies[0].data) } @@ -427,20 +428,64 @@ func TestBuildDependencies(t *testing.T) { target1 := makeTarget1("//src/core:target1", "") target2 := makeTarget1("//src/core:target2", "", target1) target3 := makeTarget1("//src/core:target3", "", target2) + target4 := makeTarget1("//src/core:target4", "") + target5 := makeTarget1("//src/core:target5", "") target3.AddDatum(target1.Label) + // BuildDependencies shouldn't return run-time dependencies: + target5.IsBinary = true + target5.AddMaybeExportedDependency(target4.Label, false, false, false, true) // runtime assert.Equal(t, []*BuildTarget{}, target1.BuildDependencies()) assert.Equal(t, []*BuildTarget{target1}, target2.BuildDependencies()) assert.Equal(t, []*BuildTarget{target2}, target3.BuildDependencies()) + assert.Equal(t, []*BuildTarget{}, target5.BuildDependencies()) } func TestDeclaredDependenciesStrict(t *testing.T) { target1 := makeTarget1("//src/core:target1", "") target2 := makeTarget1("//src/core:target2", "", target1) target3 := makeTarget1("//src/core:target3", "", target2) - target3.AddMaybeExportedDependency(target1.Label, true, false, false) + target4 := makeTarget1("//src/core:target4", "") + target5 := makeTarget1("//src/core:target5", "") + target3.AddMaybeExportedDependency(target1.Label, true, false, false, false) 
+ // DeclaredDependenciesStrict shouldn't return run-time dependencies: + target5.IsBinary = true + target5.AddMaybeExportedDependency(target4.Label, false, false, false, true) // runtime assert.Equal(t, []BuildLabel{}, target1.DeclaredDependenciesStrict()) assert.Equal(t, []BuildLabel{target1.Label}, target2.DeclaredDependenciesStrict()) assert.Equal(t, []BuildLabel{target2.Label}, target3.DeclaredDependenciesStrict()) + assert.Equal(t, []*BuildTarget{}, target5.BuildDependencies()) +} + +func TestRuntimeDependencies(t *testing.T) { + target1 := makeTarget1("//src/core:target1", "") + target2 := makeTarget1("//src/core:target2", "") + target3 := makeTarget1("//src/core:target3", "") + target2.IsBinary = true + target2.AddMaybeExportedDependency(target1.Label, false, false, false, true) // runtime + target3.IsBinary = true + target3.AddMaybeExportedDependency(target2.Label, false, false, false, true) // runtime + // RuntimeDependencies shouldn't return transitive run-time dependencies. + assert.Equal(t, []BuildLabel{}, target1.RuntimeDependencies()) + assert.Equal(t, []BuildLabel{target1.Label}, target2.RuntimeDependencies()) + assert.Equal(t, []BuildLabel{target2.Label}, target3.RuntimeDependencies()) +} + +func TestIterAllRuntimeDependencies(t *testing.T) { + target1 := makeTarget1("//src/core:target1", "") + target2 := makeTarget1("//src/core:target2", "") + target3 := makeTarget1("//src/core:target3", "") + target2.IsBinary = true + target2.AddMaybeExportedDependency(target1.Label, false, false, false, true) // runtime + target3.IsBinary = true + target3.AddMaybeExportedDependency(target2.Label, false, false, false, true) // runtime + graph := NewGraph() + graph.AddTarget(target1) + graph.AddTarget(target2) + graph.AddTarget(target3) + // IterAllRuntimeDependencies should yield transitive run-time dependencies. 
+ assert.Nil(t, slices.Collect(target1.IterAllRuntimeDependencies(graph))) + assert.ElementsMatch(t, []BuildLabel{target1.Label}, slices.Collect(target2.IterAllRuntimeDependencies(graph))) + assert.ElementsMatch(t, []BuildLabel{target1.Label, target2.Label}, slices.Collect(target3.IterAllRuntimeDependencies(graph))) } func TestAddDependency(t *testing.T) { @@ -451,7 +496,7 @@ func TestAddDependency(t *testing.T) { target2.AddDependency(target1.Label) assert.Equal(t, []BuildLabel{target1.Label}, target2.DeclaredDependencies()) assert.Equal(t, []BuildLabel{}, target2.ExportedDependencies()) - target2.AddMaybeExportedDependency(target1.Label, true, false, false) + target2.AddMaybeExportedDependency(target1.Label, true, false, false, false) assert.Equal(t, []BuildLabel{target1.Label}, target2.DeclaredDependencies()) assert.Equal(t, []BuildLabel{target1.Label}, target2.ExportedDependencies()) assert.Equal(t, []*BuildTarget{}, target2.Dependencies()) @@ -459,13 +504,25 @@ func TestAddDependency(t *testing.T) { assert.Equal(t, []*BuildTarget{target1}, target2.Dependencies()) } +func TestAddRuntimeDependency(t *testing.T) { + target1 := makeTarget1("//src/core:target1", "PUBLIC") + target2 := makeTarget1("//src/core:target2", "PUBLIC") + target1.IsBinary = true + target1.AddMaybeExportedDependency(target2.Label, false, false, false, true) // runtime + assert.Equal(t, target1.runtimeDependencies, []BuildLabel{target2.Label}) + assert.True(t, target1.dependencies[0].runtime) + // Now we add it as a build-time dependency too, which should unset the runtime flag. 
+ target1.AddMaybeExportedDependency(target2.Label, false, false, false, false) + assert.False(t, target1.dependencies[0].runtime) +} + func TestAddDependencySource(t *testing.T) { target1 := makeTarget1("//src/core:target1", "") target2 := makeTarget1("//src/core:target2", "") - target2.AddMaybeExportedDependency(target1.Label, true, true, false) + target2.AddMaybeExportedDependency(target1.Label, true, true, false, false) assert.True(t, target2.IsSourceOnlyDep(target1.Label)) // N.B. It's important that calling this again cancels the source flag. - target2.AddMaybeExportedDependency(target1.Label, true, false, false) + target2.AddMaybeExportedDependency(target1.Label, true, false, false, false) assert.False(t, target2.IsSourceOnlyDep(target1.Label)) } diff --git a/src/core/state.go b/src/core/state.go index 3596885b5..18a03d70a 100644 --- a/src/core/state.go +++ b/src/core/state.go @@ -1353,18 +1353,44 @@ func (state *BuildState) IterInputs(target *BuildTarget, test bool) iter.Seq[Bui return IterInputs(state, state.Graph, target, true, target.IsFilegroup) } return func(yield func(BuildInput) bool) { + // The target itself, plus its transitive run-time dependencies (since we're about to run the target): if !yield(target.Label) { return } + for runDep := range target.IterAllRuntimeDependencies(state.Graph) { + if !yield(runDep) { + return + } + } + // The target's data, plus the transitive run-time dependencies for data that are also targets: for _, datum := range target.AllData() { if !yield(datum) { return } + label, ok := datum.Label() + if !ok { + continue + } + for runDep := range state.Graph.TargetOrDie(label).IterAllRuntimeDependencies(state.Graph) { + if !yield(runDep) { + return + } + } } + // The target's test tools, plus the transitive run-time dependencies for test tools that are also targets: for _, tool := range target.AllTestTools() { if !yield(tool) { return } + label, ok := tool.Label() + if !ok { + continue + } + for runDep := range 
state.Graph.TargetOrDie(label).IterAllRuntimeDependencies(state.Graph) { + if !yield(runDep) { + return + } + } } } } diff --git a/src/core/utils.go b/src/core/utils.go index abc7998cf..3c63f5351 100644 --- a/src/core/utils.go +++ b/src/core/utils.go @@ -146,6 +146,11 @@ func IterInputs(state *BuildState, graph *BuildGraph, target *BuildTarget, inclu if !yield(p) { return false } + for runDep := range graph.TargetOrDie(p).IterAllRuntimeDependencies(graph) { + if !yield(runDep) { + return false + } + } } return true } @@ -157,6 +162,11 @@ func IterInputs(state *BuildState, graph *BuildGraph, target *BuildTarget, inclu if !yield(dependency.Label) { return false } + for runDep := range graph.TargetOrDie(dependency.Label).IterAllRuntimeDependencies(graph) { + if !yield(runDep) { + return false + } + } } done[dependency.Label] = true @@ -257,6 +267,15 @@ func IterRuntimeFiles(graph *BuildGraph, target *BuildTarget, absoluteOuts bool, } } + for runDep := range target.IterAllRuntimeDependencies(graph) { + fullPaths := runDep.FullPaths(graph) + for i, depPath := range runDep.Paths(graph) { + if !pushOut(fullPaths[i], depPath) { + return + } + } + } + for _, data := range target.AllData() { fullPaths := data.FullPaths(graph) for i, dataPath := range data.Paths(graph) { @@ -264,6 +283,18 @@ func IterRuntimeFiles(graph *BuildGraph, target *BuildTarget, absoluteOuts bool, return } } + label, ok := data.Label() + if !ok { + continue + } + for runDep := range graph.TargetOrDie(label).IterAllRuntimeDependencies(graph) { + fullPaths := runDep.FullPaths(graph) + for i, depPath := range runDep.Paths(graph) { + if !pushOut(fullPaths[i], depPath) { + return + } + } + } } if target.Test != nil { @@ -274,6 +305,18 @@ func IterRuntimeFiles(graph *BuildGraph, target *BuildTarget, absoluteOuts bool, return } } + label, ok := tool.Label() + if !ok { + continue + } + for runDep := range graph.TargetOrDie(label).IterAllRuntimeDependencies(graph) { + fullPaths := runDep.FullPaths(graph) + for 
i, depPath := range runDep.Paths(graph) { + if !pushOut(fullPaths[i], depPath) { + return + } + } + } } } @@ -285,6 +328,18 @@ func IterRuntimeFiles(graph *BuildGraph, target *BuildTarget, absoluteOuts bool, return } } + label, ok := data.Label() + if !ok { + continue + } + for runDep := range graph.TargetOrDie(label).IterAllRuntimeDependencies(graph) { + fullPaths := runDep.FullPaths(graph) + for i, depPath := range runDep.Paths(graph) { + if !pushOut(fullPaths[i], depPath) { + return + } + } + } } for _, tool := range target.AllDebugTools() { fullPaths := tool.FullPaths(graph) @@ -293,6 +348,18 @@ func IterRuntimeFiles(graph *BuildGraph, target *BuildTarget, absoluteOuts bool, return } } + label, ok := tool.Label() + if !ok { + continue + } + for runDep := range graph.TargetOrDie(label).IterAllRuntimeDependencies(graph) { + fullPaths := runDep.FullPaths(graph) + for i, depPath := range runDep.Paths(graph) { + if !pushOut(fullPaths[i], depPath) { + return + } + } + } } } } diff --git a/src/parse/asp/builtins.go b/src/parse/asp/builtins.go index c34479f30..8aede7823 100644 --- a/src/parse/asp/builtins.go +++ b/src/parse/asp/builtins.go @@ -1176,7 +1176,8 @@ func addDep(s *scope, args []pyObject) pyObject { target := getTargetPost(s, string(args[0].(pyString))) dep := s.parseLabelInPackage(string(args[1].(pyString)), s.pkg) exported := args[2].IsTruthy() - target.AddMaybeExportedDependency(dep, exported, false, false) + runtime := args[3].IsTruthy() + target.AddMaybeExportedDependency(dep, exported, false, false, runtime) // Queue this dependency if it'll be needed. 
if target.State() > core.Inactive { err := s.state.QueueTarget(dep, target.Label, false, core.ParseModeNormal) diff --git a/src/parse/asp/targets.go b/src/parse/asp/targets.go index 94c656830..96baf2483 100644 --- a/src/parse/asp/targets.go +++ b/src/parse/asp/targets.go @@ -30,6 +30,7 @@ const ( outsBuildRuleArgIdx depsBuildRuleArgIdx exportedDepsBuildRuleArgIdx + runtimeDepsBuildRuleArgIdx secretsBuildRuleArgIdx toolsBuildRuleArgIdx testToolsBuildRuleArgIdx @@ -270,9 +271,10 @@ func populateTarget(s *scope, t *core.BuildTarget, args []pyObject) { addMaybeNamedOutput(s, "outs", args[outsBuildRuleArgIdx], t.AddOutput, t.AddNamedOutput, t, false) addMaybeNamedOutput(s, "optional_outs", args[optionalOutsBuildRuleArgIdx], t.AddOptionalOutput, nil, t, true) t.HintDependencies(depLen(args[depsBuildRuleArgIdx]) + depLen(args[exportedDepsBuildRuleArgIdx]) + depLen(args[internalDepsBuildRuleArgIdx])) - addDependencies(s, "deps", args[depsBuildRuleArgIdx], t, false, false) - addDependencies(s, "exported_deps", args[exportedDepsBuildRuleArgIdx], t, true, false) - addDependencies(s, "internal_deps", args[internalDepsBuildRuleArgIdx], t, false, true) + addDependencies(s, "deps", args[depsBuildRuleArgIdx], t, false, false, false) + addDependencies(s, "exported_deps", args[exportedDepsBuildRuleArgIdx], t, true, false, false) + addDependencies(s, "internal_deps", args[internalDepsBuildRuleArgIdx], t, false, true, false) + addDependencies(s, "runtime_deps", args[runtimeDepsBuildRuleArgIdx], t, false, false, true) addStrings(s, "labels", args[labelsBuildRuleArgIdx], t.AddLabel) addStrings(s, "hashes", args[hashesBuildRuleArgIdx], t.AddHash) addStrings(s, "licences", args[licencesBuildRuleArgIdx], t.AddLicence) @@ -472,13 +474,13 @@ func addMaybeNamedSecret(s *scope, name string, obj pyObject, anon func(string), } // addDependencies adds dependencies to a target, which may or may not be exported. 
-func addDependencies(s *scope, name string, obj pyObject, target *core.BuildTarget, exported, internal bool) { +func addDependencies(s *scope, name string, obj pyObject, target *core.BuildTarget, exported, internal, runtime bool) { addStrings(s, name, obj, func(str string) { if s.state.Config.Bazel.Compatibility && !core.LooksLikeABuildLabel(str) && !strings.HasPrefix(str, "@") { // *sigh*... Bazel seems to allow an implicit : on the start of dependencies str = ":" + str } - target.AddMaybeExportedDependency(assertNotPseudoLabel(s, s.parseLabelInPackage(str, s.pkg)), exported, false, internal) + target.AddMaybeExportedDependency(assertNotPseudoLabel(s, s.parseLabelInPackage(str, s.pkg)), exported, false, internal, runtime) }) } diff --git a/src/query/print.go b/src/query/print.go index 9d5524bd3..0c551055d 100644 --- a/src/query/print.go +++ b/src/query/print.go @@ -137,6 +137,9 @@ func specialFields() specialFieldsMap { "exported_deps": func(target *core.BuildTarget) interface{} { return target.ExportedDependencies() }, + "runtime_deps": func(target *core.BuildTarget) interface{} { + return target.RuntimeDependencies() + }, "visibility": func(target *core.BuildTarget) interface{} { if len(target.Visibility) == 1 && target.Visibility[0] == core.WholeGraph[0] { return []string{"PUBLIC"} diff --git a/test/runtime_deps/BUILD b/test/runtime_deps/BUILD new file mode 100644 index 000000000..fb6f9ed9d --- /dev/null +++ b/test/runtime_deps/BUILD @@ -0,0 +1,68 @@ +subinclude("//test/build_defs") + +please_repo_e2e_test( + name = "runtime_deps_test", + plz_command = " && ".join([ + "plz test //test:runtime_deps_test_case", + ]), + repo = "repo", +) + +# Ensure that the runtime_deps field is correctly printed for those that have one (and that it is omitted for those that +# don't). Importantly, ensure that run-time dependencies are not printed transitively - `plz query print` should only +# print the target's direct run-time dependencies. 
+please_repo_e2e_test( + name = "query_print_test", + plz_command = " && ".join([ + "plz query print -f runtime_deps //test:runtime_deps_test_case > runtime_deps_test_case", + "plz query print -f runtime_deps //test:target_with_no_runtime_deps > target_with_no_runtime_deps", + ]), + expected_output = { + "runtime_deps_test_case": "//test:target_with_runtime_deps", + "target_with_no_runtime_deps": "", + }, + repo = "repo", +) + +# Ensure that run-time dependencies are in fact considered dependencies by `plz query deps`. Run-time dependencies added +# by a call to the add_runtime_dep function in a post-build function should not appear here. +_expected_deps = """\ +//test:dep_with_runtime_dep + //test:dep_runtime_dep +//test:src_with_runtime_dep + //test:src_dep_with_runtime_dep + //test:src_dep_runtime_dep + //test:src_runtime_dep +//test:target_with_runtime_deps + //test:runtime_dep + //test:target_requiring_kittens + //test:target_with_provides_runtime_dep + //test:provides_runtime_dep + //test:target_with_another_runtime_dep + //test:another_runtime_dep + //test:target_with_build_and_runtime_deps + //test:build_and_runtime_dep + //test:target_with_post_build_runtime_dep +//test:test_data_with_runtime_dep + //test:test_data_runtime_dep +//test:test_tool_with_runtime_dep + //test:test_tool_runtime_dep""" + +please_repo_e2e_test( + name = "query_deps_test", + plz_command = "plz query deps //test:runtime_deps_test_case > deps", + expected_output = { + "deps": _expected_deps, + }, + repo = "repo", +) + +# Ensure that run-time dependencies are in fact considered dependencies by `plz query revdeps`. 
+please_repo_e2e_test( + name = "query_revdeps_test", + plz_command = "plz query revdeps //test:another_runtime_dep > revdeps", + expected_output = { + "revdeps": "//test:target_with_another_runtime_dep", + }, + repo = "repo", +) diff --git a/test/runtime_deps/repo/.plzconfig b/test/runtime_deps/repo/.plzconfig new file mode 100644 index 000000000..e69de29bb diff --git a/test/runtime_deps/repo/test/BUILD_FILE b/test/runtime_deps/repo/test/BUILD_FILE new file mode 100644 index 000000000..de3274738 --- /dev/null +++ b/test/runtime_deps/repo/test/BUILD_FILE @@ -0,0 +1,197 @@ +def exists(file:str): + path = join_path(package_name(), file) + return f"echo -n '{path} exists: '; if [ -f '{path}' ]; then echo OK; else echo FAIL; exit 1; fi" + +def not_exists(file:str): + path = join_path(package_name(), file) + return f"echo -n '{path} does not exist: '; if [ -f '{path}' ]; then echo FAIL; exit 1; else echo OK; fi" + +def runtime_dep(name:str): + return genrule( + name = name, + outs = [name], + cmd = "touch $OUTS", + ) + +def target(name:str, build_tests:list=[], post_build:function=None, requires:list=None, provides:dict=None, + runtime_deps:list=[], deps:list=[]): + # Ensure that run-time dependencies are not present at build time, unless they are also + # explicitly given as build-time dependencies. (The lstrips here turn build target names + # into the names of files they output - they're slightly grungy, but they allow us to + # re-use exists and not_exists outside of this function; they work because targets + # generated by runtime_dep output a single file who name is identical to that of the + # target's.) + cmd = [not_exists(r.lstrip(":")) for r in runtime_deps if r not in deps] + cmd += [exists(d.lstrip(":")) for d in deps] + return genrule( + name = name, + outs = [name], + # Assume that if a post-build function is defined, it dynamically adds a run-time + # dependency, which requires this target to be marked as binary. 
+ binary = len(runtime_deps) > 0 or post_build is not None, + cmd = cmd + build_tests + ["touch $OUTS"], + post_build = post_build, + requires = requires, + provides = provides, + runtime_deps = runtime_deps, + deps = deps, + ) + + +target( + name = "target_with_no_runtime_deps", +) + +target( + name = "target_with_runtime_deps", + build_tests = [ + # Ensure that the run-time dependencies :target_with_post_build_runtime_dep and + # :target_requiring_kittens are not present at build time (these ones are tricky + # to identify statically, hence explicitly listing them here). + not_exists("post_build_runtime_dep"), + not_exists("provides_runtime_dep"), + ], + runtime_deps = [ + ":runtime_dep", + ":target_with_another_runtime_dep", + ":target_with_build_and_runtime_deps", + ":target_with_post_build_runtime_dep", + ":target_requiring_kittens", + ], +) + +target( + name = "target_with_another_runtime_dep", + runtime_deps = [":another_runtime_dep"], +) + +target( + name = "target_with_build_and_runtime_deps", + runtime_deps = [":build_and_runtime_dep"], + deps = [":build_and_runtime_dep"], +) + +target( + name = "target_with_post_build_runtime_dep", + build_tests = [ + # Ensure that the run-time dependency added by the post-build function is not present at + # build time (this one can't be identified statically, hence explicitly listing it here). 
+ not_exists("post_build_runtime_dep"), + ], + post_build = lambda name, _: add_runtime_dep(name, ":post_build_runtime_dep"), +) + +target( + name = "src_with_runtime_dep", + runtime_deps = [":src_runtime_dep"], + deps = [":src_dep_with_runtime_dep"], +) + +target( + name = "src_dep_with_runtime_dep", + runtime_deps = [":src_dep_runtime_dep"], +) + +target( + name = "dep_with_runtime_dep", + runtime_deps = [":dep_runtime_dep"], +) + +target( + name = "target_requiring_kittens", + build_tests = [ + # Ensure that neither of the run-time dependencies that could possibly be provided by + # :runtime_dep_providing_kittens or :target_with_provides_runtime_dep are present at + # build time (only the latter *should* be provided, but it isn't possible to generate + # both of these tests statically, hence explicitly listing them here). + not_exists("runtime_dep_providing_kittens_runtime_dep"), + not_exists("provides_runtime_dep"), + ], + requires = ["kittens"], + runtime_deps = [":runtime_dep_providing_kittens"], +) + +target( + name = "runtime_dep_providing_kittens", + provides = { + "kittens": ":target_with_provides_runtime_dep", + }, + runtime_deps = [":runtime_dep_providing_kittens_runtime_dep"], +) + +target( + name = "target_with_provides_runtime_dep", + runtime_deps = [":provides_runtime_dep"], +) + +target( + name = "test_data_with_runtime_dep", + runtime_deps = [":test_data_runtime_dep"], +) + +target( + name = "test_tool_with_runtime_dep", + runtime_deps = [":test_tool_runtime_dep"], +) + +runtime_dep("runtime_dep") +runtime_dep("another_runtime_dep") +runtime_dep("build_and_runtime_dep") +runtime_dep("post_build_runtime_dep") +runtime_dep("src_runtime_dep") +runtime_dep("src_dep_runtime_dep") +runtime_dep("dep_runtime_dep") +runtime_dep("test_data_runtime_dep") +runtime_dep("test_tool_runtime_dep") +runtime_dep("provides_runtime_dep") +runtime_dep("runtime_dep_providing_kittens_runtime_dep") + +gentest( + name = "runtime_deps_test_case", + srcs = { + 
"src_with_runtime_dep": [":src_with_runtime_dep"], + }, + outs = ["runtime_deps_test_case"], + cmd = [ + # Ensure this target's sources' and dependencies' run-time dependencies are present at build time... + exists("src_runtime_dep"), + exists("dep_runtime_dep"), + # ...but that those targets' (build-time) dependencies' run-time dependencies are not... + not_exists("src_dep_runtime_dep"), + # ...and that this target's run-time dependencies are not... + not_exists("runtime_dep"), + not_exists("another_runtime_dep"), + not_exists("build_and_runtime_dep"), + not_exists("post_build_runtime_dep"), + not_exists("provides_runtime_dep"), + not_exists("runtime_dep_providing_kittens_runtime_dep"), + # ...and that this target's test-time dependencies are not. + not_exists("test_data_runtime_dep"), + not_exists("test_tool_runtime_dep"), + "touch $OUTS", + ], + data = [":test_data_with_runtime_dep"], + no_test_output = True, + runtime_deps = [":target_with_runtime_deps"], + test_cmd = [ + # Ensure this target's sources' and dependencies' (transitive) run-time dependencies are not present at test time... + not_exists("src_runtime_dep"), + not_exists("dep_runtime_dep"), + not_exists("src_dep_runtime_dep"), + # ...but that this target's run-time dependencies are... + exists("runtime_dep"), + exists("another_runtime_dep"), + exists("build_and_runtime_dep"), + exists("post_build_runtime_dep"), + exists("provides_runtime_dep"), + # ...and that this target's test-time dependencies are... + exists("test_data_runtime_dep"), + exists("test_tool_runtime_dep"), + # ...and that the requires/provides in the run-time dependency tree were correctly resolved. 
+ not_exists("runtime_dep_providing_kittens_runtime_dep"), + ], + test_tools = [":test_tool_with_runtime_dep"], + deps = [ + ":dep_with_runtime_dep", + ], +) diff --git a/tools/build_langserver/lsp/definition_test.go b/tools/build_langserver/lsp/definition_test.go index ff05ee0af..1adb13bab 100644 --- a/tools/build_langserver/lsp/definition_test.go +++ b/tools/build_langserver/lsp/definition_test.go @@ -25,7 +25,7 @@ func TestDefinition(t *testing.T) { assert.Equal(t, []lsp.Location{ { URI: lsp.DocumentURI("file://" + filepath.Join(cacheDir, "please/misc_rules.build_defs")), - Range: xrng(3, 0, 145, 5), + Range: xrng(3, 0, 153, 5), }, }, locs) } @@ -45,7 +45,7 @@ func TestDefinitionStatement(t *testing.T) { assert.Equal(t, []lsp.Location{ { URI: lsp.DocumentURI("file://" + filepath.Join(cacheDir, "please/misc_rules.build_defs")), - Range: xrng(3, 0, 145, 5), + Range: xrng(3, 0, 153, 5), }, }, locs) } @@ -65,7 +65,7 @@ func TestDefinitionBuiltin(t *testing.T) { assert.Equal(t, []lsp.Location{ { URI: lsp.DocumentURI("file://" + filepath.Join(cacheDir, "please/misc_rules.build_defs")), - Range: xrng(3, 0, 145, 5), + Range: xrng(3, 0, 153, 5), }, }, locs) } From 1b190e9c8fb5d5a128cb7d0cf79cb768079d067f Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Thu, 13 Nov 2025 16:17:42 +0000 Subject: [PATCH 08/38] Tag v17.24.0 (#3452) --- ChangeLog | 6 ++++++ VERSION | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/ChangeLog b/ChangeLog index 4ffd56ebd..3d19526d7 100644 --- a/ChangeLog +++ b/ChangeLog @@ -1,3 +1,9 @@ +Version 17.24.0 +--------------- + * Define `PLZ_ENV` environment variable in build environments (#3444) + * Strip symbols from please_sandbox release binaries (#3450) + * Implement run-time dependencies for binary targets via the `runtime_deps` parameter (#3451) + Version 17.23.0 --------------- * Bump arcat to v1.3.0, adding the `--include` option to the `zip` command (#3441) diff --git a/VERSION b/VERSION index 85496bc4c..858348570 100644 
--- a/VERSION +++ b/VERSION @@ -1 +1 @@ -17.23.0 +17.24.0 From c550b0bf905a24e9a859befbe9761b265b94cf03 Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Fri, 14 Nov 2025 10:08:48 +0000 Subject: [PATCH 09/38] Download run-time dependencies when running a target remotely (#3453) The code path taken when `plz run`ning a binary target is slightly different for remote execution vs local execution - ensure the target's transitive run-time dependencies are all downloaded by the client before it runs the target. --- src/remote/remote.go | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/src/remote/remote.go b/src/remote/remote.go index e7fe68fe2..ee7ec220d 100644 --- a/src/remote/remote.go +++ b/src/remote/remote.go @@ -351,10 +351,13 @@ func (c *Client) Build(target *core.BuildTarget) (*core.BuildMetadata, error) { if err := c.Download(target); err != nil { return metadata, err } - // TODO(peterebden): Should this not just be part of Download()? + // TODO(peterebden): Should these not just be part of Download()? if err := c.downloadData(target); err != nil { return metadata, err } + if err := c.downloadRuntimeDependencies(target); err != nil { + return metadata, err + } } return metadata, nil } @@ -376,6 +379,22 @@ func (c *Client) downloadData(target *core.BuildTarget) error { return g.Wait() } +// downloadRuntimeDependencies downloads all the runtime dependencies for a target. +func (c *Client) downloadRuntimeDependencies(target *core.BuildTarget) error { + var g errgroup.Group + for runDep := range target.IterAllRuntimeDependencies(c.state.Graph) { + l, _ := runDep.Label() + t := c.state.Graph.TargetOrDie(l) + g.Go(func() error { + if err := c.Download(t); err != nil { + return err + } + return nil + }) + } + return g.Wait() +} + // Run runs a target on the remote executors. 
func (c *Client) Run(target *core.BuildTarget) error { if err := c.CheckInitialised(); err != nil { From 5f970f9f33e9e32ced0c46e7b80cb83ee4c7ae2c Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Fri, 14 Nov 2025 10:17:50 +0000 Subject: [PATCH 10/38] Tag v17.24.1 (#3454) --- ChangeLog | 4 ++++ VERSION | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/ChangeLog b/ChangeLog index 3d19526d7..ff93a4561 100644 --- a/ChangeLog +++ b/ChangeLog @@ -1,3 +1,7 @@ +Version 17.24.1 +--------------- + * Download run-time dependencies when running a target remotely (#3453) + Version 17.24.0 --------------- * Define `PLZ_ENV` environment variable in build environments (#3444) diff --git a/VERSION b/VERSION index 858348570..4f0d1eeeb 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -17.24.0 +17.24.1 From 1dbd2bce52d3ff03db8cdcbe04c64831a864edfa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 20 Nov 2025 10:51:03 +0000 Subject: [PATCH 11/38] Bump golang.org/x/crypto from 0.36.0 to 0.45.0 (#3455) Bumps [golang.org/x/crypto](https://github.com/golang/crypto) from 0.36.0 to 0.45.0. - [Commits](https://github.com/golang/crypto/compare/v0.36.0...v0.45.0) --- updated-dependencies: - dependency-name: golang.org/x/crypto dependency-version: 0.45.0 dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Chris Novakovic --- go.mod | 16 ++++++++-------- go.sum | 28 ++++++++++++++-------------- third_party/go/BUILD | 14 +++++++------- 3 files changed, 29 insertions(+), 29 deletions(-) diff --git a/go.mod b/go.mod index 126ef7859..98c3c534d 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module github.com/thought-machine/please -go 1.23.0 +go 1.24.0 require ( cloud.google.com/go/longrunning v0.5.5 @@ -47,11 +47,11 @@ require ( github.com/zeebo/blake3 v0.2.3 go.uber.org/automaxprocs v1.5.3 golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 - golang.org/x/net v0.38.0 - golang.org/x/sync v0.12.0 - golang.org/x/sys v0.31.0 - golang.org/x/term v0.30.0 - golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d + golang.org/x/net v0.47.0 + golang.org/x/sync v0.18.0 + golang.org/x/sys v0.38.0 + golang.org/x/term v0.37.0 + golang.org/x/tools v0.38.0 google.golang.org/genproto/googleapis/bytestream v0.0.0-20240304212257-790db918fca8 google.golang.org/genproto/googleapis/rpc v0.0.0-20240304212257-790db918fca8 google.golang.org/grpc v1.62.1 @@ -103,9 +103,9 @@ require ( go.opentelemetry.io/otel/metric v1.24.0 // indirect go.opentelemetry.io/otel/sdk v1.22.0 // indirect go.opentelemetry.io/otel/trace v1.24.0 // indirect - golang.org/x/crypto v0.36.0 // indirect + golang.org/x/crypto v0.45.0 // indirect golang.org/x/oauth2 v0.27.0 // indirect - golang.org/x/text v0.23.0 // indirect + golang.org/x/text v0.31.0 // indirect golang.org/x/time v0.5.0 // indirect google.golang.org/api v0.168.0 // indirect google.golang.org/genproto v0.0.0-20240304212257-790db918fca8 // indirect diff --git a/go.sum b/go.sum index 8e50c9d4f..285e02e64 100644 --- a/go.sum +++ b/go.sum @@ -305,8 +305,8 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod 
h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34= -golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc= +golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= +golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20240222234643-814bf88cf225 h1:LfspQV/FYTatPTr/3HzIcmiUFH7PGP+OQ6mgDYo3yuQ= golang.org/x/exp v0.0.0-20240222234643-814bf88cf225/go.mod h1:CxmFvTBINI24O/j8iY7H1xHzx2i4OsyguNBmN/uPtqc= @@ -334,8 +334,8 @@ golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLd golang.org/x/net v0.0.0-20210505214959-0714010a04ed/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.3.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE= -golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8= -golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8= +golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= +golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M= golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8= @@ -346,8 +346,8 @@ golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync 
v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw= -golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= +golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -375,21 +375,21 @@ golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik= -golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= +golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA= -golang.org/x/term v0.30.0 h1:PQ39fJZ+mfadBm0y5WlL4vlM7Sx1Hgf13sMIY2+QS9Y= -golang.org/x/term v0.30.0/go.mod 
h1:NYYFdzHoI5wRh/h5tDMdMqCqPJZEuNqVR5xJLd/n67g= +golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU= +golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY= -golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4= +golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= +golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -405,8 +405,8 @@ golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4f golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.4.0/go.mod h1:UE5sM2OK9E/d67R0ANs2xJizIymRP5gJU295PvKXxjQ= -golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg= -golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= +golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ= +golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs= 
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/third_party/go/BUILD b/third_party/go/BUILD index f42164f98..b74602cb1 100644 --- a/third_party/go/BUILD +++ b/third_party/go/BUILD @@ -35,12 +35,12 @@ go_repo( go_repo( module = "golang.org/x/text", - version = "v0.23.0", + version = "v0.31.0", ) go_repo( module = "golang.org/x/tools", - version = "v0.21.1-0.20240508182429-e35e4ccd0d2d", + version = "v0.38.0", ) go_repo( @@ -116,7 +116,7 @@ go_repo( go_repo( module = "golang.org/x/net", - version = "v0.38.0", + version = "v0.47.0", ) go_repo( @@ -211,7 +211,7 @@ go_repo( go_repo( module = "golang.org/x/term", - version = "v0.30.0", + version = "v0.37.0", ) go_repo( @@ -271,7 +271,7 @@ go_repo( go_repo( module = "golang.org/x/sync", - version = "v0.12.0", + version = "v0.18.0", ) go_repo( @@ -331,7 +331,7 @@ go_repo( go_repo( module = "golang.org/x/sys", - version = "v0.31.0", + version = "v0.38.0", ) go_repo( @@ -401,7 +401,7 @@ go_repo( go_repo( module = "golang.org/x/crypto", - version = "v0.36.0", + version = "v0.45.0", ) go_repo( From e1afda33b1fa2e1acf3bf27fd3d6c6c700e50847 Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Thu, 20 Nov 2025 11:38:12 +0000 Subject: [PATCH 12/38] Bump Go toolchain to 1.24.10 (#3456) --- third_party/go/BUILD | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/third_party/go/BUILD b/third_party/go/BUILD index b74602cb1..e4a905761 100644 --- a/third_party/go/BUILD +++ b/third_party/go/BUILD @@ -5,14 +5,14 @@ package(default_visibility = ["PUBLIC"]) go_toolchain( name = "toolchain", hashes = [ - "addbfce2056744962e2d7436313ab93486660cf7a2e066d171b9d6f2da7c7abe", # go1.24.1.darwin-amd64.tar.gz - 
"295581b5619acc92f5106e5bcb05c51869337eb19742fdfa6c8346c18e78ff88", # go1.24.1.darwin-arm64.tar.gz - "47d7de8bb64d5c3ee7b6723aa62d5ecb11e3568ef2249bbe1d4bbd432d37c00c", # go1.24.1.freebsd-amd64.tar.gz - "cb2396bae64183cdccf81a9a6df0aea3bce9511fc21469fb89a0c00470088073", # go1.24.1.linux-amd64.tar.gz - "8df5750ffc0281017fb6070fba450f5d22b600a02081dceef47966ffaf36a3af", # go1.24.1.linux-arm64.tar.gz + "fde05d84f7f64c8d01564f299ea1897fe94457d20d8d9054200ac1f8ae1c2bc3", # go1.24.10.darwin-amd64.tar.gz + "71c70841bcdadf4b5d2f7c0f099952907969f25235663622a47d6f2233ad39aa", # go1.24.10.darwin-arm64.tar.gz + "cb917b64aa4a407ed3310b397cc4dca10f0a3e2b0dd184ed74164ceaeab2625e", # go1.24.10.freebsd-amd64.tar.gz + "dd52b974e3d9c5a7bbfb222c685806def6be5d6f7efd10f9caa9ca1fa2f47955", # go1.24.10.linux-amd64.tar.gz + "94a99dae43dab8a3fe337485bbb89214b524285ec53ea02040514b0c2a9c3f94", # go1.24.10.linux-arm64.tar.gz ], install_std = False, - version = "1.24.1", + version = "1.24.10", ) go_stdlib( From 58cfa4b80c712abdec6a24a2f4afd99c12394fb2 Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Tue, 25 Nov 2025 09:59:50 +0000 Subject: [PATCH 13/38] Clean up pleasew platform detection logic (#3458) At the start of the script, ensure that `uname` and `uname -m` produce an OS and architecture that are supported by Please. If they do, set `OS` and `ARCH` to the names of the OS and architecture as Please knows them; if they don't, exit unsuccessfully. Fixes #3457. 
--- pleasew | 65 ++++++++++++++++++++++++++++++++------------------------- 1 file changed, 36 insertions(+), 29 deletions(-) diff --git a/pleasew b/pleasew index 06c1fadf4..4805e88e5 100755 --- a/pleasew +++ b/pleasew @@ -17,24 +17,44 @@ else RESET='' fi +OS="" +ARCH="" + +case "$(uname)" in + Linux) + OS=linux + case "$(uname -m)" in + x86_64) ARCH=amd64 ;; + aarch64*|armv8b|armv8l) ARCH=arm64 ;; + esac + ;; + Darwin) + OS=darwin + case "$(uname -m)" in + x86_64) ARCH=amd64 ;; + arm64) ARCH=arm64 ;; + esac + ;; + FreeBSD) + OS=freebsd + case "$(uname -m)" in + amd64) ARCH=amd64 ;; + esac + ;; + *) + printf >&2 '%bPlease does not support the %s operating system.%b\n' \ + "${RED}" "$(uname)" "${RESET}" + exit 1 + ;; +esac -DEFAULT_URL_BASE='https://get.please.build' - -OS="$(uname)" - -if [ "${OS}" = 'Darwin' ]; then - # switch between mac amd64/arm64 - ARCH="$(uname -m)" -else - # default to amd64 on other operating systems - # because we only build intel binaries - ARCH='amd64' +if [ -z "$ARCH" ]; then + printf >&2 '%bPlease does not support the %s architecture on %s.%b\n' \ + "${RED}" "$(uname -m)" "$(uname)" "${RESET}" + exit 1 fi -case "${ARCH}" in - aarch64_be|aarch64|armv8b|armv8l) ARCH='arm64' ;; - x86_64) ARCH='amd64' ;; -esac +DEFAULT_URL_BASE='https://get.please.build' has_command () { command -v "${1}" > /dev/null 2>&1 @@ -141,20 +161,7 @@ if [ "${VERSION:+x}" != 'x' ]; then VERSION=$(${TRANSFER_TOOL} ${TRANSFER_SILENT_OPTS} "${URL_BASE}"/latest_version) fi -# Find the os / arch to download. You can do this quite nicely with go env -# but we use this script on machines that don't necessarily have Go itself. 
-if [ "${OS}" = 'Linux' ]; then - GOOS='linux' -elif [ "${OS}" = 'Darwin' ]; then - GOOS='darwin' -elif [ "${OS}" = 'FreeBSD' ]; then - GOOS='freebsd' -else - printf >&2 '%bUnknown operating system %s%b\n' "${RED}" "${OS}" "${RESET}" - exit 1 -fi - -PLEASE_URL="${URL_BASE}/${GOOS}_${ARCH}/${VERSION}/please_${VERSION}.tar.xz" +PLEASE_URL="${URL_BASE}/${OS}_${ARCH}/${VERSION}/please_${VERSION}.tar.xz" DIR="${LOCATION}/${VERSION}" # Potentially we could reuse this but it's easier not to really. From 781700936630637e3413d8a0342e62f37e5b2fba Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Tue, 25 Nov 2025 11:05:02 +0000 Subject: [PATCH 14/38] Tag v17.24.2 (#3459) --- ChangeLog | 4 ++++ VERSION | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/ChangeLog b/ChangeLog index ff93a4561..79200b0a6 100644 --- a/ChangeLog +++ b/ChangeLog @@ -1,3 +1,7 @@ +Version 17.24.2 +--------------- + * Improve platform detection logic in `plz init`'s `pleasew` script (#3458) + Version 17.24.1 --------------- * Download run-time dependencies when running a target remotely (#3453) diff --git a/VERSION b/VERSION index 4f0d1eeeb..76354e4b9 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -17.24.1 +17.24.2 From 5004307efcc31ac1d47ab1cd0f1ae680668aea17 Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Thu, 27 Nov 2025 16:50:33 +0000 Subject: [PATCH 15/38] Add `maxdepth` parameter to `get_labels` function (#3460) Add a `maxdepth` parameter to the `get_labels` built-in function that controls the maximum depth of `get_labels`' recursive dependency search. The default is -1, meaning "no limit". The main use case is to help limit the collection of `cc:inc:` labels to direct dependencies in the cc-rules plugin, since headers used by transitive (library) dependencies shouldn't be required at compilation time. 
This is an alternative to (and is in fact a superset of the functionality offered by) the `transitive` parameter: `transitive=True` is equivalent to `maxdepth=-1`, and `transitive=False` is equivalent to `maxdepth=0`. The `transitive` parameter remains for backwards compatibility, although it could now be removed in a future major release. --- rules/builtins.build_defs | 2 +- src/parse/asp/builtins.go | 39 +++++++----- src/parse/asp/builtins_test.go | 47 +++++++++----- test/get_labels/BUILD | 109 +++++++++++++++++++++++++++++++++ 4 files changed, 164 insertions(+), 33 deletions(-) create mode 100644 test/get_labels/BUILD diff --git a/rules/builtins.build_defs b/rules/builtins.build_defs index b8e4ccafb..848dad863 100644 --- a/rules/builtins.build_defs +++ b/rules/builtins.build_defs @@ -243,7 +243,7 @@ def dirname(p:str) -> str: # Post-build callback functions. -def get_labels(name:str, prefix:str, all:bool=False, transitive=True) -> list: +def get_labels(name:str, prefix:str, all:bool=False, transitive:bool=None, maxdepth:int=-1) -> list: pass def has_label(name:str, prefix:str, all:bool=False) -> bool: return len(get_labels(name, prefix, all)) > 0 diff --git a/src/parse/asp/builtins.go b/src/parse/asp/builtins.go index 8aede7823..d17cd7b2b 100644 --- a/src/parse/asp/builtins.go +++ b/src/parse/asp/builtins.go @@ -47,8 +47,8 @@ func registerBuiltins(s *scope) { setNativeCode(s, "zip", zip, varargs) setNativeCode(s, "any", anyFunc) setNativeCode(s, "all", allFunc) - setNativeCode(s, "min", min) - setNativeCode(s, "max", max) + setNativeCode(s, "min", minFunc) + setNativeCode(s, "max", maxFunc) setNativeCode(s, "chr", chr) setNativeCode(s, "ord", ord) setNativeCode(s, "len", lenFunc) @@ -1030,11 +1030,11 @@ func allFunc(s *scope, args []pyObject) pyObject { return True } -func min(s *scope, args []pyObject) pyObject { +func minFunc(s *scope, args []pyObject) pyObject { return extreme(s, args, LessThan) } -func max(s *scope, args []pyObject) pyObject { +func 
maxFunc(s *scope, args []pyObject) pyObject { return extreme(s, args, GreaterThan) } @@ -1093,13 +1093,20 @@ func getLabels(s *scope, args []pyObject) pyObject { name := string(args[0].(pyString)) prefix := string(args[1].(pyString)) all := args[2].IsTruthy() - transitive := args[3].IsTruthy() + transitive := args[3] + maxDepth := int(args[4].(pyInt)) + if transitiveBool, ok := transitive.(pyBool); ok { + s.Assert(maxDepth == -1, "get_labels: only one of transitive and maxdepth may be specified, not both") + if !transitiveBool.IsTruthy() { + maxDepth = 0 + } + } if core.LooksLikeABuildLabel(name) { label := core.ParseBuildLabel(name, s.pkg.Name) - return getLabelsInternal(s.state.Graph.TargetOrDie(label), prefix, core.Built, all, transitive) + return getLabelsInternal(s.state.Graph.TargetOrDie(label), prefix, core.Built, all, maxDepth) } target := getTargetPost(s, name) - return getLabelsInternal(target, prefix, core.Building, all, transitive) + return getLabelsInternal(target, prefix, core.Building, all, maxDepth) } // addLabel adds a set of labels to the named rule @@ -1119,35 +1126,35 @@ func addLabel(s *scope, args []pyObject) pyObject { return None } -func getLabelsInternal(target *core.BuildTarget, prefix string, minState core.BuildTargetState, all, transitive bool) pyObject { +func getLabelsInternal(target *core.BuildTarget, prefix string, minState core.BuildTargetState, all bool, maxDepth int) pyObject { if target.State() < minState { log.Fatalf("get_labels called on a target that is not yet built: %s", target.Label) } - if all && !transitive { - log.Fatalf("get_labels can't be called with all set to true when transitive is set to False") + if all && maxDepth != -1 { + log.Fatalf("get_labels: if all is True, transitive must be True or maxdepth must be -1") } labels := map[string]bool{} done := map[*core.BuildTarget]bool{} - var getLabels func(*core.BuildTarget) - getLabels = func(t *core.BuildTarget) { + var getLabels func(*core.BuildTarget, int) + 
getLabels = func(t *core.BuildTarget, depth int) { for _, label := range t.Labels { if strings.HasPrefix(label, prefix) { labels[strings.TrimSpace(strings.TrimPrefix(label, prefix))] = true } } - if !transitive { + done[t] = true + if depth == 0 { return } - done[t] = true if !t.OutputIsComplete || t == target || all { for _, dep := range t.Dependencies() { if !done[dep] { - getLabels(dep) + getLabels(dep, max(depth-1, -1)) } } } } - getLabels(target) + getLabels(target, maxDepth) ret := make([]string, len(labels)) i := 0 for label := range labels { diff --git a/src/parse/asp/builtins_test.go b/src/parse/asp/builtins_test.go index dbe3764f6..02e622b0c 100644 --- a/src/parse/asp/builtins_test.go +++ b/src/parse/asp/builtins_test.go @@ -21,28 +21,43 @@ func TestPackageName(t *testing.T) { func TestGetLabels(t *testing.T) { state := core.NewBuildState(core.DefaultConfiguration()) - foo := core.NewBuildTarget(core.NewBuildLabel("pkg", "foo")) - foo.AddLabel("cc:ld:-ldl") - foo.SetState(core.Built) - bar := core.NewBuildTarget(core.NewBuildLabel("pkg", "bar")) - bar.AddDependency(foo.Label) - bar.AddLabel("cc:ld:-pthread") - bar.SetState(core.Built) + bottom := core.NewBuildTarget(core.NewBuildLabel("pkg", "bottom")) + bottom.AddLabel("target:bottom") + bottom.SetState(core.Built) - state.Graph.AddTarget(foo) - state.Graph.AddTarget(bar) + middle := core.NewBuildTarget(core.NewBuildLabel("pkg", "middle")) + middle.AddDependency(bottom.Label) + middle.AddLabel("target:middle") + middle.SetState(core.Built) - err := bar.ResolveDependencies(state.Graph) + top := core.NewBuildTarget(core.NewBuildLabel("pkg", "top")) + top.AddDependency(middle.Label) + top.AddLabel("target:top") + top.SetState(core.Built) + + state.Graph.AddTarget(bottom) + state.Graph.AddTarget(middle) + state.Graph.AddTarget(top) + + err := middle.ResolveDependencies(state.Graph) + require.NoError(t, err) + err = top.ResolveDependencies(state.Graph) require.NoError(t, err) s := &scope{state: state, pkg: 
core.NewPackage("pkg")} - ls := getLabels(s, []pyObject{pyString(":bar"), pyString("cc:ld:"), False, True}).(pyList) - assert.Len(t, ls, 2) - - ls = getLabels(s, []pyObject{pyString(":bar"), pyString("cc:ld:"), False, False}).(pyList) - assert.Len(t, ls, 1) - assert.Equal(t, pyString("-pthread"), ls[0]) + ls := getLabels(s, []pyObject{pyString(":top"), pyString("target:"), False, True, pyInt(-1)}).(pyList) // transitive=True + assert.Equal(t, pyList{pyString("bottom"), pyString("middle"), pyString("top")}, ls) + ls = getLabels(s, []pyObject{pyString(":top"), pyString("target:"), False, None, pyInt(-1)}).(pyList) // maxdepth=-1 (equivalent to above) + assert.Equal(t, pyList{pyString("bottom"), pyString("middle"), pyString("top")}, ls) + + ls = getLabels(s, []pyObject{pyString(":top"), pyString("target:"), False, False, pyInt(-1)}).(pyList) // transitive=False + assert.Equal(t, pyList{pyString("top")}, ls) + ls = getLabels(s, []pyObject{pyString(":top"), pyString("target:"), False, None, pyInt(0)}).(pyList) // maxdepth=0 (equivalent to above) + assert.Equal(t, pyList{pyString("top")}, ls) + + ls = getLabels(s, []pyObject{pyString(":top"), pyString("target:"), False, None, pyInt(1)}).(pyList) // maxdepth=1 + assert.Equal(t, pyList{pyString("middle"), pyString("top")}, ls) } func TestTag(t *testing.T) { diff --git a/test/get_labels/BUILD b/test/get_labels/BUILD new file mode 100644 index 000000000..e7c03693b --- /dev/null +++ b/test/get_labels/BUILD @@ -0,0 +1,109 @@ +subinclude("//test/build_defs") + +def maxdepth_test(name:str, deps:list=None, maxdepth:int, expected:list): + test_case = genrule( + name = name, + outs = [f"{name}.sh"], + cmd = { + "opt": "echo '#!/bin/sh' > $OUTS", + }, + binary = True, + labels = [ + f"name:{name}", + "manual", + ], + output_is_complete = False, + pre_build = echo_name_labels_up_to(maxdepth), + deps = deps, + ) + expected = text_file( + name = tag(name, "expected"), + content = "\n".join(expected) + "\n", + ) + plz_e2e_test( + name = 
f"{name}_test", + cmd = f"plz run //test/get_labels:{name}", + expected_output = expected, + ) + +def dep(name:str, deps:list=None): + return genrule( + name = name, + outs = [name], + cmd = "touch $OUTS", + labels = [ + f"name:{name}", + "manual", + ], + output_is_complete = False, + deps = deps, + ) + +def echo_name_labels_up_to(maxdepth:int): + def echo(name:str): + labels = get_labels(name, "name:", maxdepth=maxdepth) + set_command(name, "opt", " && ".join([get_command(name, "opt")] + [f"echo 'echo {l}' >> $OUTS" for l in labels])) + return echo + +dep(name = "dep1", deps = [":dep3"]) +dep(name = "dep2") +dep(name = "dep3", deps = [":dep4", ":dep5"]) +dep(name = "dep4") +dep(name = "dep5") + +maxdepth_test( + name = "target_only", + deps = [ + ":dep1", + ":dep2", + ], + maxdepth = 0, + expected = ["target_only"], +) + +maxdepth_test( + name = "direct_deps", + deps = [ + ":dep1", + ":dep2", + ], + maxdepth = 1, + expected = [ + "dep1", + "dep2", + "direct_deps", + ], +) + +maxdepth_test( + name = "second_level_deps", + deps = [ + ":dep1", + ":dep2", + ":dep3", + ], + maxdepth = 2, + expected = [ + "dep1", + "dep2", + "dep3", + "second_level_deps", + ], +) + +maxdepth_test( + name = "all_deps", + deps = [ + ":dep1", + ":dep2", + ], + maxdepth = -1, + expected = [ + "all_deps", + "dep1", + "dep2", + "dep3", + "dep4", + "dep5", + ], +) From 8d8fa413f60c872bf005f0e4dacf056a67a0681b Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Thu, 27 Nov 2025 16:55:24 +0000 Subject: [PATCH 16/38] Tag v17.25.0 (#3461) --- ChangeLog | 4 ++++ VERSION | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/ChangeLog b/ChangeLog index 79200b0a6..dfb5f8560 100644 --- a/ChangeLog +++ b/ChangeLog @@ -1,3 +1,7 @@ +Version 17.25.0 +--------------- + * Add `maxdepth` parameter to `get_labels` built-in, allowing for recursive dependency searches to be depth-limited (#3460) + Version 17.24.2 --------------- * Improve platform detection logic in `plz init`'s `pleasew` script 
(#3458) diff --git a/VERSION b/VERSION index 76354e4b9..25b88430c 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -17.24.2 +17.25.0 From b9102bdaa609198be1de1ec918dcb1fb271e1338 Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Fri, 28 Nov 2025 12:00:15 +0000 Subject: [PATCH 17/38] Upgrade arcat to v1.3.1 (#3462) This fixes a bug whereby symbol tables in BSD-variant ar archives are erroneously treated as real files, and are therefore copied into output archives when input archives are merged via `--combine` - see https://github.com/please-build/ar/issues/22. --- src/parse/internal.tmpl | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/parse/internal.tmpl b/src/parse/internal.tmpl index c877f58cb..f600f8fb3 100644 --- a/src/parse/internal.tmpl +++ b/src/parse/internal.tmpl @@ -1,14 +1,14 @@ remote_file( name = "arcat", - url = f"https://github.com/please-build/arcat/releases/download/v1.3.0/arcat-1.3.0-{CONFIG.HOSTOS}_{CONFIG.HOSTARCH}", + url = f"https://github.com/please-build/arcat/releases/download/v1.3.1/arcat-1.3.1-{CONFIG.HOSTOS}_{CONFIG.HOSTARCH}", out = "arcat", binary = True, hashes = [ - "27fa940b2a1fd2c8beb84d1e29ed7d04ecfca489e021ed9bd7c8e975e5f43839", # darwin_amd64 - "3191b2896451a4f3fd6d592a8a5c6684f4a18bc74f00e91e2488c228a24c1d4e", # darwin_arm64 - "aeaf7be02fb25495f0c4e142a5adb20ce08a2bb988e7be6436562d03fbae83b0", # freebsd_amd64 - "e09a689cebe9d9b27836c184e4955e8d6731c9453fe48124a37b6a173c6b04d6", # linux_amd64 - "c3792853393ca692fd07bd2fbfdf1e1cf6e636090e4e622b8a77f03609c724a9", # linux_arm64 + "6af2cf108592535701aa9395f3a5deeb48a5dfbe8174a8ebe3d56bb93de2c255", # darwin_amd64 + "5070ef05d14c66a85d438f400c6ff734a23833929775d6824b69207b704034bf", # darwin_arm64 + "05ad6ac45be3a4ca1238bb1bd09207a596f8ff5f885415f8df4ff2dc849fa04e", # freebsd_amd64 + "aec85425355291e515cd10ac0addec3a5bc9e05c9d07af01aca8c34aaf0f1222", # linux_amd64 + "8266cb95cc84b23642bca6567f8b4bd18de399c887cb5845ab6a901d0dba54d2", # linux_arm64 ], 
visibility = ["PUBLIC"], ) From 29492e79c8ec4d380470204d2f00bba6a325834a Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Fri, 28 Nov 2025 13:25:12 +0000 Subject: [PATCH 18/38] Tag v17.25.1 (#3463) --- ChangeLog | 4 ++++ VERSION | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/ChangeLog b/ChangeLog index dfb5f8560..542a4d7ed 100644 --- a/ChangeLog +++ b/ChangeLog @@ -1,3 +1,7 @@ +Version 17.25.1 +--------------- + * Bump arcat to v1.3.1, fixing a bug relating to symbol table stripping on Darwin and FreeBSD (#3462) + Version 17.25.0 --------------- * Add `maxdepth` parameter to `get_labels` built-in, allowing for recursive dependency searches to be depth-limited (#3460) diff --git a/VERSION b/VERSION index 25b88430c..370ea2fe4 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -17.25.0 +17.25.1 From 336e6c35b3d28d4b9fadc9db4f6090d5889a20e5 Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Fri, 28 Nov 2025 17:15:15 +0000 Subject: [PATCH 19/38] Bump cc-rules plugin to v0.7.1 (#3464) --- docs/BUILD | 2 +- plugins/BUILD | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/BUILD b/docs/BUILD index 452347864..5e1937f50 100644 --- a/docs/BUILD +++ b/docs/BUILD @@ -60,7 +60,7 @@ plugins = { "python": "v1.14.0", "java": "v0.4.5", "go": "v1.26.0", - "cc": "v0.6.0", + "cc": "v0.7.1", "shell": "v0.2.0", "go-proto": "v0.3.0", "python-proto": "v0.1.0", diff --git a/plugins/BUILD b/plugins/BUILD index de391c094..1222d0787 100644 --- a/plugins/BUILD +++ b/plugins/BUILD @@ -7,7 +7,7 @@ plugin_repo( plugin_repo( name = "cc", plugin = "cc-rules", - revision = "v0.6.0", + revision = "v0.7.1", ) plugin_repo( From 7872c8373d52d16f6ce2ebbcea1be95a73646a41 Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Tue, 9 Dec 2025 12:14:18 +0000 Subject: [PATCH 20/38] CI: use FreeBSD 14.3-RELEASE runner on Cirrus CI (#3467) The FreeBSD 14.2-RELEASE runner seems to have been deprecated in the last week. 
--- .cirrus.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.cirrus.yml b/.cirrus.yml index 4129e2efb..e7146207b 100644 --- a/.cirrus.yml +++ b/.cirrus.yml @@ -1,5 +1,5 @@ freebsd_instance: - image_family: freebsd-14-2 + image_family: freebsd-14-3 env: GOPROXY: https://proxy.golang.org From bc8092ac2799bcfc785e90b348a62f34245abf4a Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Tue, 9 Dec 2025 13:48:55 +0000 Subject: [PATCH 21/38] Optionally traverse `srcs` and `deps` for run-time dependencies (#3466) There are circumstances under which a target might want to inherit the run-time dependencies of certain build-time dependencies, because the target's output is built in such a way that its dependencies don't have their own run-time dependencies resolved. An example of this is the shell-rules plugin's `sh_binary` rule: a `sh_binary` is a zip file of its dependencies (`deps`) concatenated onto a shell script preamble that extracts those dependencies into a temporary directory during initialisation. If one of those `deps` happens to have its own run-time dependencies, Please will not resolve them, and they will therefore not be built before the target runs (and will not be sent to the remote worker along with the target's output). Add two parameters to `build_rule` - `runtime_deps_from_srcs` and `runtime_deps_from_deps` - that indicate to Please that run-time dependencies should additionally be collected from the target's `srcs` and `deps` respectively. This allows build definitions to inherit the run-time dependencies of their sources and/or build-time dependencies if they know they will be needed when running the target they generate. 
--- rules/builtins.build_defs | 70 ++++++++++-- rules/misc_rules.build_defs | 108 +++++++++++++++--- src/build/incrementality_test.go | 10 +- src/core/build_target.go | 45 +++++++- src/parse/asp/targets.go | 7 ++ test/runtime_deps/BUILD | 24 ++++ test/runtime_deps/repo/build_defs/BUILD_FILE | 5 + .../repo/build_defs/test.build_defs | 43 +++++++ .../repo/filegroup_test/BUILD_FILE | 27 +++++ .../repo/from_deps_test/BUILD_FILE | 28 +++++ .../repo/from_exported_deps_test/BUILD_FILE | 45 ++++++++ .../repo/from_srcs_test/BUILD_FILE | 28 +++++ test/runtime_deps/repo/test/BUILD_FILE | 40 +------ tools/build_langserver/lsp/definition_test.go | 6 +- 14 files changed, 406 insertions(+), 80 deletions(-) create mode 100644 test/runtime_deps/repo/build_defs/BUILD_FILE create mode 100644 test/runtime_deps/repo/build_defs/test.build_defs create mode 100644 test/runtime_deps/repo/filegroup_test/BUILD_FILE create mode 100644 test/runtime_deps/repo/from_deps_test/BUILD_FILE create mode 100644 test/runtime_deps/repo/from_exported_deps_test/BUILD_FILE create mode 100644 test/runtime_deps/repo/from_srcs_test/BUILD_FILE diff --git a/rules/builtins.build_defs b/rules/builtins.build_defs index 848dad863..f405bff5b 100644 --- a/rules/builtins.build_defs +++ b/rules/builtins.build_defs @@ -2,18 +2,64 @@ # Do not change the order of arguments to this function without updating the iota in targets.go to match it. 
-def build_rule(name:str, cmd:str|dict='', test_cmd:str|dict='', debug_cmd:str='', srcs:list|dict=None, data:list|dict=None, - debug_data:list|dict=None, outs:list|dict=None, deps:list=None, exported_deps:list=None, runtime_deps:list=None, - secrets:list|dict=None, tools:str|list|dict=None, test_tools:str|list|dict=None, debug_tools:str|list|dict=None, - labels:list=None, visibility:list=CONFIG.DEFAULT_VISIBILITY, hashes:list=None, binary:bool=False, test:bool=False, - test_only:bool=CONFIG.DEFAULT_TESTONLY, building_description:str=None, needs_transitive_deps:bool=False, - output_is_complete:bool=False, sandbox:bool=CONFIG.BUILD_SANDBOX, test_sandbox:bool=CONFIG.TEST_SANDBOX, - no_test_output:bool=False, flaky:bool|int=0, build_timeout:int|str=0, test_timeout:int|str=0, pre_build:function=None, - post_build:function=None, requires:list=None, provides:dict=None, licences:list=CONFIG.DEFAULT_LICENCES, - test_outputs:list=None, system_srcs:list=None, stamp:bool=False, tag:str='', optional_outs:list=None, progress:bool=False, - size:str=None, _urls:list=None, internal_deps:list=None, pass_env:list=None, local:bool=False, output_dirs:list=[], - exit_on_error:bool=CONFIG.EXIT_ON_ERROR, entry_points:dict={}, env:dict={}, _file_content:str=None, - _subrepo:bool=False, no_test_coverage:bool=False, src_list_files:bool=False): +def build_rule( + name:str, + cmd:str|dict="", + test_cmd:str|dict="", + debug_cmd:str="", + srcs:list|dict=None, + data:list|dict=None, + debug_data:list|dict=None, + outs:list|dict=None, + deps:list=None, + exported_deps:list=None, + runtime_deps:list=None, + runtime_deps_from_srcs:bool=False, + runtime_deps_from_deps:bool=False, + secrets:list|dict=None, + tools:str|list|dict=None, + test_tools:str|list|dict=None, + debug_tools:str|list|dict=None, + labels:list=None, + visibility:list=CONFIG.DEFAULT_VISIBILITY, + hashes:list=None, + binary:bool=False, + test:bool=False, + test_only:bool=CONFIG.DEFAULT_TESTONLY, + building_description:str=None, + 
needs_transitive_deps:bool=False, + output_is_complete:bool=False, + sandbox:bool=CONFIG.BUILD_SANDBOX, + test_sandbox:bool=CONFIG.TEST_SANDBOX, + no_test_output:bool=False, + flaky:bool|int=0, + build_timeout:int|str=0, + test_timeout:int|str=0, + pre_build:function=None, + post_build:function=None, + requires:list=None, + provides:dict=None, + licences:list=CONFIG.DEFAULT_LICENCES, + test_outputs:list=None, + system_srcs:list=None, + stamp:bool=False, + tag:str="", + optional_outs:list=None, + progress:bool=False, + size:str=None, + _urls:list=None, + internal_deps:list=None, + pass_env:list=None, + local:bool=False, + output_dirs:list=[], + exit_on_error:bool=CONFIG.EXIT_ON_ERROR, + entry_points:dict={}, + env:dict={}, + _file_content:str=None, + _subrepo:bool=False, + no_test_coverage:bool=False, + src_list_files:bool=False, +): pass def chr(i:int) -> str: diff --git a/rules/misc_rules.build_defs b/rules/misc_rules.build_defs index 6992313db..0aaf7e751 100644 --- a/rules/misc_rules.build_defs +++ b/rules/misc_rules.build_defs @@ -1,14 +1,42 @@ """Miscellaneous rules that aren't language-specific.""" -def genrule(name:str, cmd:str|list|dict, srcs:list|dict=None, out:str=None, outs:list|dict=None, deps:list=None, - exported_deps:list=None, runtime_deps:list=None, labels:list&features&tags=None, visibility:list=None, - building_description:str='Building...', data:list|dict=None, hashes:list=None, timeout:int=0, binary:bool=False, - sandbox:bool=None, needs_transitive_deps:bool=False, output_is_complete:bool=True, - test_only:bool&testonly=False, secrets:list|dict=None, requires:list=None, provides:dict=None, - pre_build:function=None, post_build:function=None, tools:str|list|dict=None, pass_env:list=None, - local:bool=False, output_dirs:list=[], exit_on_error:bool=CONFIG.EXIT_ON_ERROR, entry_points:dict={}, - env:dict={}, optional_outs:list=[]): +def genrule( + name:str, + cmd:str|list|dict, + srcs:list|dict=None, + out:str=None, + outs:list|dict=None, + 
deps:list=None, + exported_deps:list=None, + runtime_deps:list=None, + runtime_deps_from_srcs:bool=False, + runtime_deps_from_deps:bool=False, + labels:list&features&tags=None, + visibility:list=None, + building_description:str="Building...", + data:list|dict=None, + hashes:list=None, + timeout:int=0, + binary:bool=False, + sandbox:bool=None, + needs_transitive_deps:bool=False, + output_is_complete:bool=True, + test_only:bool&testonly=False, + secrets:list|dict=None, + requires:list=None, + provides:dict=None, + pre_build:function=None, + post_build:function=None, + tools:str|list|dict=None, + pass_env:list=None, + local:bool=False, + output_dirs:list=[], + exit_on_error:bool=CONFIG.EXIT_ON_ERROR, + entry_points:dict={}, + env:dict={}, + optional_outs:list=[], +): """A general build rule which allows the user to specify a command. Args: @@ -41,6 +69,14 @@ def genrule(name:str, cmd:str|list|dict, srcs:list|dict=None, out:str=None, outs the outputs of those rules' transitive run-time dependencies, will exist in the dependent rule's build environment. Requires the rule to produce a runnable output (i.e. binary = True). + runtime_deps_from_srcs (bool): If true, additionally collect run-time dependencies from this target's + sources. This is useful if the target's output simply collects its + sources in some way without eliminating their own run-time dependencies. + runtime_deps_from_deps (bool): If true, additionally collect run-time dependencies from this target's + build-time dependencies (and those targets' exported dependencies). + This is useful if the target's output includes its dependencies without + eliminating their own run-time dependencies, e.g. for targets generated + by the shell-rules plugin's sh_binary rule. tools (str | list | dict): Tools used to build this rule; similar to srcs but are not copied to the temporary build directory. Should be accessed via $(exe //path/to:tool) or similar. 
@@ -127,6 +163,8 @@ def genrule(name:str, cmd:str|list|dict, srcs:list|dict=None, out:str=None, outs deps = deps, exported_deps = exported_deps, runtime_deps = runtime_deps, + runtime_deps_from_srcs = runtime_deps_from_srcs, + runtime_deps_from_deps = runtime_deps_from_deps, data = data, tools = tools, secrets = secrets, @@ -154,13 +192,38 @@ def genrule(name:str, cmd:str|list|dict, srcs:list|dict=None, out:str=None, outs ) -def gentest(name:str, test_cmd:str|list|dict, labels:list&features&tags=None, cmd:str|list|dict=None, srcs:list|dict=None, - outs:list=None, deps:list=None, exported_deps:list=None, runtime_deps:list=None, tools:str|list|dict=None, - test_tools:str|list|dict=None, data:list|dict=None, visibility:list=None, timeout:int=0, - needs_transitive_deps:bool=False, flaky:bool|int=0, secrets:list|dict=None, no_test_output:bool=False, - test_outputs:list=None, output_is_complete:bool=True, requires:list=None, sandbox:bool=None, size:str=None, - local:bool=False, pass_env:list=None, env:dict=None, exit_on_error:bool=CONFIG.EXIT_ON_ERROR, - no_test_coverage:bool=False): +def gentest( + name:str, + test_cmd:str|list|dict, + labels:list&features&tags=None, + cmd:str|list|dict=None, + srcs:list|dict=None, + outs:list=None, + deps:list=None, + exported_deps:list=None, + runtime_deps:list=None, + runtime_deps_from_srcs:bool=False, + runtime_deps_from_deps:bool=False, + tools:str|list|dict=None, + test_tools:str|list|dict=None, + data:list|dict=None, + visibility:list=None, + timeout:int=0, + needs_transitive_deps:bool=False, + flaky:bool|int=0, + secrets:list|dict=None, + no_test_output:bool=False, + test_outputs:list=None, + output_is_complete:bool=True, + requires:list=None, + sandbox:bool=None, + size:str=None, + local:bool=False, + pass_env:list=None, + env:dict=None, + exit_on_error:bool=CONFIG.EXIT_ON_ERROR, + no_test_coverage:bool=False, +): """A rule which creates a test with an arbitrary command. 
The command must return zero on success and nonzero on failure. Test results are written @@ -185,6 +248,14 @@ def gentest(name:str, test_cmd:str|list|dict, labels:list&features&tags=None, cm list, as well as those rules' transitive run-time dependencies, will exist in the test environment. Requires the rule to produce a runnable output (i.e. binary = True). + runtime_deps_from_srcs (bool): If true, additionally collect run-time dependencies from this target's + sources. This is useful if the target's output simply collects its + sources in some way without eliminating their own run-time dependencies. + runtime_deps_from_deps (bool): If true, additionally collect run-time dependencies from this target's + build-time dependencies (and those targets' exported dependencies). + This is useful if the target's output includes its dependencies without + eliminating their own run-time dependencies, e.g. for targets generated + by the shell-rules plugin's sh_binary rule. tools (str | list | dict): Tools used to build this rule; similar to srcs but are not copied to the temporary build directory. test_tools (str | list | dict): Like tools but available to test_cmd instead. 
@@ -224,6 +295,8 @@ def gentest(name:str, test_cmd:str|list|dict, labels:list&features&tags=None, cm deps = deps, exported_deps = exported_deps, runtime_deps = runtime_deps, + runtime_deps_from_srcs = runtime_deps_from_srcs, + runtime_deps_from_deps = runtime_deps_from_deps, data = data, tools = tools, test_tools = test_tools, @@ -270,7 +343,7 @@ def export_file(name:str, src:str, visibility:list=None, binary:bool=False, test ) -def filegroup(name:str, tag:str='', srcs:list=None, deps:list=None, exported_deps:list=None, runtime_deps:list=None, +def filegroup(name:str, tag:str='', srcs:list=None, deps:list=None, exported_deps:list=None, visibility:list=None, labels:list&features&tags=None, binary:bool=False, output_is_complete:bool=True, requires:list=None, provides:dict=None, hashes:list=None, test_only:bool&testonly=False): """Defines a collection of files which other rules can depend on. @@ -285,8 +358,6 @@ def filegroup(name:str, tag:str='', srcs:list=None, deps:list=None, exported_dep srcs (list): Source files for the rule. deps (list): Dependencies of the rule. exported_deps (list): Dependencies that will become visible to any rules that depend on this rule. - runtime_deps (list): Run-time dependencies of this rule. Requires the rule to produce a runnable - output (i.e. binary = True). 
visibility (list): Visibility declaration labels (list): Labels to apply to this rule binary (bool): True to mark the rule outputs as binary @@ -305,7 +376,6 @@ def filegroup(name:str, tag:str='', srcs:list=None, deps:list=None, exported_dep srcs=srcs, deps=deps, exported_deps=exported_deps, - runtime_deps=runtime_deps, visibility=visibility, building_description='Copying...', output_is_complete=output_is_complete, diff --git a/src/build/incrementality_test.go b/src/build/incrementality_test.go index e57cc709a..02da05869 100644 --- a/src/build/incrementality_test.go +++ b/src/build/incrementality_test.go @@ -81,10 +81,12 @@ var KnownFields = map[string]bool{ "Debug.namedTools": true, // These only contribute to the runtime hash, not at build time. - "runtimeDependencies": true, - "Data": true, - "NamedData": true, - "ContainerSettings": true, + "runtimeDependencies": true, + "RuntimeDependenciesFromSources": true, + "RuntimeDependenciesFromDependencies": true, + "Data": true, + "NamedData": true, + "ContainerSettings": true, // These would ideally not contribute to the hash, but we need that at present // because we don't have a good way to force a recheck of its reverse dependencies. diff --git a/src/core/build_target.go b/src/core/build_target.go index 3e2b8b32b..eb00b38ea 100644 --- a/src/core/build_target.go +++ b/src/core/build_target.go @@ -117,6 +117,13 @@ type BuildTarget struct { dependencies []depInfo `name:"deps"` // The run-time dependencies of this target. runtimeDependencies []BuildLabel `name:"runtime_deps"` + // Whether to consider the run-time dependencies of this target's sources to be additional + // run-time dependencies of this target. + RuntimeDependenciesFromSources bool `name:"runtime_deps_from_srcs"` + // Whether to consider the run-time dependencies of this target's build-time dependencies + // (and exported dependencies of those targets) to be additional run-time dependencies of + // this target. 
+ RuntimeDependenciesFromDependencies bool `name:"runtime_deps_from_deps"` // List of build target patterns that can use this build target. Visibility []BuildLabel // Source files of this rule. Can refer to build rules themselves. @@ -732,9 +739,9 @@ func (target *BuildTarget) IterAllRuntimeDependencies(graph *BuildGraph) iter.Se return true } done[t.String()] = true - for _, runDep := range t.runtimeDependencies { - runDepLabel, _ := runDep.Label() - for _, providedDep := range graph.TargetOrDie(runDepLabel).ProvideFor(t) { + for _, dep := range t.runtimeDependencies { + depLabel, _ := dep.Label() + for _, providedDep := range graph.TargetOrDie(depLabel).ProvideFor(t) { if !yield(providedDep) { return false } @@ -743,6 +750,38 @@ func (target *BuildTarget) IterAllRuntimeDependencies(graph *BuildGraph) iter.Se } } } + if t.RuntimeDependenciesFromSources || t.RuntimeDependenciesFromDependencies { + for _, dep := range t.dependencies { + // If required, include the run-time dependencies of sources, but not the sources themselves. + if t.RuntimeDependenciesFromSources && dep.source { + depLabel, _ := dep.declared.Label() + for _, providedDep := range graph.TargetOrDie(depLabel).ProvideFor(t) { + if !push(graph.TargetOrDie(providedDep), yield) { + return false + } + } + } + // If required, include the run-time dependencies of dependencies, but not the dependencies themselves. + if t.RuntimeDependenciesFromDependencies && !dep.exported && !dep.source && !dep.internal && !dep.runtime { + depLabel, _ := dep.declared.Label() + depTarget := graph.TargetOrDie(depLabel) + for _, providedDep := range depTarget.ProvideFor(t) { + if !push(graph.TargetOrDie(providedDep), yield) { + return false + } + } + // Also include the run-time dependencies of the target's exported dependencies, but not the + // exported dependencies themselves. 
+ for _, exportedDep := range depTarget.ExportedDependencies() { + for _, providedDep := range graph.TargetOrDie(exportedDep).ProvideFor(t) { + if !push(graph.TargetOrDie(providedDep), yield) { + return false + } + } + } + } + } + } return true } return func(yield func(BuildLabel) bool) { diff --git a/src/parse/asp/targets.go b/src/parse/asp/targets.go index 96baf2483..48404b792 100644 --- a/src/parse/asp/targets.go +++ b/src/parse/asp/targets.go @@ -31,6 +31,8 @@ const ( depsBuildRuleArgIdx exportedDepsBuildRuleArgIdx runtimeDepsBuildRuleArgIdx + runtimeDepsFromSrcsBuildRuleArgIdx + runtimeDepsFromDepsBuildRuleArgIdx secretsBuildRuleArgIdx toolsBuildRuleArgIdx testToolsBuildRuleArgIdx @@ -142,6 +144,11 @@ func createTarget(s *scope, args []pyObject) *core.BuildTarget { if target.IsRemoteFile { target.AddLabel("remote") } + // filegroups don't really produce any outputs of their own - they're just the filegroup's own sources. + // The run-time dependencies of a filegroup's sources should therefore be treated as the filegroup's own + // run-time dependencies. 
+ target.RuntimeDependenciesFromSources = target.IsFilegroup || isTruthy(runtimeDepsFromSrcsBuildRuleArgIdx) + target.RuntimeDependenciesFromDependencies = isTruthy(runtimeDepsFromDepsBuildRuleArgIdx) target.Command, target.Commands = decodeCommands(s, args[cmdBuildRuleArgIdx]) if test { target.Test = new(core.TestFields) diff --git a/test/runtime_deps/BUILD b/test/runtime_deps/BUILD index fb6f9ed9d..e3afe8088 100644 --- a/test/runtime_deps/BUILD +++ b/test/runtime_deps/BUILD @@ -66,3 +66,27 @@ please_repo_e2e_test( }, repo = "repo", ) + +please_repo_e2e_test( + name = "runtime_deps_from_srcs_test", + plz_command = "plz test //from_srcs_test:runtime_deps_from_srcs_test_case", + repo = "repo", +) + +please_repo_e2e_test( + name = "runtime_deps_from_deps_test", + plz_command = "plz test //from_deps_test:runtime_deps_from_deps_test_case", + repo = "repo", +) + +please_repo_e2e_test( + name = "runtime_deps_from_exported_deps_test", + plz_command = "plz test //from_exported_deps_test:runtime_deps_from_exported_deps_test_case", + repo = "repo", +) + +please_repo_e2e_test( + name = "filegroup_test", + plz_command = "plz test //filegroup_test:filegroup_test_case", + repo = "repo", +) diff --git a/test/runtime_deps/repo/build_defs/BUILD_FILE b/test/runtime_deps/repo/build_defs/BUILD_FILE new file mode 100644 index 000000000..d8f94687e --- /dev/null +++ b/test/runtime_deps/repo/build_defs/BUILD_FILE @@ -0,0 +1,5 @@ +export_file( + name = "test", + src = "test.build_defs", + visibility = ["PUBLIC"], +) diff --git a/test/runtime_deps/repo/build_defs/test.build_defs b/test/runtime_deps/repo/build_defs/test.build_defs new file mode 100644 index 000000000..913807b98 --- /dev/null +++ b/test/runtime_deps/repo/build_defs/test.build_defs @@ -0,0 +1,43 @@ +def exists(file:str): + path = join_path(package_name(), file) + return f"echo -n '{path} exists: '; if [ -f '{path}' ]; then echo OK; else echo FAIL; exit 1; fi" + +def not_exists(file:str): + path = join_path(package_name(), 
file) + return f"echo -n '{path} does not exist: '; if [ -f '{path}' ]; then echo FAIL; exit 1; else echo OK; fi" + +def target(name:str, srcs:list=[], build_tests:list=[], post_build:function=None, requires:list=None, + provides:dict=None, runtime_deps:list=[], runtime_deps_from_srcs:bool=False, runtime_deps_from_deps:bool=False, + exported_deps:list=[], deps:list=[]): + # Ensure that run-time dependencies are not present at build time, unless they are also + # explicitly given as build-time dependencies. (The lstrips here turn build target names + # into the names of files they output - they're slightly grungy, but they allow us to + # re-use exists and not_exists outside of this function; they work because targets + # generated by runtime_dep output a single file whose name is identical to that of the + # target's.) + cmd = [not_exists(r.lstrip(":")) for r in runtime_deps if r not in deps] + cmd += [exists(d.lstrip(":")) for d in deps] + return genrule( + name = name, + srcs = srcs, + outs = [name], + # Assume that if a post-build function is defined, it dynamically adds a run-time + # dependency, which requires this target to be marked as binary.
+ binary = len(runtime_deps) > 0 or post_build is not None, + cmd = cmd + build_tests + ["touch $OUTS"], + post_build = post_build, + requires = requires, + provides = provides, + runtime_deps = runtime_deps, + runtime_deps_from_srcs = runtime_deps_from_srcs, + runtime_deps_from_deps = runtime_deps_from_deps, + exported_deps = exported_deps, + deps = deps, + ) + +def runtime_dep(name:str): + return genrule( + name = name, + outs = [name], + cmd = "touch $OUTS", + ) diff --git a/test/runtime_deps/repo/filegroup_test/BUILD_FILE b/test/runtime_deps/repo/filegroup_test/BUILD_FILE new file mode 100644 index 000000000..19083d918 --- /dev/null +++ b/test/runtime_deps/repo/filegroup_test/BUILD_FILE @@ -0,0 +1,27 @@ +subinclude("//build_defs:test") + +filegroup( + name = "filegroup", + srcs = [":filegroup_src"], +) + +target( + name = "filegroup_src", + runtime_deps = [":filegroup_src_runtime_dep"], +) + +runtime_dep("filegroup_src_runtime_dep") + +gentest( + name = "filegroup_test_case", + outs = ["filegroup_test_case"], + cmd = [ + not_exists("filegroup_src_runtime_dep"), + "touch $OUTS", + ], + data = [":filegroup"], + no_test_output = True, + test_cmd = [ + exists("filegroup_src_runtime_dep"), + ], +) diff --git a/test/runtime_deps/repo/from_deps_test/BUILD_FILE b/test/runtime_deps/repo/from_deps_test/BUILD_FILE new file mode 100644 index 000000000..6c2e3fe4a --- /dev/null +++ b/test/runtime_deps/repo/from_deps_test/BUILD_FILE @@ -0,0 +1,28 @@ +subinclude("//build_defs:test") + +target( + name = "runtime_deps_from_deps", + runtime_deps_from_deps = True, + deps = [":dep"], +) + +target( + name = "dep", + runtime_deps = [":dep_runtime_dep"], +) + +runtime_dep("dep_runtime_dep") + +gentest( + name = "runtime_deps_from_deps_test_case", + outs = ["runtime_deps_from_deps_test_case"], + cmd = [ + not_exists("dep_runtime_dep"), + "touch $OUTS", + ], + data = [":runtime_deps_from_deps"], + no_test_output = True, + test_cmd = [ + exists("dep_runtime_dep"), + ], +) diff --git 
a/test/runtime_deps/repo/from_exported_deps_test/BUILD_FILE b/test/runtime_deps/repo/from_exported_deps_test/BUILD_FILE new file mode 100644 index 000000000..bd6121a4b --- /dev/null +++ b/test/runtime_deps/repo/from_exported_deps_test/BUILD_FILE @@ -0,0 +1,45 @@ +subinclude("//build_defs:test") + +target( + name = "runtime_deps_from_deps", + runtime_deps_from_deps = True, + deps = [":dep_with_exported_dep"], +) + +target( + name = "dep_with_exported_dep", + exported_deps = [":exported_dep"], + deps = [":dep"], +) + +target( + name = "exported_dep", + runtime_deps = [":exported_dep_runtime_dep"], +) + +target( + name = "dep", + runtime_deps = [":dep_runtime_dep"], +) + +runtime_dep("exported_dep_runtime_dep") +runtime_dep("dep_runtime_dep") + +gentest( + name = "runtime_deps_from_exported_deps_test_case", + outs = ["runtime_deps_from_exported_deps_test_case"], + cmd = [ + not_exists("dep_runtime_dep"), + not_exists("exported_dep_runtime_dep"), + "touch $OUTS", + ], + data = [":runtime_deps_from_deps"], + no_test_output = True, + test_cmd = [ + # This still shouldn't exist - it's a build-time dependency of a build-time dependency, not a run-time + # dependency of a build-time dependency. + not_exists("dep_runtime_dep"), + # This one should exist - it's a run-time dependency of an (exported) build-time dependency. 
+ exists("exported_dep_runtime_dep"), + ], +) diff --git a/test/runtime_deps/repo/from_srcs_test/BUILD_FILE b/test/runtime_deps/repo/from_srcs_test/BUILD_FILE new file mode 100644 index 000000000..5e525f651 --- /dev/null +++ b/test/runtime_deps/repo/from_srcs_test/BUILD_FILE @@ -0,0 +1,28 @@ +subinclude("//build_defs:test") + +target( + name = "runtime_deps_from_srcs", + srcs = [":src"], + runtime_deps_from_srcs = True, +) + +target( + name = "src", + runtime_deps = [":src_runtime_dep"], +) + +runtime_dep("src_runtime_dep") + +gentest( + name = "runtime_deps_from_srcs_test_case", + outs = ["runtime_deps_from_srcs_test_case"], + cmd = [ + not_exists("src_runtime_dep"), + "touch $OUTS", + ], + data = [":runtime_deps_from_srcs"], + no_test_output = True, + test_cmd = [ + exists("src_runtime_dep"), + ], +) diff --git a/test/runtime_deps/repo/test/BUILD_FILE b/test/runtime_deps/repo/test/BUILD_FILE index de3274738..c33f48cb6 100644 --- a/test/runtime_deps/repo/test/BUILD_FILE +++ b/test/runtime_deps/repo/test/BUILD_FILE @@ -1,42 +1,4 @@ -def exists(file:str): - path = join_path(package_name(), file) - return f"echo -n '{path} exists: '; if [ -f '{path}' ]; then echo OK; else echo FAIL; exit 1; fi" - -def not_exists(file:str): - path = join_path(package_name(), file) - return f"echo -n '{path} does not exist: '; if [ -f '{path}' ]; then echo FAIL; exit 1; else echo OK; fi" - -def runtime_dep(name:str): - return genrule( - name = name, - outs = [name], - cmd = "touch $OUTS", - ) - -def target(name:str, build_tests:list=[], post_build:function=None, requires:list=None, provides:dict=None, - runtime_deps:list=[], deps:list=[]): - # Ensure that run-time dependencies are not present at build time, unless they are also - # explicitly given as build-time dependencies. 
(The lstrips here turn build target names - # into the names of files they output - they're slightly grungy, but they allow us to - # re-use exists and not_exists outside of this function; they work because targets - # generated by runtime_dep output a single file who name is identical to that of the - # target's.) - cmd = [not_exists(r.lstrip(":")) for r in runtime_deps if r not in deps] - cmd += [exists(d.lstrip(":")) for d in deps] - return genrule( - name = name, - outs = [name], - # Assume that if a post-build function is defined, it dynamically adds a run-time - # dependency, which requires this target to be marked as binary. - binary = len(runtime_deps) > 0 or post_build is not None, - cmd = cmd + build_tests + ["touch $OUTS"], - post_build = post_build, - requires = requires, - provides = provides, - runtime_deps = runtime_deps, - deps = deps, - ) - +subinclude("//build_defs:test") target( name = "target_with_no_runtime_deps", diff --git a/tools/build_langserver/lsp/definition_test.go b/tools/build_langserver/lsp/definition_test.go index 1adb13bab..a728a8ca7 100644 --- a/tools/build_langserver/lsp/definition_test.go +++ b/tools/build_langserver/lsp/definition_test.go @@ -25,7 +25,7 @@ func TestDefinition(t *testing.T) { assert.Equal(t, []lsp.Location{ { URI: lsp.DocumentURI("file://" + filepath.Join(cacheDir, "please/misc_rules.build_defs")), - Range: xrng(3, 0, 153, 5), + Range: xrng(3, 0, 191, 5), }, }, locs) } @@ -45,7 +45,7 @@ func TestDefinitionStatement(t *testing.T) { assert.Equal(t, []lsp.Location{ { URI: lsp.DocumentURI("file://" + filepath.Join(cacheDir, "please/misc_rules.build_defs")), - Range: xrng(3, 0, 153, 5), + Range: xrng(3, 0, 191, 5), }, }, locs) } @@ -65,7 +65,7 @@ func TestDefinitionBuiltin(t *testing.T) { assert.Equal(t, []lsp.Location{ { URI: lsp.DocumentURI("file://" + filepath.Join(cacheDir, "please/misc_rules.build_defs")), - Range: xrng(3, 0, 153, 5), + Range: xrng(3, 0, 191, 5), }, }, locs) } From 
ecfb2f62c2bfb7f19786eac53b887db2cbd7576e Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Tue, 9 Dec 2025 14:00:20 +0000 Subject: [PATCH 22/38] Tag v17.26.0 (#3468) --- ChangeLog | 4 ++++ VERSION | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/ChangeLog b/ChangeLog index 542a4d7ed..7ec6e0a6f 100644 --- a/ChangeLog +++ b/ChangeLog @@ -1,3 +1,7 @@ +Version 17.26.0 +--------------- + * Optionally traverse `srcs` and `deps` for run-time dependencies (#3466) + Version 17.25.1 --------------- * Bump arcat to v1.3.1, fixing a bug relating to symbol table stripping on Darwin and FreeBSD (#3462) diff --git a/VERSION b/VERSION index 370ea2fe4..836f35247 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -17.25.1 +17.26.0 From 9c324cc47a296904f949cfc4c9c4aee6a19154ae Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Tue, 9 Dec 2025 15:29:30 +0000 Subject: [PATCH 23/38] Traverse `data` and `debug_data` for run-time dependencies (#3469) `data` (and `debug_data`) targets themselves may have their own run-time dependencies - also mark those targets as prerequisites for executing the top-level target. --- src/core/build_target.go | 26 ++++++++++++ test/runtime_deps/BUILD | 9 ++++ .../repo/build_defs/test.build_defs | 12 +++--- .../repo/from_data_test/BUILD_FILE | 42 +++++++++++++++++++ 4 files changed, 84 insertions(+), 5 deletions(-) create mode 100644 test/runtime_deps/repo/from_data_test/BUILD_FILE diff --git a/src/core/build_target.go b/src/core/build_target.go index eb00b38ea..d15a827b5 100644 --- a/src/core/build_target.go +++ b/src/core/build_target.go @@ -750,6 +750,32 @@ func (target *BuildTarget) IterAllRuntimeDependencies(graph *BuildGraph) iter.Se } } } + // Include the run-time dependencies of data targets, but not the data targets themselves. (We needn't worry + // about data files here - they can't have run-time dependencies of their own.) 
+ for _, data := range t.AllData() { + dataLabel, ok := data.Label() + if !ok { + continue + } + for _, providedDep := range graph.TargetOrDie(dataLabel).ProvideFor(t) { + if !push(graph.TargetOrDie(providedDep), yield) { + return false + } + } + } + if t.Debug != nil { + for _, data := range t.AllDebugData() { + dataLabel, ok := data.Label() + if !ok { + continue + } + for _, providedDep := range graph.TargetOrDie(dataLabel).ProvideFor(t) { + if !push(graph.TargetOrDie(providedDep), yield) { + return false + } + } + } + } if t.RuntimeDependenciesFromSources || t.RuntimeDependenciesFromDependencies { for _, dep := range t.dependencies { // If required, include the run-time dependencies of sources, but not the sources themselves. diff --git a/test/runtime_deps/BUILD b/test/runtime_deps/BUILD index e3afe8088..8f07efff6 100644 --- a/test/runtime_deps/BUILD +++ b/test/runtime_deps/BUILD @@ -73,6 +73,15 @@ please_repo_e2e_test( repo = "repo", ) +please_repo_e2e_test( + name = "runtime_deps_from_data_test", + plz_command = " && ".join([ + "plz test //from_data_test:runtime_deps_from_data_test_case", + "plz -c dbg test //from_data_test:runtime_deps_from_data_test_case", + ]), + repo = "repo", +) + please_repo_e2e_test( name = "runtime_deps_from_deps_test", plz_command = "plz test //from_deps_test:runtime_deps_from_deps_test_case", diff --git a/test/runtime_deps/repo/build_defs/test.build_defs b/test/runtime_deps/repo/build_defs/test.build_defs index 913807b98..2f9e725fd 100644 --- a/test/runtime_deps/repo/build_defs/test.build_defs +++ b/test/runtime_deps/repo/build_defs/test.build_defs @@ -6,9 +6,9 @@ def not_exists(file:str): path = join_path(package_name(), file) return f"echo -n '{path} does not exist: '; if [ -f '{path}' ]; then echo FAIL; exit 1; else echo OK; fi" -def target(name:str, srcs:list=[], build_tests:list=[], post_build:function=None, requires:list=None, - provides:dict=None, runtime_deps:list=[], runtime_deps_from_srcs:bool=False, 
runtime_deps_from_deps:bool=False, - exported_deps:list=[], deps:list=[]): +def target(name:str, srcs:list=[], build_tests:list=[], data:list=[], debug_data:list=[], post_build:function=None, + requires:list=None, provides:dict=None, runtime_deps:list=[], runtime_deps_from_srcs:bool=False, + runtime_deps_from_deps:bool=False, exported_deps:list=[], deps:list=[]): # Ensure that run-time dependencies are not present at build time, unless they are also # explicitly given as build-time dependencies. (The lstrips here turn build target names # into the names of files they output - they're slightly grungy, but they allow us to @@ -17,14 +17,16 @@ def target(name:str, srcs:list=[], build_tests:list=[], post_build:function=None # target's.) cmd = [not_exists(r.lstrip(":")) for r in runtime_deps if r not in deps] cmd += [exists(d.lstrip(":")) for d in deps] - return genrule( + return build_rule( name = name, srcs = srcs, outs = [name], # Assume that if a post-build function is defined, it dynamically adds a run-time # dependency, which requires this target to be marked as binary. 
binary = len(runtime_deps) > 0 or post_build is not None, - cmd = cmd + build_tests + ["touch $OUTS"], + cmd = " && ".join(cmd + build_tests + ["touch $OUTS"]), + data = data, + debug_data = debug_data, post_build = post_build, requires = requires, provides = provides, diff --git a/test/runtime_deps/repo/from_data_test/BUILD_FILE b/test/runtime_deps/repo/from_data_test/BUILD_FILE new file mode 100644 index 000000000..27eaaa871 --- /dev/null +++ b/test/runtime_deps/repo/from_data_test/BUILD_FILE @@ -0,0 +1,42 @@ +subinclude("//build_defs:test") + +target( + name = "runtime_deps_from_data", + data = [":data_with_runtime_dep"], + debug_data = [":debug_data_with_runtime_dep"], +) + +target( + name = "data_with_runtime_dep", + runtime_deps = [":data_runtime_dep"], +) + +target( + name = "debug_data_with_runtime_dep", + runtime_deps = [":debug_data_runtime_dep"], +) + +runtime_dep("data_runtime_dep") +runtime_dep("debug_data_runtime_dep") + +gentest( + name = "runtime_deps_from_data_test_case", + outs = ["runtime_deps_from_data_test_case"], + cmd = [ + not_exists("data_runtime_dep"), + not_exists("debug_data_runtime_dep"), + "touch $OUTS", + ], + data = [":runtime_deps_from_data"], + no_test_output = True, + test_cmd = { + "opt": " && ".join([ + exists("data_runtime_dep"), + not_exists("debug_data_runtime_dep"), + ]), + "dbg": " && ".join([ + exists("data_runtime_dep"), + exists("debug_data_runtime_dep"), + ]), + }, +) From c455b27443268e08fec2081e2154c42b40a45067 Mon Sep 17 00:00:00 2001 From: Chris Novakovic Date: Tue, 9 Dec 2025 15:32:45 +0000 Subject: [PATCH 24/38] Tag v17.27.0 (#3470) --- ChangeLog | 4 ++++ VERSION | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/ChangeLog b/ChangeLog index 7ec6e0a6f..f1f233b62 100644 --- a/ChangeLog +++ b/ChangeLog @@ -1,3 +1,7 @@ +Version 17.27.0 +--------------- + * Traverse `data` and `debug_data` for run-time dependencies (#3469) + Version 17.26.0 --------------- * Optionally traverse `srcs` and `deps` for 
run-time dependencies (#3466) diff --git a/VERSION b/VERSION index 836f35247..b1799abc7 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -17.26.0 +17.27.0 From 9ff11a566d8521de1c661b58b37360330fe5b4cf Mon Sep 17 00:00:00 2001 From: Simon R Date: Thu, 18 Dec 2025 11:04:38 +0100 Subject: [PATCH 25/38] pleasew: find repo root by traversing up from PWD (#3474) Instead of looking for .plzconfig in the current directory only, traverse upwards until we find it. This replicates the behavior of getRepoRoot in src/core/utils.go. Fixes #3473 Co-authored-by: Simon Reiser --- pleasew | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/pleasew b/pleasew index 4805e88e5..bf21ab79b 100755 --- a/pleasew +++ b/pleasew @@ -74,12 +74,24 @@ get_profile () { # Check `PLZ_CONFIG_PROFILE` or fall back to arguments for a profile. PROFILE="${PLZ_CONFIG_PROFILE:-$(get_profile "${@}")}" +# Find repo root by traversing up until we find .plzconfig +find_repo_root() { + dir="$PWD" + while true; do + [ -f "$dir/.plzconfig" ] && echo "$dir" && return 0 + [ "$dir" = "/" ] && return 0 + dir="$(dirname "$dir")" + done +} + +REPO_ROOT="$(find_repo_root)" + # Config files on order of precedence high to low. 
CONFIGS="$(cat <<- EOS - .plzconfig.local - ${PROFILE:+.plzconfig.${PROFILE}} - .plzconfig_${OS}_${ARCH} - .plzconfig + ${REPO_ROOT:+${REPO_ROOT}/.plzconfig.local} + ${REPO_ROOT:+${PROFILE:+${REPO_ROOT}/.plzconfig.${PROFILE}}} + ${REPO_ROOT:+${REPO_ROOT}/.plzconfig_${OS}_${ARCH}} + ${REPO_ROOT:+${REPO_ROOT}/.plzconfig} ${HOME}/.config/please/plzconfig /etc/please/plzconfig EOS From b4b1657672a99d224644271be673183ebbf59bd5 Mon Sep 17 00:00:00 2001 From: Jonathan Poole Date: Tue, 23 Dec 2025 17:03:12 +0000 Subject: [PATCH 26/38] Ensure the dir we're exporting to exists (#3345) Co-authored-by: Jonathan Poole Co-authored-by: Samuel Littley Co-authored-by: Chris Novakovic --- src/export/export.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/export/export.go b/src/export/export.go index 905be937a..2863dfef0 100644 --- a/src/export/export.go +++ b/src/export/export.go @@ -37,6 +37,10 @@ func ToDir(state *core.BuildState, dir string, noTrim bool, targets []core.Build exportedTargets: map[core.BuildLabel]bool{}, } + if err := os.MkdirAll(dir, fs.DirPermissions); err != nil { + log.Fatalf("failed to create export directory %s: %v", dir, err) + } + e.exportPlzConf() for _, target := range state.Config.Parse.PreloadSubincludes { for _, includeLabel := range append(state.Graph.TransitiveSubincludes(target), target) { From cb2d9a2e52c5a0675882e51b841f3db8b7ce71e1 Mon Sep 17 00:00:00 2001 From: Samuel Littley Date: Fri, 16 Jan 2026 10:23:50 +0000 Subject: [PATCH 27/38] Switch Please to use m4pro.medium resource class on CircleCI (#3477) M1 is deprecated, and m4pro.medium is the new hotness (with significantly more RAM...) 
--- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 6b9b39eb2..27068008b 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -125,7 +125,7 @@ jobs: working_directory: ~/please macos: xcode: "16.4.0" - resource_class: macos.m1.medium.gen1 + resource_class: m4pro.medium steps: - checkout - attach_workspace: @@ -206,7 +206,7 @@ jobs: build-darwin: macos: xcode: "16.4.0" - resource_class: macos.m1.medium.gen1 + resource_class: m4pro.medium environment: PLZ_ARGS: "--profile ci --exclude pip --exclude embed" steps: From 57a4e178bb3523dcdd9fba8bcc5d76db8ca9654e Mon Sep 17 00:00:00 2001 From: "Andrzej J. Skalski" Date: Wed, 4 Feb 2026 18:26:56 +0100 Subject: [PATCH 28/38] fix: set only one hash per version to "arcat" binary for compatibility with Buildbarn and SRI standard (#3472) Buildbarn assumes that only one SHA per binary is provided, or the SHAs are of different algorithms. To enable Buildbarn as a backend to Please, this change is necessary. With this change, current Buildbarn works as a backend. 
--------- Co-authored-by: Andrzej J Skalski --- src/parse/internal.tmpl | 7 ++----- src/parse/internal_package.go | 22 ++++++++++++++++++++-- 2 files changed, 22 insertions(+), 7 deletions(-) diff --git a/src/parse/internal.tmpl b/src/parse/internal.tmpl index f600f8fb3..90cfe6b67 100644 --- a/src/parse/internal.tmpl +++ b/src/parse/internal.tmpl @@ -4,11 +4,7 @@ remote_file( out = "arcat", binary = True, hashes = [ - "6af2cf108592535701aa9395f3a5deeb48a5dfbe8174a8ebe3d56bb93de2c255", # darwin_amd64 - "5070ef05d14c66a85d438f400c6ff734a23833929775d6824b69207b704034bf", # darwin_arm64 - "05ad6ac45be3a4ca1238bb1bd09207a596f8ff5f885415f8df4ff2dc849fa04e", # freebsd_amd64 - "aec85425355291e515cd10ac0addec3a5bc9e05c9d07af01aca8c34aaf0f1222", # linux_amd64 - "8266cb95cc84b23642bca6567f8b4bd18de399c887cb5845ab6a901d0dba54d2", # linux_arm64 + "{{ .ArcatHash }}", # defined in internal_package.go ], visibility = ["PUBLIC"], ) @@ -29,3 +25,4 @@ genrule( binary = True, ) {{ end }} + diff --git a/src/parse/internal_package.go b/src/parse/internal_package.go index c2d77bd13..b867eca57 100644 --- a/src/parse/internal_package.go +++ b/src/parse/internal_package.go @@ -27,15 +27,33 @@ func GetInternalPackage(config *core.Configuration) (string, error) { url = fmt.Sprintf("%s/%s_%s/%s/please_tools_%s.tar.xz", config.Please.DownloadLocation, runtime.GOOS, runtime.GOARCH, version.PleaseVersion, version.PleaseVersion) } + var arcatHash string + switch fmt.Sprintf("%s_%s", runtime.GOOS, runtime.GOARCH) { + case "darwin_amd64": + arcatHash = "6af2cf108592535701aa9395f3a5deeb48a5dfbe8174a8ebe3d56bb93de2c255" + case "darwin_arm64": + arcatHash = "5070ef05d14c66a85d438f400c6ff734a23833929775d6824b69207b704034bf" + case "freebsd_amd64": + arcatHash = "05ad6ac45be3a4ca1238bb1bd09207a596f8ff5f885415f8df4ff2dc849fa04e" + case "linux_amd64": + arcatHash = "aec85425355291e515cd10ac0addec3a5bc9e05c9d07af01aca8c34aaf0f1222" + case "linux_arm64": + arcatHash = 
"8266cb95cc84b23642bca6567f8b4bd18de399c887cb5845ab6a901d0dba54d2" + default: + return "", fmt.Errorf("arcat tool not supported for platform: %s_%s", runtime.GOOS, runtime.GOARCH) + } + data := struct { - ToolsURL string - Tools []string + ToolsURL string + Tools []string + ArcatHash string }{ ToolsURL: url, Tools: []string{ "build_langserver", "please_sandbox", }, + ArcatHash: arcatHash, } var buf bytes.Buffer From 9b30492f1b7ecc50fc59ad3c7dbac223a049730b Mon Sep 17 00:00:00 2001 From: cemeceme <26171877+cemeceme@users.noreply.github.com> Date: Thu, 12 Feb 2026 12:13:30 +0100 Subject: [PATCH 29/38] Add regular expression support for strings (#3486) This adds the ability to do regular expression checks on strings using the go `regexp` package. --- docs/lexicon.html | 9 +++++++ rules/builtins.build_defs | 2 ++ src/parse/asp/builtins.go | 16 ++++++++++++ src/parse/asp/interpreter.go | 12 ++++++--- test/builtins/BUILD | 7 +++++ test/builtins/strings/.plzconfig | 2 ++ test/builtins/strings/BUILD_FILE | 45 ++++++++++++++++++++++++++++++++ 7 files changed, 89 insertions(+), 4 deletions(-) create mode 100644 test/builtins/BUILD create mode 100644 test/builtins/strings/.plzconfig create mode 100644 test/builtins/strings/BUILD_FILE diff --git a/docs/lexicon.html b/docs/lexicon.html index 49ee4c266..7a2ecf710 100644 --- a/docs/lexicon.html +++ b/docs/lexicon.html @@ -509,6 +509,15 @@

- returns a copy of this string converted to lowercase. +
  • + + matches(pattern) + - returns true if the string matches the regular expression given by pattern. + +
  • diff --git a/rules/builtins.build_defs b/rules/builtins.build_defs index f405bff5b..1f62089b0 100644 --- a/rules/builtins.build_defs +++ b/rules/builtins.build_defs @@ -109,6 +109,8 @@ def upper(self:str) -> str: pass def lower(self:str) -> str: pass +def matches(self:str, pattern: str) -> bool: + pass def fail(msg:str): pass diff --git a/src/parse/asp/builtins.go b/src/parse/asp/builtins.go index d17cd7b2b..353478b07 100644 --- a/src/parse/asp/builtins.go +++ b/src/parse/asp/builtins.go @@ -7,6 +7,7 @@ import ( "io" "path/filepath" "reflect" + "regexp" "slices" "sort" "strconv" @@ -98,6 +99,7 @@ func registerBuiltins(s *scope) { "count": setNativeCode(s, "count", strCount), "upper": setNativeCode(s, "upper", strUpper), "lower": setNativeCode(s, "lower", strLower), + "matches": setNativeCode(s, "matches", strMatches), } s.interpreter.stringMethods["format"].kwargs = true s.interpreter.dictMethods = map[string]*pyFunc{ @@ -645,6 +647,20 @@ func strLower(s *scope, args []pyObject) pyObject { return pyString(strings.ToLower(self)) } +func strMatches(s *scope, args []pyObject) pyObject { + self := string(args[0].(pyString)) + pattern := string(args[1].(pyString)) + compiledRegex := s.interpreter.regexCache.Get(pattern) + if compiledRegex == nil { + compiled, err := regexp.Compile(pattern) + s.Assert(err == nil, "%s", err) + // We don't need to check if another task inserted the regex first, as it will be an identical result. 
+ s.interpreter.regexCache.Add(pattern, compiled) + compiledRegex = compiled + } + return newPyBool(compiledRegex.MatchString(self)) +} + func boolType(s *scope, args []pyObject) pyObject { return newPyBool(args[0].IsTruthy()) } diff --git a/src/parse/asp/interpreter.go b/src/parse/asp/interpreter.go index 4d8199439..93e4ca0a3 100644 --- a/src/parse/asp/interpreter.go +++ b/src/parse/asp/interpreter.go @@ -6,6 +6,7 @@ import ( "iter" "path/filepath" "reflect" + "regexp" "runtime/debug" "runtime/pprof" "strings" @@ -31,6 +32,8 @@ type interpreter struct { limiter semaphore stringMethods, dictMethods, configMethods map[string]*pyFunc + + regexCache *cmap.Map[string, *regexp.Regexp] } // newInterpreter creates and returns a new interpreter instance. @@ -42,10 +45,11 @@ func newInterpreter(state *core.BuildState, p *Parser) *interpreter { locals: map[string]pyObject{}, } i := &interpreter{ - scope: s, - parser: p, - configs: map[*core.BuildState]*pyConfig{}, - limiter: make(semaphore, state.Config.Parse.NumThreads), + scope: s, + parser: p, + configs: map[*core.BuildState]*pyConfig{}, + limiter: make(semaphore, state.Config.Parse.NumThreads), + regexCache: cmap.New[string, *regexp.Regexp](cmap.SmallShardCount, cmap.XXHash), } // If we're creating an interpreter for a subrepo, we should share the subinclude cache. 
if p.interpreter != nil { diff --git a/test/builtins/BUILD b/test/builtins/BUILD new file mode 100644 index 000000000..ccb21ec42 --- /dev/null +++ b/test/builtins/BUILD @@ -0,0 +1,7 @@ +subinclude("//test/build_defs") + +please_repo_e2e_test( + name = "strings_test", + plz_command = "plz build", + repo = "strings", +) diff --git a/test/builtins/strings/.plzconfig b/test/builtins/strings/.plzconfig new file mode 100644 index 000000000..ea85e4f73 --- /dev/null +++ b/test/builtins/strings/.plzconfig @@ -0,0 +1,2 @@ +[parse] +BuildFileName = BUILD_FILE diff --git a/test/builtins/strings/BUILD_FILE b/test/builtins/strings/BUILD_FILE new file mode 100644 index 000000000..8d7275107 --- /dev/null +++ b/test/builtins/strings/BUILD_FILE @@ -0,0 +1,45 @@ + +assert ",".join(["a","b","c"]) == "a,b,c" + +assert "a,b,c".split(",") == ["a", "b", "c"] + +assert "abc".replace("bc", "ab") == "aab" + +pre, sep, post = "a,b,c".partition(",") +assert pre == "a" and sep == "," and post == "b,c" + +pre, sep, post = "a,b,c".rpartition(",") +assert pre == "a,b" and sep == "," and post == "c" + +assert "abc".startswith("ab") == True + +assert "abc".endswith("bc") == True + +assert "a{var1},b{var2},c{var3}".format(var1="a", var2=2, var3=[3]) == "aa,b2,c[3]" + +assert "abcba".lstrip("a") == "bcba" + +assert "abcba".rstrip("a") == "abcb" + +assert "abcba".strip("a") == "bcb" + +assert "abc".removeprefix("ab") == "c" + +assert "abc".removesuffix("bc") == "a" + +assert "abcba".find("b") == 1 + +assert "abcba".rfind("b") == 3 + +assert "abcba".count("b") == 2 + +assert "abc".upper() == "ABC" + +assert "ABC".lower() == "abc" + + +assert "abc".matches("a.c") + +assert "abbbbbbc".matches("a.*c") + +assert not "abc".matches("$b") From 82abde49144169e0bb35682c4c970fad31ba5632 Mon Sep 17 00:00:00 2001 From: "Xiao, Yujiao" Date: Wed, 18 Feb 2026 10:11:55 +0000 Subject: [PATCH 30/38] Remove redundant text from Python codelab conclusion --- docs/codelabs/python_intro.md | 4 ---- 1 file changed, 4 
deletions(-) diff --git a/docs/codelabs/python_intro.md b/docs/codelabs/python_intro.md index b438e33c2..bb1d1882b 100644 --- a/docs/codelabs/python_intro.md +++ b/docs/codelabs/python_intro.md @@ -349,7 +349,3 @@ determine files changes since master, watch rules and build them automatically a `plz help`, and explore this rich set of commands! Otherwise, why not try one of the other codelabs! -, watch rules and build them automatically as things change and much more! Use -`plz help`, and explore this rich set of commands! - -Otherwise, why not try one of the other codelabs! From da14ac43e697098215b95da05cc4480df2010477 Mon Sep 17 00:00:00 2001 From: scyyx5 Date: Thu, 23 Oct 2025 08:03:26 +0100 Subject: [PATCH 31/38] update codelab --- docs/codelabs/genrule.md | 800 +++++------ docs/codelabs/github_actions.md | 353 +++-- docs/codelabs/go_intro.md | 989 +++++++------- docs/codelabs/k8s.md | 910 ++++++------- docs/codelabs/plz_query.md | 613 ++++----- docs/codelabs/python_intro.md | 707 +++++----- docs/codelabs/using_plugins.md | 311 ++--- docs/commands.html | 2260 +++++++++++++++---------------- 8 files changed, 3498 insertions(+), 3445 deletions(-) diff --git a/docs/codelabs/genrule.md b/docs/codelabs/genrule.md index 2f8b6e5de..f73642749 100644 --- a/docs/codelabs/genrule.md +++ b/docs/codelabs/genrule.md @@ -1,394 +1,406 @@ -summary: Writing custom build definitions -description: Start here to learn how to write custom build rules to automate nearly anything in your build -id: genrule -categories: intermediate -tags: medium -status: Published -authors: Jon Poole -Feedback Link: https://github.com/thought-machine/please - -# Custom build rules with `genrule()` -## Overview -Duration: 1 - -### Prerequisites -- You must have Please installed: [Install Please](https://please.build/quickstart.html) -- You should be comfortable using the existing build rules. 
- -### What you'll learn -We'll be working through a contrived example writing a build definition for -[wc](https://www.gnu.org/software/coreutils/manual/html_node/wc-invocation.html#wc-invocation) from core utils. -In doing so you'll: -- Be introduced to genrule(), the generic build rule -- Explore the build environment with `--shell` -- Write and use custom build rule definitions -- Manage and write custom tools for your build definition -- Add configuration for your build definitions - -### What if I get stuck? - -The final result of running through this codelab can be found -[here](https://github.com/thought-machine/please-codelabs/tree/main/custom_rules) for reference. If you really get stuck -you can find us on [gitter](https://gitter.im/please-build/Lobby)! - -## genrule() -Duration: 3 - -Before we jump into writing custom build definitions, let me introduce you to `genrule()`, the generic build rule. Let's -just create a new project and initialise Please in it: -``` -$ mkdir custom_rules && cd custom_rules -$ plz init --no_prompt -``` - -Then create a `BUILD` file in the root of the repository like so: -### `BUILD` -```python -genrule( - name = "word_count", - srcs = ["file.txt"], - deps = [], - cmd = "wc $SRC > $OUT", - outs = ["file.wc"], -) -``` - -Then create file.txt: -``` -$ echo "the quick brown fox jumped over the lazy dog" > file.txt -``` - -and build it: - -``` -$ plz build //:word_count -Build finished; total time 70ms, incrementality 0.0%. Outputs: -//:word_count: - plz-out/gen/file.wc - -$ cat plz-out/gen/file.wc - 1 9 45 file.txt -``` - -### So what's going on? -Here we've used one of the built-in rules, `genrule()`, to run a custom command. `genrule()` can take a number of -parameters, most notably: the name of the rule, the inputs (sources and dependencies), its outputs, and the command -we want to run. The full list of available arguments can be found on the [`genrule()`](/lexicon.html#genrule) -documentation. 
- -Here we've used it to count the number of words in `file.txt`. Please has helpfully set up some environment variables -that help us find our inputs, as well as where to put our outputs: - -- `$SRC` - Set when there's only one item in the `srcs` list. Contains the path to that source file. -- `$SRCS` - Contains a space-separated list of the sources of the rule. -- `$OUT` - Set when there's only one item in the `outs` list. Contains the expected path of that output. -- `$OUTS` - Contains a space-separated list of the expected paths of the outputs of the rule. - -For a complete list of available variables, see the [build env](/build_rules.html#build-env) docs. - -The command `wc $SRC > $OUT` is therefore translated into `wc file.txt > file.wc` and we can see that the output of the -rule has been saved to `plz-out/gen/file.wc`. - -## The build directory -Duration: 7 - -One of the key features of Please is that builds are hermetic, that is, commands are executed in an isolated and -controlled environment. Rules can't access files or env vars that are not explicitly made available to them. As a -result, incremental builds very rarely break when using Please. - -Considering this, debugging builds would be quite hard if we couldn't play around in this build environment. Luckily, -Please makes this trivial with the `--shell` flag: - -``` -$ plz build --shell :word_count -Temp directories prepared, total time 50ms: - //:word_count: plz-out/tmp/word_count._build - Command: wc $SRC > $OUT - -bash-4.4$ pwd -/plz-out/tmp/word_count._build - -bash-4.4$ wc $SRC > $OUT - -bash-4.4$ cat $OUT - 1 9 45 file.txt -``` - -As we can see, Please has prepared a temporary directory for us under `plz-out/tmp`, and put us in a true-to-life bash -environment. 
You may run `printenv`, to see the environment variables that Please has made available to us: - -``` -bash-4.4$ printenv -OS=linux -ARCH=amd64 -LANG=en_GB.UTF-8 -TMP_DIR=/plz-out/tmp/word_count._build -CMD=wc $SRC > $OUT -OUT=/plz-out/tmp/word_count._build/file.wc -TOOLS= -SRCS=file.txt -PKG= -CONFIG=opt -PYTHONHASHSEED=42 -SRC=file.txt -OUTS=file.wc -PWD=/plz-out/tmp/word_count._build -HOME=/plz-out/tmp/word_count._build -NAME=word_count -TMPDIR=/plz-out/tmp/word_count._build -BUILD_CONFIG=opt -XOS=linux -XARCH=x86_64 -SHLVL=1 -PATH=/.please:/usr/local/bin:/usr/bin:/bin -GOOS=linux -PKG_DIR=. -GOARCH=amd64 -_=/usr/bin/printenv -``` - -As you can see, the rule doesn't have access to any of the variables from the host machine. Even `$PATH` has been set -based on configuration in `.plzconfig`: - -The `--shell` flag works for all targets (except filegroups), which of course means any of the built-in rules! Note, -`--shell` also works on `plz test`. You can `plz build --shell //my:test` to see how the test is built, and then -`plz test --shell //my:test` to see how it will be run. - -## Build definitions -Duration: 5 - -We've managed to write a custom rule to count the number of words in `file.txt`, however, we have no way of reusing this, -so let's create a `wordcount()` build definition! - -A build definition is just a function that creates one or more build targets which define how to build something. These -are typically defined inside `.build_def` files within your repository. Let's just create a folder for our definition: - -### `build_defs/word_count.build_defs` -```python -def word_count(name:str, file:str) -> str: - return genrule( - name = name, - srcs = [file], - outs = [f"{name}.wc"], - cmd = "wc $SRC > $OUT", - ) -``` - -We then need some way to access these build definitions from other packages. 
To do this, we typically use a filegroup: - -### `build_defs/BUILD` -```python -filegroup( - name = "word_count", - srcs = ["word_count.build_defs"], - visibility = ["PUBLIC"], -) -``` - -We can then use this in place of our `genrule()`: - -### `BUILD` -```python -subinclude("//build_defs:word_count") - -word_count( - name = "word_count", - file = "file.txt", -) -``` - -And check it still works: - -``` -$ plz build //:word_count -Build finished; total time 30ms, incrementality 100.0%. Outputs: -//:word_count: - plz-out/gen/word_count.wc -``` - -### `subinclude()` -Subinclude is primarily used for including build definitions into your `BUILD` file. It can be thought of like a -Python import except it operates on a build target instead. Under the hood, subinclude parses the output of the target -and makes the top-level declarations available in the current package's scope. - -The build target is usually a filegroup, however, this doesn't have to be the case. In fact, the build target can be -anything that produces parsable outputs. - -It's almost always a bad idea to build anything as part of a subinclude. These rules will be built at parse time, -which can be hard to debug, but more importantly, will block the parser while it waits for that rule to build. Use -non-filegroup subincludes under very careful consideration! - -## Managing tools -Duration: 7 - -Right now we're relying on `wc` to be available on the configured path. This is a pretty safe bet, however, Please -provides a powerful mechanism for managing tools, so let's over-engineer this: - -### `build_defs/word_count.build_defs` -```python -def word_count(name:str, file:str, wc_tool:str="wc") -> str: - return genrule( - name = name, - srcs = [file], - outs = [f"{name}.wc"], - cmd = "$TOOLS_WC $SRC > $OUT", - tools = { - "WC": [wc_tool], - } - ) -``` - -Here we've configured our build definition to take the word count tool in as a parameter. This is then passed to -`genrule()` via the `tools` parameter. 
Please has set up the `$TOOLS_WC` environment variable which we can used to -locate our tool. The name of this variable is based on the key in this dictionary. - -In this contrived example, this may not seem very useful, however, Please will perform some important tasks for us: - -- If the tool is a program, Please will check it's available on the path at parse time. -- If the tool is a build rule, Please will build this rule and configure `$TOOLS_WC` so it can be invoked. Whether the -tool is on the path or a build rule is transparent to you, the rule's author! - -### Custom word count tool -Currently, our word count rule doesn't just get the word count: it also gets the character and line count as well. I -mentioned that these can be build rules so let's create a true word count tool that counts just words: - -### `tools/wc.sh` -```shell script -#!/bin/bash - -wc -w $@ -``` - -### `tools/BUILD` -```python -sh_binary( - name = "wc", - main = "wc.sh", - visibility = ["PUBLIC"], -) -``` - -and let's test that out: - -``` -$ plz run //tools:wc -- file.txt -9 file.txt -``` - -Brilliant! We can now use this in our build rule like so: - -### `BUILD` -```python -subinclude("//build_defs:word_count") - -word_count( - name = "lines_words_and_chars", - file = "file.txt", -) - -word_count( - name = "just_words", - file = "file.txt", - wc_tool = "//tools:wc", -) -``` - -and check it all works: - -``` -$ plz build //:lines_words_and_chars //:just_words -Build finished; total time 30ms, incrementality 100.0%. Outputs: -//:lines_words_and_chars: - plz-out/gen/lines_words_and_chars.wc -//:just_words: - plz-out/gen/just_words.wc - -$ cat plz-out/gen/lines_words_and_chars.wc -1 9 45 file.txt - -$ cat plz-out/gen/just_words.wc -9 file.txt -``` - -## Configuration -Duration: 6 - -Right now, we have to specify the new word count tool each time we use our build definition! 
Let's have a look at how we -can configure this in our `.plzconfig` instead: - -### `.plzconfig` -``` -[Buildconfig] -word-count-tool = //tools:wc -``` - -The `[buildconfig]` section can be used to add configuration specific to your project. By adding the `word-count-tool` -config option here, we can use this in our build definition: - -### `build_defs/word_count.build_defs` -```python -def word_count(name:str, file:str, wc_tool:str=CONFIG.WORD_COUNT_TOOL) -> str: - return genrule( - name = name, - srcs = [file], - outs = [f"{name}.wc"], - cmd = "$TOOLS_WC $SRC > $OUT", - tools = { - "WC": [wc_tool], - } - ) - -CONFIG.setdefault('WORD_COUNT_TOOL', 'wc') -``` - -Here we've set the default value for `wc_tool` to `CONFIG.WORD_COUNT_TOOL`, which will contain our config value from -`.plzconfig`. What if that's not set though? That's why we also set a sensible default configuration value with -`CONFIG.setdefault('WORD_COUNT_TOOL', 'wc')`! - - -We then need to update our build rules: - -### `BUILD` -```python -subinclude("//build_defs:word_count") - -word_count( - name = "lines_words_and_chars", - file = "file.txt", - wc_tool = "wc", -) - -word_count( - name = "just_words", - file = "file.txt", -) -``` - -and check it all works: - -``` -$ plz build //:lines_words_and_chars //:just_words -Build finished; total time 30ms, incrementality 100.0%. Outputs: -//:lines_words_and_chars: - plz-out/gen/lines_words_and_chars.wc -//:just_words: - plz-out/gen/just_words.wc - -$ cat plz-out/gen/lines_words_and_chars.wc -1 9 45 file.txt - -$ cat plz-out/gen/just_words.wc -9 file.txt -``` - -## Conclusion -Duration: 2 - -Congratulations! You've written your first build definition! While contrived, this example demonstrates most of the -mechanisms used to create a rich set of build definitions for a new language or technology. To get a better understanding -of build rules, I recommend reading through the advanced topics on [please.build](/build_rules.html). 
- -If you create something you believe will be useful to the wider world, we might be able to find a home for it in the -[pleasings](https://github.com/thought-machine/pleasings) repo! - -If you get stuck, jump on [gitter](https://gitter.im/please-build/Lobby) and we'll do our best to help you! +summary: Writing custom build definitions +description: Start here to learn how to write custom build rules to automate nearly anything in your build +id: genrule +categories: intermediate +tags: medium +status: Published +authors: Jon Poole +Feedback Link: https://github.com/thought-machine/please + +# Custom build rules with `genrule()` +## Overview +Duration: 1 + +### Prerequisites +- You must have Please installed: [Install Please](https://please.build/quickstart.html) +- You should be comfortable using the existing build rules. + +### What you'll learn +We'll be working through a contrived example writing a build definition for +[wc](https://www.gnu.org/software/coreutils/manual/html_node/wc-invocation.html#wc-invocation) from core utils. +In doing so you'll: +- Be introduced to genrule(), the generic build rule +- Explore the build environment with `--shell` +- Write and use custom build rule definitions +- Manage and write custom tools for your build definition +- Add configuration for your build definitions + +### What if I get stuck? + +The final result of running through this codelab can be found +[here](https://github.com/thought-machine/please-codelabs/tree/main/custom_rules) for reference. If you really get stuck +you can find us on [gitter](https://gitter.im/please-build/Lobby)! + +## genrule() +Duration: 3 + +Before we jump into writing custom build definitions, let me introduce you to `genrule()`, the generic build rule. 
Let's +just create a new project and initialise Please in it: +```bash +mkdir custom_rules && cd custom_rules +plz init --no_prompt +``` + +Then create a `BUILD` file in the root of the repository like so: +### `BUILD` +```python +genrule( + name = "word_count", + srcs = ["file.txt"], + deps = [], + cmd = "wc $SRC > $OUT", + outs = ["file.wc"], +) +``` + +Then create file.txt: +```bash +echo "the quick brown fox jumped over the lazy dog" > file.txt +``` + +and build it: + +```bash +$ plz build //:word_count +Build finished; total time 70ms, incrementality 0.0%. Outputs: +//:word_count: + plz-out/gen/file.wc + +$ cat plz-out/gen/file.wc + 1 9 45 file.txt +``` + +### Troubleshooting: "can't store data at section "scm"" + +This message means the runner is using an older Please release that doesn’t understand the `[scm]` section in your `.plzconfig`, so parsing fails before any build work begins. + +**How to fix** +- Upgrade the Please version invoked in CI (pin the same version locally via `pleasew`, `setup-please`, or `PLZ_VERSION`). +- If upgrading immediately is impractical, temporarily remove or comment the `[scm]` block until the runner is updated. + +### So what's going on? +Here we've used one of the built-in rules, `genrule()`, to run a custom command. `genrule()` can take a number of +parameters, most notably: the name of the rule, the inputs (sources and dependencies), its outputs, and the command +we want to run. The full list of available arguments can be found on the [`genrule()`](/lexicon.html#genrule) +documentation. + +Here we've used it to count the number of words in `file.txt`. Please has helpfully set up some environment variables +that help us find our inputs, as well as where to put our outputs: + +- `$SRC` - Set when there's only one item in the `srcs` list. Contains the path to that source file. +- `$SRCS` - Contains a space-separated list of the sources of the rule. +- `$OUT` - Set when there's only one item in the `outs` list. 
Contains the expected path of that output. +- `$OUTS` - Contains a space-separated list of the expected paths of the outputs of the rule. + +For a complete list of available variables, see the [build env](/build_rules.html#build-env) docs. + +The command `wc $SRC > $OUT` is therefore translated into `wc file.txt > file.wc` and we can see that the output of the +rule has been saved to `plz-out/gen/file.wc`. + +## The build directory +Duration: 7 + +One of the key features of Please is that builds are hermetic, that is, commands are executed in an isolated and +controlled environment. Rules can't access files or env vars that are not explicitly made available to them. As a +result, incremental builds very rarely break when using Please. + +Considering this, debugging builds would be quite hard if we couldn't play around in this build environment. Luckily, +Please makes this trivial with the `--shell` flag: + +``` +$ plz build --shell :word_count +Temp directories prepared, total time 50ms: + //:word_count: plz-out/tmp/word_count._build + Command: wc $SRC > $OUT + +bash-4.4$ pwd +/plz-out/tmp/word_count._build + +bash-4.4$ wc $SRC > $OUT + +bash-4.4$ cat $OUT + 1 9 45 file.txt +``` + +As we can see, Please has prepared a temporary directory for us under `plz-out/tmp`, and put us in a true-to-life bash +environment. You may run `printenv`, to see the environment variables that Please has made available to us: + +``` +bash-4.4$ printenv +OS=linux +ARCH=amd64 +LANG=en_GB.UTF-8 +TMP_DIR=/plz-out/tmp/word_count._build +CMD=wc $SRC > $OUT +OUT=/plz-out/tmp/word_count._build/file.wc +TOOLS= +SRCS=file.txt +PKG= +CONFIG=opt +PYTHONHASHSEED=42 +SRC=file.txt +OUTS=file.wc +PWD=/plz-out/tmp/word_count._build +HOME=/plz-out/tmp/word_count._build +NAME=word_count +TMPDIR=/plz-out/tmp/word_count._build +BUILD_CONFIG=opt +XOS=linux +XARCH=x86_64 +SHLVL=1 +PATH=/.please:/usr/local/bin:/usr/bin:/bin +GOOS=linux +PKG_DIR=. 
+GOARCH=amd64 +_=/usr/bin/printenv +``` + +As you can see, the rule doesn't have access to any of the variables from the host machine. Even `$PATH` has been set +based on configuration in `.plzconfig`: + +The `--shell` flag works for all targets (except filegroups), which of course means any of the built-in rules! Note, +`--shell` also works on `plz test`. You can `plz build --shell //my:test` to see how the test is built, and then +`plz test --shell //my:test` to see how it will be run. + +## Build definitions +Duration: 5 + +We've managed to write a custom rule to count the number of words in `file.txt`, however, we have no way of reusing this, +so let's create a `wordcount()` build definition! + +A build definition is just a function that creates one or more build targets which define how to build something. These +are typically defined inside `.build_def` files within your repository. Let's just create a folder for our definition: + +### `build_defs/word_count.build_defs` +```python +def word_count(name:str, file:str) -> str: + return genrule( + name = name, + srcs = [file], + outs = [f"{name}.wc"], + cmd = "wc $SRC > $OUT", + ) +``` + +We then need some way to access these build definitions from other packages. To do this, we typically use a filegroup: + +### `build_defs/BUILD` +```python +filegroup( + name = "word_count", + srcs = ["word_count.build_defs"], + visibility = ["PUBLIC"], +) +``` + +We can then use this in place of our `genrule()`: + +### `BUILD` +```python +subinclude("//build_defs:word_count") + +word_count( + name = "word_count", + file = "file.txt", +) +``` + +And check it still works: + +```bash +plz build //:word_count +``` +The output: + +``` +Build finished; total time 30ms, incrementality 100.0%. Outputs: +//:word_count: + plz-out/gen/word_count.wc +``` + +### `subinclude()` +Subinclude is primarily used for including build definitions into your `BUILD` file. 
It can be thought of like a +Python import except it operates on a build target instead. Under the hood, subinclude parses the output of the target +and makes the top-level declarations available in the current package's scope. + +The build target is usually a filegroup, however, this doesn't have to be the case. In fact, the build target can be +anything that produces parsable outputs. + +It's almost always a bad idea to build anything as part of a subinclude. These rules will be built at parse time, +which can be hard to debug, but more importantly, will block the parser while it waits for that rule to build. Use +non-filegroup subincludes under very careful consideration! + +## Managing tools +Duration: 7 + +Right now we're relying on `wc` to be available on the configured path. This is a pretty safe bet, however, Please +provides a powerful mechanism for managing tools, so let's over-engineer this: + +### `build_defs/word_count.build_defs` +```python +def word_count(name:str, file:str, wc_tool:str="wc") -> str: + return genrule( + name = name, + srcs = [file], + outs = [f"{name}.wc"], + cmd = "$TOOLS_WC $SRC > $OUT", + tools = { + "WC": [wc_tool], + } + ) +``` + +Here we've configured our build definition to take the word count tool in as a parameter. This is then passed to +`genrule()` via the `tools` parameter. Please has set up the `$TOOLS_WC` environment variable which we can used to +locate our tool. The name of this variable is based on the key in this dictionary. + +In this contrived example, this may not seem very useful, however, Please will perform some important tasks for us: + +- If the tool is a program, Please will check it's available on the path at parse time. +- If the tool is a build rule, Please will build this rule and configure `$TOOLS_WC` so it can be invoked. Whether the +tool is on the path or a build rule is transparent to you, the rule's author! 
+ +### Custom word count tool +Currently, our word count rule doesn't just get the word count: it also gets the character and line count as well. I +mentioned that these can be build rules so let's create a true word count tool that counts just words: + +### `tools/wc.sh` +```shell script +#!/bin/bash + +wc -w $@ +``` + +### `tools/BUILD` +```python +sh_binary( + name = "wc", + main = "wc.sh", + visibility = ["PUBLIC"], +) +``` + +and let's test that out: + +``` +$ plz run //tools:wc -- file.txt +9 file.txt +``` + +Brilliant! We can now use this in our build rule like so: + +### `BUILD` +```python +subinclude("//build_defs:word_count") + +word_count( + name = "lines_words_and_chars", + file = "file.txt", +) + +word_count( + name = "just_words", + file = "file.txt", + wc_tool = "//tools:wc", +) +``` + +and check it all works: + +``` +$ plz build //:lines_words_and_chars //:just_words +Build finished; total time 30ms, incrementality 100.0%. Outputs: +//:lines_words_and_chars: + plz-out/gen/lines_words_and_chars.wc +//:just_words: + plz-out/gen/just_words.wc + +$ cat plz-out/gen/lines_words_and_chars.wc +1 9 45 file.txt + +$ cat plz-out/gen/just_words.wc +9 file.txt +``` + +## Configuration +Duration: 6 + +Right now, we have to specify the new word count tool each time we use our build definition! Let's have a look at how we +can configure this in our `.plzconfig` instead: + +### `.plzconfig` +``` +[Buildconfig] +word-count-tool = //tools:wc +``` + +The `[buildconfig]` section can be used to add configuration specific to your project. 
By adding the `word-count-tool` +config option here, we can use this in our build definition: + +### `build_defs/word_count.build_defs` +```python +def word_count(name:str, file:str, wc_tool:str=CONFIG.WORD_COUNT_TOOL) -> str: + return genrule( + name = name, + srcs = [file], + outs = [f"{name}.wc"], + cmd = "$TOOLS_WC $SRC > $OUT", + tools = { + "WC": [wc_tool], + } + ) + +CONFIG.setdefault('WORD_COUNT_TOOL', 'wc') +``` + +Here we've set the default value for `wc_tool` to `CONFIG.WORD_COUNT_TOOL`, which will contain our config value from +`.plzconfig`. What if that's not set though? That's why we also set a sensible default configuration value with +`CONFIG.setdefault('WORD_COUNT_TOOL', 'wc')`! + + +We then need to update our build rules: + +### `BUILD` +```python +subinclude("//build_defs:word_count") + +word_count( + name = "lines_words_and_chars", + file = "file.txt", + wc_tool = "wc", +) + +word_count( + name = "just_words", + file = "file.txt", +) +``` + +and check it all works: + +``` +$ plz build //:lines_words_and_chars //:just_words +Build finished; total time 30ms, incrementality 100.0%. Outputs: +//:lines_words_and_chars: + plz-out/gen/lines_words_and_chars.wc +//:just_words: + plz-out/gen/just_words.wc + +$ cat plz-out/gen/lines_words_and_chars.wc +1 9 45 file.txt + +$ cat plz-out/gen/just_words.wc +9 file.txt +``` + +## Conclusion +Duration: 2 + +Congratulations! You've written your first build definition! While contrived, this example demonstrates most of the +mechanisms used to create a rich set of build definitions for a new language or technology. To get a better understanding +of build rules, I recommend reading through the advanced topics on [please.build](/build_rules.html). + +If you create something you believe will be useful to the wider world, we might be able to find a home for it in the +[pleasings](https://github.com/thought-machine/pleasings) repo! 
+ +If you get stuck, jump on [gitter](https://gitter.im/please-build/Lobby) and we'll do our best to help you! diff --git a/docs/codelabs/github_actions.md b/docs/codelabs/github_actions.md index 4e184b545..e959a6782 100644 --- a/docs/codelabs/github_actions.md +++ b/docs/codelabs/github_actions.md @@ -1,177 +1,176 @@ -summary: Running Please on GitHub Actions -description: GitHub Actions is an extensible CI/CD platform provided by GitHub -id: github_actions -categories: intermediate -tags: medium -status: Published -authors: Márk Sági-Kazár -Feedback Link: https://github.com/thought-machine/please - -# Running Please on GitHub Actions -## Overview -Duration: 2 - -### Prerequisites -- A repository on [GitHub](https://github.com) -- A project with Please initialized in that repository - -### What you'll learn -- How to setup [GitHub Actions](https://github.com/features/actions) -- How to use Please in a GitHub Actions build -- How to use the [setup-please](https://github.com/sagikazarmark/setup-please-action) action for better integration - -### What if I get stuck? -If you get stuck with GitHub Actions, check out the [official documentation](https://docs.github.com/en/free-pro-team@latest/actions). - -You can find usage examples of the [setup-please](https://github.com/sagikazarmark/setup-please-action) action in [this](https://github.com/sagikazarmark/todobackend-go-kit/blob/20292fc09e25196e751e087da7c5e659cd6c452f/.github/workflows/ci.yaml) repository. - -If you really get -stuck you can find us on [gitter](https://gitter.im/please-build/Lobby)! - -## GitHub Actions -Duration: 5 - -GitHub Actions is an extensible CI/CD platform provided by GitHub. -Compared to other CI/CD solutions, GitHub Actions allows you to build all kinds automations (called workflows) triggered by various events (eg. pushing code to a branch). - -### Setting up GitHub Actions - -Workflow definitions are simple YAML files stored in the `.github/workflows` directory of your repository. 
- -The following snippet triggers a workflow named `CI` whenever commits are pushed to the `master` branch: - -```yaml -name: CI - -on: - push: - branches: - - master - pull_request: - -jobs: - test: - name: Test - runs-on: ubuntu-latest - - steps: - - name: Checkout code - uses: actions/checkout@v2 - - - name: Test - run: echo "Tests passed" -``` - -Go ahead and add the above snippet to `.github/workflows/ci.yaml` in your project. Then go to `https://github.com/YOU/YOUR-PROJECT/actions` and observe the workflow. - -## Please build -Duration: 4 - -Now we have a project setup with GitHub Actions, it's time to start building with Please! Let's change `ci.yaml` a little: - -```yaml -name: CI - -on: - push: - branches: - - master - pull_request: - -jobs: - test: - name: Test - runs-on: ubuntu-latest - - steps: - # Setup your language of choice here: - # https://github.com/actions/?q=setup-&type=&language= - - - name: Checkout code - uses: actions/checkout@v2 - - # Run please build - - name: Test - run: ./pleasew build //... -``` - -Compared to the example earlier, this workflow uses the `pleasew` script to download Please and build the project. - -Notice the `//...` bit at the end of the command: it's necessary on GitHub Actions. -Check [this](https://github.com/thought-machine/please/issues/1174) issue for more details. - -## setup-please action -Duration: 10 - -The [setup-please](https://github.com/sagikazarmark/setup-please-action) action provides better integration for Please. - -### What is an _action_? - -As you've seen in the previous examples, workflows consist of _steps_. -A workflow step can be as simple as a shell script: - -```yaml -- name: Test - run: ./pleasew build //... -``` - -Shell scripts (no matter how awesome they are) are not always the right tool for the job. 
Complex build steps might require a more expressive language which takes us to the second type of workflow steps, called _actions_: - -```yaml -- name: Checkout code - uses: actions/checkout@v2 -``` - -An _action_ can be written in any language (distributed as Docker images), but JavaScript is supported natively. - -### Why not just use ./pleasew? - -The above section about _actions_ begs the question: why not just use `pleasew`? Why do we need an action for running Please. - -Please itself can perfectly run on GitHub Actions on its own, so you don't need an _action_ per se. That being said, there are a couple issues when using `pleasew`: - -- The wrapper script does not understand Please configuration which can lead to multiple downloads of different versions to different locations which takes time and time is expensive in CI. -- When using self-hosted runners, GitHub Actions offers a cache specifically for tools (like Please) that can further speed up workflows, but it requires a custom action. - -The [setup-please](https://github.com/sagikazarmark/setup-please-action) action provides better integration for Please solving the above issues (and a lot more). - -### Using the setup-please action - -Adding the [setup-please](https://github.com/sagikazarmark/setup-please-action) action to your workflow is simply adding two lines: - -```yaml -name: CI - -on: - push: - branches: - - master - pull_request: - -jobs: - test: - name: Test - runs-on: ubuntu-latest - - steps: - # Setup your language of choice here: - # https://github.com/actions/?q=setup-&type=&language= - - - name: Checkout code - uses: actions/checkout@v2 - - # Make sure it's added after the checkout step - - name: Set up Please - uses: sagikazarmark/setup-please-action@v0 - - # Run please build - # You can use plz thanks to the setup action - - name: Test - run: plz test //... 
-``` - -The readme of [setup-please](https://github.com/sagikazarmark/setup-please-action) explains more use cases and configuration options: - -- global include/exclude labels -- global profile -- saving logs as artifacts +summary: Running Please on GitHub Actions +description: GitHub Actions is an extensible CI/CD platform provided by GitHub +id: github_actions +categories: intermediate +tags: medium +status: Published +authors: Márk Sági-Kazár +Feedback Link: https://github.com/thought-machine/please + +# Running Please on GitHub Actions +## Overview +Duration: 2 + +### Prerequisites +- A repository on [GitHub](https://github.com) +- A project with Please initialized in that repository + +### What you'll learn +- How to setup [GitHub Actions](https://github.com/features/actions) +- How to use Please in a GitHub Actions build +- How to use the [setup-please](https://github.com/sagikazarmark/setup-please-action) action for better integration + +### What if I get stuck? +If you get stuck with GitHub Actions, check out the [official documentation](https://docs.github.com/en/free-pro-team@latest/actions). + +You can find usage examples of the [setup-please](https://github.com/sagikazarmark/setup-please-action) action in [this](https://github.com/sagikazarmark/todobackend-go-kit/blob/20292fc09e25196e751e087da7c5e659cd6c452f/.github/workflows/ci.yaml) repository. + +If you really get +stuck you can find us on [gitter](https://gitter.im/please-build/Lobby)! + +## GitHub Actions +Duration: 5 + +GitHub Actions is GitHub's built-in automation platform for CI/CD and other workflows. It runs workflows defined as YAML files in the .github/workflows directory, triggered by events (push, pull_request, schedule, manual, etc.). Workflows consist of jobs (run on hosted or self‑hosted runners) and steps that execute shell commands or reusable actions from the marketplace. 
Key benefits include tight GitHub integration, flexible triggers and matrices, a large action marketplace, and caching for faster builds. + +### Setting up GitHub Actions + +Workflow definitions are simple YAML files stored in the `.github/workflows` directory of your repository. + +The following snippet triggers a workflow named `CI` whenever commits are pushed to the `master` branch: + +```yaml +name: CI + +on: + push: + branches: + - master + pull_request: + +jobs: + test: + name: Test + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Test + run: echo "Tests passed" +``` + +Go ahead and add the above snippet to `.github/workflows/ci.yaml` in your project. Then go to `https://github.com/YOU/YOUR-PROJECT/actions` and observe the workflow. + +## Please build +Duration: 4 + +Now we have a project setup with GitHub Actions, it's time to start building with Please! Let's change `ci.yaml` a little: + +```yaml +name: CI + +on: + push: + branches: + - master + pull_request: + +jobs: + test: + name: Test + runs-on: ubuntu-latest + + steps: + # Setup your language of choice here: + # https://github.com/actions/?q=setup-&type=&language= + + - name: Checkout code + uses: actions/checkout@v2 + + # Run please build + - name: Test + run: ./pleasew build //... +``` + +Compared to the example earlier, this workflow uses the `pleasew` script to download Please and build the project. + +Notice the `//...` bit at the end of the command: it's necessary on GitHub Actions. +Check [this](https://github.com/thought-machine/please/issues/1174) issue for more details. + +## setup-please action +Duration: 10 + +The [setup-please](https://github.com/sagikazarmark/setup-please-action) action provides better integration for Please. + +### What is an _action_? + +As you've seen in the previous examples, workflows consist of _steps_. +A workflow step can be as simple as a shell script: + +```yaml +- name: Test + run: ./pleasew build //... 
+``` + +Shell scripts (no matter how awesome they are) are not always the right tool for the job. Complex build steps might require a more expressive language which takes us to the second type of workflow steps, called _actions_: + +```yaml +- name: Checkout code + uses: actions/checkout@v2 +``` + +An _action_ can be written in any language (distributed as Docker images), but JavaScript is supported natively. + +### Why not just use ./pleasew? + +The above section about _actions_ begs the question: why not just use `pleasew`? Why do we need an action for running Please. + +Please itself can perfectly run on GitHub Actions on its own, so you don't need an _action_ per se. That being said, there are a couple issues when using `pleasew`: + +- The wrapper script does not understand Please configuration which can lead to multiple downloads of different versions to different locations which takes time and time is expensive in CI. +- When using self-hosted runners, GitHub Actions offers a cache specifically for tools (like Please) that can further speed up workflows, but it requires a custom action. + +The [setup-please](https://github.com/sagikazarmark/setup-please-action) action provides better integration for Please solving the above issues (and a lot more). + +### Using the setup-please action + +Adding the [setup-please](https://github.com/sagikazarmark/setup-please-action) action to your workflow is simply adding two lines: + +```yaml +name: CI + +on: + push: + branches: + - master + pull_request: + +jobs: + test: + name: Test + runs-on: ubuntu-latest + + steps: + # Setup your language of choice here: + # https://github.com/actions/?q=setup-&type=&language= + + - name: Checkout code + uses: actions/checkout@v2 + + # Make sure it's added after the checkout step + - name: Set up Please + uses: sagikazarmark/setup-please-action@v0 + + # Run please build + # You can use plz thanks to the setup action + - name: Test + run: plz test //... 
+``` + +The readme of [setup-please](https://github.com/sagikazarmark/setup-please-action) explains more use cases and configuration options: + +- global include/exclude labels +- global profile +- saving logs as artifacts diff --git a/docs/codelabs/go_intro.md b/docs/codelabs/go_intro.md index addbbd212..a68a80c91 100644 --- a/docs/codelabs/go_intro.md +++ b/docs/codelabs/go_intro.md @@ -1,483 +1,506 @@ -summary: Getting started with Go -description: Building and testing with Go and Please, as well as managing third-party dependencies via go_repo -id: go_intro -categories: beginner -tags: medium -status: Published -authors: Jon Poole -Feedback Link: https://github.com/thought-machine/please - -# Getting started with Go -## Overview -Duration: 4 - -### Prerequisites -- You must have Please installed: [Install please](https://please.build/quickstart.html) -- Go must be installed: [Install Go](https://golang.org/doc/install#install) - -### What you'll learn -- Configuring Please for Go using the Go plugin -- Creating an executable Go binary -- Adding Go packages to your project -- Testing your code -- Including third-party libraries - -### What if I get stuck? - -The final result of running through this codelab can be found -[here](https://github.com/thought-machine/please-codelabs/tree/main/getting_started_go) for reference. If you really get -stuck you can find us on [gitter](https://gitter.im/please-build/Lobby)! - -## Initialising your project -Duration: 2 - -The easiest way to get started is from an existing Go module: - -```text -$ mkdir getting_started_go && cd getting_started_go -$ plz init -$ plz init plugin go -$ go mod init github.com/example/module -``` - - -### So what just happened? -You will see this has created a number of files in your working folder: -```text -$ tree -a - . - ├── go.mod - ├── pleasew - ├── plugins - │ └── BUILD - └── .plzconfig -``` - -The `go.mod` file was generated by `go` and contains information about the Go module. 
While Please doesn't directly use -this file, it can be useful for integrating your project with the Go ecosystem and IDEs. You may remove it if you wish. - -The `pleasew` script is a wrapper script that will automatically install Please if it's not already! This -means Please projects are portable and can always be built via -`git clone https://... example_module && cd example_module && ./pleasew build`. - -The `plugins/BUILD` is a file generated by `plz init plugin go` which defines a build target for the Go plugin. - -The file `.plzconfig` contains the project configuration for Please. Please will have initialised this with the Go -plugin configuration for us: - -### `.plzconfig` -``` -[parse] -preloadsubincludes = ///go//build_defs:go ; Makes the Go rules available automatically in BUILD files - -[Plugin "go"] -Target = //plugins:go -``` - -This configures the Go plugin, and makes the build definitions available in the parse context throughout the repo -automatically. Alternatively, if you're not using Go everywhere, you can remove the `preloadsubincludes` config and add -`subinclude("///go//build_defs:go")` to each `BUILD` file that needs access to Go rules. - -Read the [config](/config.html) and [go plugin config](/plugins.html#go.config) docs for more information on -configuration. - -Finally, the `plz-out` directory contains artifacts built by plz. - -## Setting up our import path -Duration: 1 - -As we've initialised a Go module, all imports should be resolved relative to the module name. 
To instruct Please to -use this import path, we have to configure the Go plugin as such: - -### `.plzconfig` -```text -[Plugin "go"] -Target = //plugins:go -ImportPath = github.com/example/module ; Should match the module name in go.mod -``` - -## Setting up your toolchain -Duration: 2 - -If you have followed the [Golang quickstart guide](https://go.dev/doc/tutorial/getting-started), or if you're using -1.20 or newer, there's a good chance additional configuration is required. There are two options for configuring your -Go toolchain with Please. - -### Recommended: managed toolchain - -The simplest way is to let Please manage your toolchain for you. The `go_toolchain()` rule will download the Go -toolchain, compiling the standard library if necessary. Simply add the following rule to your project: - -### `third_party/go/BUILD` -```python -go_toolchain( - name = "toolchain", - version = "1.20", -) -``` - -And then configure the Go plugin to use it like so: -### `.plzconfig` -```text -[Plugin "go"] -Target = //plugins:go -ImportPath = github.com/example/module -GoTool = //third_party/go:toolchain|go -``` - -### Using Go from the system PATH - -By default, Please will look for Go in the following locations: -``` -/usr/local/bin:/usr/bin:/bin -``` - -If you have Please installed elsewhere, you must configure the path like so: - -### `.plzconfig` -```text -[Build] -Path = /usr/local/go/bin:/usr/local/bin:/usr/bin:/bin -``` - -Additionally, from version 1.20, golang no longer includes the standard library with its distribution. To use 1.20 from -the path with Please, you must install it. This can be done like so: - -```text -$ GODEBUG="installgoroot=all" go install std -``` - -## Hello, world! -Duration: 4 - -Now we have a Please project, it's time to start adding some code to it! 
Let's create a "hello world" Go program: - -### `src/main.go` -```go -package main - -import "fmt" - -func main(){ - fmt.Println("Hello, world!") -} -``` - -We now need to tell Please about our Go code. Please projects define metadata about the targets that are available to be -built in `BUILD` files. Let's create a `BUILD` file to build this program: - -### `src/BUILD` -```python -go_binary( - name = "main", - srcs = ["main.go"], -) -``` - -That's it! You can now run this with: -```text -$ plz run //src:main -Hello, world! -``` - -There's a lot going on here; first off, `go_binary()` is one of the [go plugin functions](/plugins.html#go). This build -function creates a "build target" in the `src` package. A package, in the Please sense, is any directory that contains a -`BUILD` file. - -Each build target can be identified by a build label in the format `//path/to/package:label`, i.e. `//src:main`. -There are a number of things you can do with a build target such as `plz build //src:main`, however, as you've seen, -if the target is a binary, you may run it with `plz run`. - -## Adding packages -Duration: 5 - -Let's add a `src/greetings` package to our Go project: - -### `src/greetings/greetings.go` -```go -package greetings - -import ( - "math/rand" -) - -var greetings = []string{ - "Hello", - "Bonjour", - "Marhabaan", -} - -func Greeting() string { - return greetings[rand.Intn(len(greetings))] -} -``` - -We then need to tell Please how to compile this library: - -### `src/greetings/BUILD` -```python -go_library( - name = "greetings", - srcs = ["greetings.go"], - visibility = ["//src/..."], -) -``` - -We can then build it like so: - -```text -$ plz build //src/greetings -Build finished; total time 290ms, incrementality 50.0%. Outputs: -//src/greetings:greetings: - plz-out/gen/src/greetings/greetings.a -``` - -Here we can see that the output of a `go_library` rule is a `.a` file which is stored in -`plz-out/gen/src/greetings/greetings.a`. 
This is a [static library archive](https://en.wikipedia.org/wiki/Static_library) -representing the compiled output of our package. - -We have also provided a `visibility` list to this rule. This is used to control where this `go_library()` rule can be -used within our project. In this case, any rule under `src`, denoted by the `...` syntax. - -NB: This syntax can also be used on the command line e.g. `plz build //src/...` - -## Using our new package -Duration: 2 -To maintain a principled model for incremental and hermetic builds, Please requires that rules are explicit about their -inputs and outputs. To use this new package in our "hello world" program, we have to add it as a dependency: - -### `src/BUILD` -```python -go_binary( - name = "main", - srcs = ["main.go"], - # NB: if the package and rule name are the same, you may omit the name i.e. this could be just //src/greetings - deps = ["//src/greetings:greetings"], -) -``` - -You can see we use a build label to refer to another rule here. Please will make sure that this rule is built before -making its outputs available to our rule here. - -Then update `src/main.go`: -### `src/main.go` -```go -package main - -import ( - "fmt" - - "github.com/example/module/src/greetings" -) - -func main(){ - fmt.Printf("%s, world!\n", greetings.Greeting()) -} -``` - -Give it a whirl: - -```text -$ plz run //src:main -Bonjour, world! 
-``` - -## Testing our code -Duration: 5 - -Let's create a very simple test for our library: -### `src/greetings/greetings_test.go` -```go -package greetings - -import "testing" - -func TestGreeting(t *testing.T) { - if Greeting() == "" { - panic("Greeting failed to produce a result") - } -} -``` - -We then need to tell Please about our tests: -### `src/greetings/BUILD` -```python -go_library( - name = "greetings", - srcs = ["greetings.go"], - visibility = ["//src/..."], -) - -go_test( - name = "greetings_test", - srcs = ["greetings_test.go"], - # Here we have used the shorthand `:greetings` label format. This format can be used to refer to a rule in the same - # package and is shorthand for `//src/greetings:greetings`. - deps = [":greetings"], -) -``` - -We've used `go_test()`. This is a special build rule that is considered a test. These rules can be executed as such: -```text -$ plz test //src/... -//src/greetings:greetings_test 1 test run in 3ms; 1 passed -1 test target and 1 test run in 3ms; 1 passed. Total time 90ms. -``` - -Please will run all the tests it finds under `//src/...`, and aggregate the results up. This works even across -languages allowing you to test your whole project with a single command. - -### External tests - -Go has a concept of "external" tests. This means that tests can exist in the same folder as the production code, but -they have a different package. 
Please supports this through the `external = True` argument on `go_test()`: - -### `src/greetings/greetings_test.go` -```go -package greetings_test - -import ( - "testing" - - // We now need to import the "production" package - "github.com/example/module/src/greetings" -) - -func TestGreeting(t *testing.T) { - if greetings.Greeting() == "" { - panic("Greeting failed to produce a result") - } -} -``` - -### `src/greetings/BUILD` -```python -go_library( - name = "greetings", - srcs = ["greetings.go"], - visibility = ["//src/..."], -) - -go_test( - name = "greetings_test", - srcs = ["greetings_test.go"], - deps = [":greetings"], - external = True, -) -``` - -Check if it works: -```text -$ plz test //src/... -//src/greetings:greetings_test 1 test run in 3ms; 1 passed - 1 test target and 1 test run in 3ms; 1 passed. Total time 90ms. -``` -## Third-party dependencies -Duration: 7 - -To add third party dependencies to Please, the easiest way is to use `///go//tools:please_go` to resolve them, and then -add them to `third_party/go/BUILD`. 
Let's add `github.com/stretchr/testify`: - -```text -$ plz run ///go//tools:please_go -- get github.com/stretchr/testify@v1.8.2 -go_repo(module="github.com/stretchr/objx", version="v0.5.0") -go_repo(module="gopkg.in/yaml.v3", version="v3.0.1") -go_repo(module="gopkg.in/check.v1", version="v0.0.0-20161208181325-20d25e280405") -go_repo(module="github.com/stretchr/testify", version="v1.8.2") -go_repo(module="github.com/davecgh/go-spew", version="v1.1.1") -go_repo(module="github.com/pmezard/go-difflib", version="v1.0.0") -``` - -We can then add them to `third_party/go/BUILD`: -```python -# We give direct modules a name and install list so we can reference them nicely -go_repo( - name = "testify", - module = "github.com/stretchr/testify", - version="v1.8.2", - # We add the subset of packages we actually depend on here - install = [ - "assert", - "require", - ] -) - -# Indirect modules are referenced internally, so we don't have to name them if we don't want to. They can still be -# referenced by the following build label naming convention: ///third_party/go/github.com_owner_repo//package. -# -# NB: Any slashes in the module name will be replaced by _ -go_repo(module="github.com/davecgh/go-spew", version="v1.1.1") -go_repo(module="github.com/pmezard/go-difflib", version="v1.0.0") -go_repo(module="github.com/stretchr/objx", version="v0.5.0") -go_repo(module="gopkg.in/yaml.v3", version="v3.0.1") -go_repo(module="gopkg.in/check.v1", version="v0.0.0-20161208181325-20d25e280405") -``` - -More information as to how `go_repo` works can be found -[here](/plugins.html#go_repo). - -NB: This build label looks a little different. That's because it's referencing a build target in a subrepo. 
-### Updating our tests - -We can now use this library in our tests: - -### `src/greetings/greetings_test.go` -```go -package greetings_test - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/example/module/src/greetings" -) - -func TestGreeting(t *testing.T) { - assert.NotEqual(t, greetings.Greeting(), "") -} -``` - -### `src/greetings/BUILD` -```python -go_library( - name = "greetings", - srcs = ["greetings.go"], - visibility = ["//src/..."], -) - -go_test( - name = "greetings_test", - srcs = ["greetings_test.go"], - deps = [ - ":greetings", - # Could use a subrepo label i.e. ///third_party/go/github.com_stretchr_testify//assert instead if we want - "//third_party/go:testify", - ], - external = True, -) -``` - -And then we can check it all works: -```text -$ plz test -//src/greetings:greetings_test 1 test run in 3ms; 1 passed -1 test target and 1 test run; 1 passed. -Total time: 480ms real, 0s compute. -``` - -## What next? -Duration: 1 - -Hopefully you now have an idea as to how to build Go with Please. Please is capable of so much more though! - -- [Please basics](/basics.html) - A more general introduction to Please. It covers a lot of what we have in this -tutorial in more detail. -- [Go plugin rules](/plugins.html#go) - See the rest of the Go plugin rules and config. -- [Built-in rules](/lexicon.html#go) - See the rest of the built in rules. -- [Config](/config.html) - See the available config options for Please. -- [Command line interface](/commands.html) - Please has a powerful command line interface. Interrogate the build graph, -determine files changes since master, watch rules and build them automatically as things change and much more! Use -`plz help`, and explore this rich set of commands! - -Otherwise, why not try one of the other codelabs! 
+summary: Getting started with Go +description: Building and testing with Go and Please, as well as managing third-party dependencies via go_repo +id: go_intro +categories: beginner +tags: medium +status: Published +authors: Jon Poole +Feedback Link: https://github.com/thought-machine/please + +# Getting started with Go +## Overview +Duration: 4 + +### Prerequisites +- You must have Please installed: [Install please](https://please.build/quickstart.html) +- Go must be installed: [Install Go](https://golang.org/doc/install#install) + +### What you'll learn +- Configuring Please for Go using the Go plugin +- Creating an executable Go binary +- Adding Go packages to your project +- Testing your code +- Including third-party libraries + +### What if I get stuck? + +The final result of running through this codelab can be found +[here](https://github.com/thought-machine/please-codelabs/tree/main/getting_started_go) for reference. If you really get +stuck you can find us on [gitter](https://gitter.im/please-build/Lobby)! + +## Initialising your project +Duration: 2 + +The easiest way to get started is from an existing Go module: + +```bash +mkdir getting_started_go && cd getting_started_go +plz init +plz init plugin go +go mod init github.com/example/module +``` + + +### So what just happened? +You will see this has created a number of files in your working folder: +```text +$ tree -a + . + ├── go.mod + ├── pleasew + ├── plugins + │ └── BUILD + └── .plzconfig +``` + +The `go.mod` file was generated by `go` and contains information about the Go module. While Please doesn't directly use +this file, it can be useful for integrating your project with the Go ecosystem and IDEs. You may remove it if you wish. + +The `pleasew` script is a wrapper script that will automatically install Please if it's not already! This +means Please projects are portable and can always be built via +`git clone https://... example_module && cd example_module && ./pleasew build`. 
+ +The `plugins/BUILD` is a file generated by `plz init plugin go` which defines a build target for the Go plugin. + +The file `.plzconfig` contains the project configuration for Please. Please will have initialised this with the Go +plugin configuration for us: + +### `.plzconfig` +``` +[parse] +preloadsubincludes = ///go//build_defs:go ; Makes the Go rules available automatically in BUILD files + +[Plugin "go"] +Target = //plugins:go +``` + +This configures the Go plugin, and makes the build definitions available in the parse context throughout the repo +automatically. Alternatively, if you're not using Go everywhere, you can remove the `preloadsubincludes` config and add +`subinclude("///go//build_defs:go")` to each `BUILD` file that needs access to Go rules. + +### Troubleshooting: "unknown rule go_binary" +Duration: 1 + +Seeing `unknown rule go_binary` (or similar for other Go rules) means the plugin was not loaded. Confirm the plugin target exists and re-run the init script if needed. + +**Fix checklist** +- `plz query config Plugin.go.Target` should report `//plugins:go`. +- Ensure `plugins/BUILD` is present and contains the Go plugin target. +- If `.plzconfig` was edited manually, re-run `plz init plugin go` or restore the snippet above. + +Read the [config](/config.html) and [go plugin config](/plugins.html#go.config) docs for more information on +configuration. + +Finally, the `plz-out` directory contains artifacts built by plz. + +## Setting up our import path +Duration: 1 + +As we've initialised a Go module, all imports should be resolved relative to the module name. 
To instruct Please to +use this import path, we have to configure the Go plugin as such: + +### `.plzconfig` +```text +[Plugin "go"] +Target = //plugins:go +ImportPath = github.com/example/module ; Should match the module name in go.mod +``` + +## Setting up your toolchain +Duration: 2 + +If you have followed the [Golang quickstart guide](https://go.dev/doc/tutorial/getting-started), or if you're using +1.20 or newer, there's a good chance additional configuration is required. There are two options for configuring your +Go toolchain with Please. + +### Recommended: managed toolchain + +The simplest way is to let Please manage your toolchain for you. The `go_toolchain()` rule will download the Go +toolchain, compiling the standard library if necessary. Simply add the following rule to your project: + +### `third_party/go/BUILD` +```python +go_toolchain( + name = "toolchain", + version = "1.20", +) +``` + +And then configure the Go plugin to use it like so: +### `.plzconfig` +```text +[Plugin "go"] +Target = //plugins:go +ImportPath = github.com/example/module +GoTool = //third_party/go:toolchain|go +``` + +### Using Go from the system PATH + +By default, Please will look for Go in the following locations: +``` +/usr/local/bin:/usr/bin:/bin +``` + +If you have Please installed elsewhere, you must configure the path like so: + +### `.plzconfig` +```text +[Build] +Path = /usr/local/go/bin:/usr/local/bin:/usr/bin:/bin +``` + +Additionally, from version 1.20, golang no longer includes the standard library with its distribution. To use 1.20 from +the path with Please, you must install it. This can be done like so: + +```bash +GODEBUG="installgoroot=all" go install std +``` + +## Hello, world! +Duration: 4 + +Now we have a Please project, it's time to start adding some code to it! 
Let's create a "hello world" Go program: + +### `src/main.go` +```go +package main + +import "fmt" + +func main() { + fmt.Println("Hello, world!") +} +``` + +We now need to tell Please about our Go code. Please projects define metadata about the targets that are available to be +built in `BUILD` files. Let's create a `BUILD` file to build this program: + +### `src/BUILD` +```python +go_binary( + name = "main", + srcs = ["main.go"], +) +``` + +That's it! You can now run this with: + +```bash +plz run //src:main +``` + +You should see the output: + +```text +Hello, world! +``` + +There's a lot going on here; first off, `go_binary()` is one of the [go plugin functions](/plugins.html#go). This build +function creates a "build target" in the `src` package. A package, in the Please sense, is any directory that contains a +`BUILD` file. + +Each build target can be identified by a build label in the format `//path/to/package:label`, i.e. `//src:main`. +There are a number of things you can do with a build target such as `plz build //src:main`, however, as you've seen, +if the target is a binary, you may run it with `plz run`. + +## Adding packages +Duration: 5 + +Let's add a `src/greetings` package to our Go project: + +### `src/greetings/greetings.go` +```go +package greetings + +import ( + "math/rand" +) + +var greetings = []string{ + "Hello", + "Bonjour", + "Marhabaan", +} + +func Greeting() string { + return greetings[rand.Intn(len(greetings))] +} +``` + +We then need to tell Please how to compile this library: + +### `src/greetings/BUILD` +```python +go_library( + name = "greetings", + srcs = ["greetings.go"], + visibility = ["//src/..."], +) +``` + +Then run the following command to build the greetings package: + +```bash +plz build //src/greetings +``` + +You should see output similar to: + +```text +Build finished; total time 290ms, incrementality 50.0%. 
Outputs: +//src/greetings:greetings: + plz-out/gen/src/greetings/greetings.a +``` + +Here we can see that the output of a `go_library` rule is a `.a` file which is stored in +`plz-out/gen/src/greetings/greetings.a`. This is a [static library archive](https://en.wikipedia.org/wiki/Static_library) +representing the compiled output of our package. + +We have also provided a `visibility` list to this rule. This is used to control where this `go_library()` rule can be +used within our project. In this case, any rule under `src`, denoted by the `...` syntax. + +NB: This syntax can also be used on the command line e.g. `plz build //src/...` + +## Using our new package +Duration: 2 +To maintain a principled model for incremental and hermetic builds, Please requires that rules are explicit about their +inputs and outputs. To use this new package in our "hello world" program, we have to add it as a dependency of our binary rule: + +### `src/BUILD` +```python +go_binary( + name = "main", + srcs = ["main.go"], + # NB: if the package and rule name are the same, you may omit the name i.e. this could be just //src/greetings + deps = ["//src/greetings:greetings"], +) +``` + +You can see we use a build label to refer to another rule here. Please will make sure that this rule is built before +making its outputs available to our rule here. + +Then update `src/main.go`: +### `src/main.go` +```go +package main + +import ( + "fmt" + + "github.com/example/module/src/greetings" +) + +func main() { + fmt.Printf("%s, world!\n", greetings.Greeting()) +} +``` + +Give it a whirl by running the following command: + +```text +$ plz run //src:main +Bonjour, world! +``` + +The greeting is selected at random, so your output may vary each time you run the command. 
+ +## Testing our code +Duration: 5 + +Let's create a very simple test for our library: +### `src/greetings/greetings_test.go` +```go +package greetings + +import "testing" + +func TestGreeting(t *testing.T) { + if Greeting() == "" { + panic("Greeting failed to produce a result") + } +} +``` + +We then need to tell Please about our tests: +### `src/greetings/BUILD` +```python +go_library( + name = "greetings", + srcs = ["greetings.go"], + visibility = ["//src/..."], +) + +go_test( + name = "greetings_test", + srcs = ["greetings_test.go"], + # Here we have used the shorthand `:greetings` label format. This format can be used to refer to a rule in the same + # package and is shorthand for `//src/greetings:greetings`. + deps = [":greetings"], +) +``` + +We've used `go_test()`. This is a special build rule that is considered a test. These rules can be executed as such: +```text +$ plz test //src/... +//src/greetings:greetings_test 1 test run in 3ms; 1 passed +1 test target and 1 test run in 3ms; 1 passed. Total time 90ms. +``` + +Please will run all the tests it finds under `//src/...`, and aggregate the results up. This works even across +languages allowing you to test your whole project with a single command. + +### External tests + +Go has a concept of "external" tests. This means that tests can exist in the same folder as the production code, but +they have a different package. 
Please supports this through the `external = True` argument on `go_test()`: + +### `src/greetings/greetings_test.go` +```go +package greetings_test + +import ( + "testing" + + // We now need to import the "production" package + "github.com/example/module/src/greetings" +) + +func TestGreeting(t *testing.T) { + if greetings.Greeting() == "" { + panic("Greeting failed to produce a result") + } +} +``` + +### `src/greetings/BUILD` +```python +go_library( + name = "greetings", + srcs = ["greetings.go"], + visibility = ["//src/..."], +) + +go_test( + name = "greetings_test", + srcs = ["greetings_test.go"], + deps = [":greetings"], + external = True, +) +``` + +Check if it works: +```text +$ plz test //src/... +//src/greetings:greetings_test 1 test run in 3ms; 1 passed + 1 test target and 1 test run in 3ms; 1 passed. Total time 90ms. +``` +## Third-party dependencies +Duration: 7 + +To add third party dependencies to Please, the easiest way is to use `///go//tools:please_go` to resolve them, and then +add them to `third_party/go/BUILD`. 
Let's add `github.com/stretchr/testify`: + +```text +$ plz run ///go//tools:please_go -- get github.com/stretchr/testify@v1.8.2 +go_repo(module="github.com/stretchr/objx", version="v0.5.0") +go_repo(module="gopkg.in/yaml.v3", version="v3.0.1") +go_repo(module="gopkg.in/check.v1", version="v0.0.0-20161208181325-20d25e280405") +go_repo(module="github.com/stretchr/testify", version="v1.8.2") +go_repo(module="github.com/davecgh/go-spew", version="v1.1.1") +go_repo(module="github.com/pmezard/go-difflib", version="v1.0.0") +``` + +We can then add them to `third_party/go/BUILD`: +```python +# We give direct modules a name and install list so we can reference them nicely +go_repo( + name = "testify", + module = "github.com/stretchr/testify", + version="v1.8.2", + # We add the subset of packages we actually depend on here + install = [ + "assert", + "require", + ] +) + +# Indirect modules are referenced internally, so we don't have to name them if we don't want to. They can still be +# referenced by the following build label naming convention: ///third_party/go/github.com_owner_repo//package. +# +# NB: Any slashes in the module name will be replaced by _ +go_repo(module="github.com/davecgh/go-spew", version="v1.1.1") +go_repo(module="github.com/pmezard/go-difflib", version="v1.0.0") +go_repo(module="github.com/stretchr/objx", version="v0.5.0") +go_repo(module="gopkg.in/yaml.v3", version="v3.0.1") +go_repo(module="gopkg.in/check.v1", version="v0.0.0-20161208181325-20d25e280405") +``` + +More information as to how `go_repo` works can be found +[here](/plugins.html#go_repo). + +NB: This build label looks a little different. That's because it's referencing a build target in a subrepo. 
+### Updating our tests + +We can now use this library in our tests: + +### `src/greetings/greetings_test.go` +```go +package greetings_test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/example/module/src/greetings" +) + +func TestGreeting(t *testing.T) { + assert.NotEqual(t, greetings.Greeting(), "") +} +``` + +### `src/greetings/BUILD` +```python +go_library( + name = "greetings", + srcs = ["greetings.go"], + visibility = ["//src/..."], +) + +go_test( + name = "greetings_test", + srcs = ["greetings_test.go"], + deps = [ + ":greetings", + # Could use a subrepo label i.e. ///third_party/go/github.com_stretchr_testify//assert instead if we want + "//third_party/go:testify", + ], + external = True, +) +``` + +And then we can check it all works: +```text +$ plz test +//src/greetings:greetings_test 1 test run in 3ms; 1 passed +1 test target and 1 test run; 1 passed. +Total time: 480ms real, 0s compute. +``` + +## What next? +Duration: 1 + +Hopefully you now have an idea as to how to build Go with Please. Please is capable of so much more though! + +- [Please basics](/basics.html) - A more general introduction to Please. It covers a lot of what we have in this +tutorial in more detail. +- [Go plugin rules](/plugins.html#go) - See the rest of the Go plugin rules and config. +- [Built-in rules](/lexicon.html#go) - See the rest of the built in rules. +- [Config](/config.html) - See the available config options for Please. +- [Command line interface](/commands.html) - Please has a powerful command line interface. Interrogate the build graph, +determine files changes since master, watch rules and build them automatically as things change and much more! Use +`plz help`, and explore this rich set of commands! + +Otherwise, why not try one of the other codelabs! 
diff --git a/docs/codelabs/k8s.md b/docs/codelabs/k8s.md index 29ab44ced..7ae13be8d 100644 --- a/docs/codelabs/k8s.md +++ b/docs/codelabs/k8s.md @@ -1,454 +1,456 @@ -id: k8s -summary: Kubernetes and Docker -description: Learn about using Please to build and deploy Docker images and Kubernetes manifests -categories: intermediate -tags: medium -status: Published -authors: Jon Poole -Feedback Link: https://github.com/thought-machine/please - -# Kubernetes and Docker -## Overview -Duration: 1 - -### Prerequisites -- You must have Please installed: [Install Please](https://please.build/quickstart.html) -- You should be comfortable using the existing build rules. -- You should be familiar with [Docker](https://docs.docker.com/get-started/) - and [Kubernetes](https://kubernetes.io/docs/tutorials/kubernetes-basics/) - -This codelab uses Golang for the example service however the language used for this service isn't that important. Just -make sure you're able to build a binary in whatever your preferred language is. - -### What you'll learn -This codelab is quite long and tries to give an idea of what a complete build pipeline might look like for a docker and -kubernetes based project. You'll learn: - -- How to build a service and bake that into docker image -- How to build a kubernetes deployment for that docker image -- Starting minikube and testing your deployment out -- Setting up aliases to streamline your dev workflow - -### What if I get stuck? - -The final result of running through this codelab can be found -[here](https://github.com/thought-machine/please-codelabs/tree/main/kubernetes_and_docker) for reference. If you really get stuck -you can find us on [gitter](https://gitter.im/please-build/Lobby)! - -## Creating a service -Duration: 5 - -First up, let create a service to deploy. It's not really important what it does or what language we implement it in. -For the sake of this codelabs, we'll make a simple hello world HTTP service in Python. 
- -### Initialising the project -``` -$ plz init -$ go mod init github.com/example/module -$ plz init plugin go -``` - -### Set up the Go plugin - -Add a go toolchain to `third_party/go/BUILD` -```python -go_toolchain( - name = "toolchain", - version = "1.20", -) -``` - -And configure the plugin: -``` -[Plugin "go"] -Target = //plugins:go -ImportPath = github.com/example/module -GoTool = //third_party/go:toolchain|go -``` - -For more information on this, check out the Go codelab. - -### Creating a Go service -Create a file `hello_service/service.go`: - -```golang -package main - -import ( - "fmt" - "log" - "net/http" -) - -func main() { - http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { - fmt.Fprintln(w, "This is my website!") - }) - - http.HandleFunc("/hello", func(w http.ResponseWriter, r *http.Request) { - fmt.Fprintln(w, "Hello, HTTP!") - }) - - err := http.ListenAndServe(":8000", nil) - if err != nil { - log.Fatal("Error starting the server: ", err) - } -} -``` - -Then create a `hello_service/BUILD` file like so: -```python -go_binary( - name = "hello_service", - srcs = ["service.go"], - visibility = ["//hello_service/k8s:all"], -) -``` - -And test it works: - -``` -$ plz run //hello_service & -[1] 28694 - -$ curl localhost:8000 -Hello, world! - -$ pkill hello_service -[1]+ Terminated plz run //hello_service -``` - -## Building a Docker image -Duration: 5 - -Before we create a docker image for our service, it can be useful to create a base image that all our services share. -This can be used this to install language runtimes e.g. a python interpreter. If you're using a language that requires -a runtime, this is where you should install it. In this case, we're using Go so this isn't strictly necessary. 
- -Let's create a base docker file for our repo that all our services will use in `common/docker/Dockerfile-base`: -``` -FROM ubuntu:22.04 - -RUN apt update -y && apt upgrade -y -``` - -### Docker build rules - -To use the docker build rules, we need to install the docker plugin, as well as the shell plugin which it requires: - -`$ plz init plugin shell && plz init plugin docker` - -We can then build a set of scripts that help us build, and push our docker images: - -```python -docker_image( - name = "base", - dockerfile = "Dockerfile-base", - visibility = ["PUBLIC"], -) -``` - -And then let's build that: -``` -$ plz build //common/docker:base -Build finished; total time 80ms, incrementality 40.0%. Outputs: -//common/docker:base: - plz-out/bin/common/docker/base.sh -``` - -### So what's going on? -As promised, the output of the docker image rule is a script that can build the docker image for you. We can have a -look at what the script is doing: - -``` -$ cat plz-out/bin/common/docker/base.sh -#!/bin/sh -docker build -t please-examples/base:0d45575ad71adea9861b079e5d56ff0bdc179a1868d06d6b3d102721824c1538 \ - -f Dockerfile-base - < plz-out/gen/common/docker/_base#docker_context.tar.gz -``` - -There's a couple key things to note: -- The image has been tagged with a hash based on the inputs to the rule. This means that we can always refer -back to this specific version of this image. -- It's generated us a `tar.gz` containing all the other files we might need to build the Docker image. - -We can run this script to build the image and push it to the docker daemon as set in our docker env: -``` -$ plz run //common/docker:base -``` - -## Using our base image -Duration: 5 - -So now we have a base image, let's use it for our docker image. 
Create a `hello_service/k8s/Dockerfile` for our hello -service: - -``` -FROM //common/docker:base - -COPY /hello_service /hello_service - -ENTRYPOINT [ "/hello_service" ] -``` - -And then set up some build rules for that in `hello_service/k8s/BUILD`: - -``` -docker_image( - name = "image", - srcs = ["//hello_service"], - dockerfile = "Dockerfile", - base_image = "//common/docker:base", -) -``` - -Let's build this and have a look at the script it generates: - -``` -$ plz build //hello_service/k8s:image -Build finished; total time 100ms, incrementality 100.0%. Outputs: -//hello_service/k8s:image: - plz-out/bin/hello_service/k8s/image.sh - -$ cat plz-out/bin/hello_service/k8s/image.sh -#!/bin/sh -./plz-out/bin/common/docker/base.sh \ - && docker build -t please-example/image:0d45575ad71adea9861b079e5d56ff0bdc179a1868d06d6b3d102721824c1538 -f \ - Dockerfile - < plz-out/gen/hello_service/k8s/_image#docker_context.tar.gz -``` - -Note, this script takes care of building the base image for us, so we don't have to orchestrate this ourselves. - -## Creating a Kubernetes deployment -Duration: 5 - -Let's create `hello_service/k8s/deployment.yaml` for our service: -```yaml -apiVersion: apps/v1 -kind: Deployment -metadata: - name: hello - labels: - app: hello -spec: - replicas: 3 - selector: - matchLabels: - app: hello - template: - metadata: - labels: - app: hello - spec: - containers: - - name: main - image: //hello_service/k8s:image - ports: - # This must match the port we start the server on in hello-service/main.py - - containerPort: 8000 -``` - -Let's also create `hello_service/k8s/service.yaml` for good measure: -```yaml -apiVersion: v1 -kind: Service -metadata: - name: hello-svc -spec: - selector: - app: hello - ports: - - protocol: TCP - port: 8000 - targetPort: 8000 -``` - -### Kubernetes rules -Note that we've referenced the image `//hello-service/k8s:image` in the deployment. 
The kubernetes rules are able to -template your yaml files substituting in the image with the correct label based on the version of the image we just -built! This ties all the images and kubernetes manifests together based on the current state of the repo making the -deployment much more reproducible! - -To add the kubernetes rules, run `plz init plugin k8s`. - -Let's update `hello_service/k8s/BUILD` to build these manifests: - -```python -docker_image( - name = "image", - srcs = ["//hello_service"], - dockerfile = "Dockerfile", - base_image = "//common/docker:base", -) - -k8s_config( - name = "k8s", - srcs = [ - "deployment.yaml", - "service.yaml", - ], - containers = [":image"], -) -``` - -And check that has done the right thing: -``` -$ plz build //hello_service/k8s -Build finished; total time 90ms, incrementality 90.9%. Outputs: -//hello_service/k8s:k8s: - plz-out/gen/hello_service/k8s/templated_deployment.yaml - plz-out/gen/hello_service/k8s/templated_service.yaml - - -$ cat plz-out/gen/hello_service/k8s/templated_deployment.yaml -apiVersion: apps/v1 -kind: Deployment -metadata: - name: hello - labels: - app: hello -spec: - replicas: 3 - selector: - matchLabels: - app: hello - template: - metadata: - labels: - app: hello - spec: - containers: - - name: main - image: please-example/image:0d45575ad71adea9861b079e5d56ff0bdc179a1868d06d6b3d102721824c1538 - ports: - # This must match the port we start the server on in hello-service/main.py - - containerPort: 8000 -``` - -As you can see, this image matches the image we built earlier! These rules also provide a useful script for pushing -the manifests to kubernetes: - -``` -$ plz build //hello_service/k8s:k8s_push -Build finished; total time 140ms, incrementality 100.0%. 
Outputs: -//hello_service/k8s:k8s_push: - plz-out/bin/hello_service/k8s/k8s_push.sh - -$ cat plz-out/bin/hello_service/k8s/k8s_push.sh -#!/bin/sh -kubectl apply -f plz-out/gen/hello_service/k8s/templated_deployment.yaml && \ -kubectl apply -f plz-out/gen/hello_service/k8s/templated_service.yaml -``` - -## Local testing with minikube -Duration: 5 - -Let's tie this all together by deploying our service to minikube! - -### Setting up minikube -We can get Please to download minikube for us. Let's create `third_party/binary/BUILD` to do so: - -``` -remote_file ( - name = "minikube", - url = f"https://storage.googleapis.com/minikube/releases/latest/minikube-{CONFIG.OS}-{CONFIG.ARCH}", - binary = True, -) -``` - -And then we can start the cluster like so: -``` -$ plz run //third_party/binary:minikube -- start -``` - -### Deploying our service - -First we need to push our images to minikube's docker. To do this we need to point `docker` at minikube: - -``` -$ eval $(plz run //third_party/binary:minikube -- docker-env) -``` - -Then we can run our deployment scripts: - -``` -$ plz run //hello_service/k8s:image_load && plz run //hello_service/k8s:k8s_push -``` - -And check they're working as we expected: - -``` -$ kubectl port-forward service/hello-svc 8000:8000 & -[1] 25986 - -$ curl localhost:8000 -Hello world! - -$ pkill kubectl -[1]+ Terminated kubectl kubectl port-forward service/hello-svc 8000:8000 -``` - -## Please deploy -Duration: 5 - -Here we have learnt about the provided targets we need to run to get our changes deployed to minikube, however it's a -bit of a ritual. Let's look at consolidating this into a single command. Luckily the generated targets are labeled so -this is as simple as: - -``` -$ plz run sequential --include docker-build --include k8s-push //hello_service/... 
-``` - -We can then set up an alias for this in `.plzconfig`: - -``` -[alias "deploy"] -cmd = run sequential --include docker-build --include k8s-push -; Enable tab completion for build labels -positionallabels = true -``` - -This is used like: - -``` -$ plz deploy //hello_service/... -``` - -## Docker build and build systems -Duration: 7 - -To finish this off, it's worth talking about the challenges with building docker images from Docker files in a -file based build system. - -Integrating a build system with `docker build` is notoriously difficult. Build systems have trouble building your image -as `docker build` sends the image to a daemon running in the background. There's no easy way to get a file based artifact -out of Docker without this extra infrastructure. The built in rules produce a number of scripts to help build, load, -push and save images: - -``` -docker_image( - name = "image", - srcs = [":example"], - base_image = ":base", - run_args = "-p 8000:8000", - visibility = ["//k8s/example:all"], -) -``` - -This single target produces the following sub-targets: - -- `:image_fqn` target contains the fully qualified name of the generated image. Each image gets tagged with the hash -of its inputs so this can be relied upon to uniquely identify this image. -- `:image` & `:image_load` are the same script. This script loads the image into the local docker daemon. It will -also make sure the base image is build and loaded first. -- `:image_push` will load and push the image to the docker registry as configured by your local machines docker -environment. -- `:image_save` will load and then save the image to a `.tar` in `plz-out/gen` -- `:image_run` will run the image in the local docker env - -There are two ways we anticipate these targets to be used as part of a CI/CD pipeline: - -- The build server can be given access to the docker registry, and the images can be loaded directly with `:image_push`. 
-- The build server can save the images out to an offline image tarball with `:image_save`. These can be exported as -artifacts from the build server. Another stage of the CI/CD pipeline can then push these to the docker registry via -`docker load`. +id: k8s +summary: Kubernetes and Docker +description: Learn about using Please to build and deploy Docker images and Kubernetes manifests +categories: intermediate +tags: medium +status: Published +authors: Jon Poole +Feedback Link: https://github.com/thought-machine/please + +# Kubernetes and Docker +## Overview +Duration: 1 + +### Prerequisites +- You must have Please installed: [Install Please](https://please.build/quickstart.html) +- You should be comfortable using the existing build rules. +- You should be familiar with [Docker](https://docs.docker.com/get-started/) + and [Kubernetes](https://kubernetes.io/docs/tutorials/kubernetes-basics/) + +This codelab uses Golang for the example service however the language used for this service isn't that important. Just +make sure you're able to build a binary in whatever your preferred language is. + +### What you'll learn +This codelab is quite long and tries to give an idea of what a complete build pipeline might look like for a docker and +kubernetes based project. You'll learn: + +- How to build a service and bake that into docker image +- How to build a kubernetes deployment for that docker image +- Starting minikube and testing your deployment out +- Setting up aliases to streamline your dev workflow + +### What if I get stuck? + +The final result of running through this codelab can be found +[here](https://github.com/thought-machine/please-codelabs/tree/main/kubernetes_and_docker) for reference. If you really get stuck +you can find us on [gitter](https://gitter.im/please-build/Lobby)! + +## Creating a service +Duration: 5 + +First up, let's create a service to deploy. It's not really important what it does or what language we implement it in. 
+
+For this codelab, we'll make a simple hello world HTTP service in Go.
+
+### Initialising the project
+```bash
+plz init
+go mod init github.com/example/module
+plz init plugin go
+```
+
+### Set up the Go plugin
+
+Add a go toolchain to `third_party/go/BUILD`
+```go
+go_toolchain(
+    name = "toolchain",
+    version = "1.20",
+)
+```
+
+And configure the plugin:
+```
+[Plugin "go"]
+Target = //plugins:go
+ImportPath = github.com/example/module
+GoTool = //third_party/go:toolchain|go
+```
+
+For more information on this, check out the Go codelab.
+
+### Creating a Go service
+Create a file `hello_service/service.go`:
+
+```golang
+package main
+
+import (
+    "fmt"
+    "log"
+    "net/http"
+)
+
+func main() {
+    http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
+        fmt.Fprintln(w, "This is my website!")
+    })
+
+    http.HandleFunc("/hello", func(w http.ResponseWriter, r *http.Request) {
+        fmt.Fprintln(w, "Hello, HTTP!")
+    })
+
+    if err := http.ListenAndServe(":8000", nil); err != nil {
+        log.Fatal("Error starting the server: ", err)
+    }
+}
+```
+
+Then create a `hello_service/BUILD` file like so:
+```python
+go_binary(
+    name = "hello_service",
+    srcs = ["service.go"],
+    visibility = ["//hello_service/k8s:all"],
+)
+```
+
+And test it works:
+
+```bash
+plz run //hello_service:hello_service &
+curl localhost:8000
+pkill hello_service
+```
+
+The output should look like this:
+```bash
+[1] 28694
+Hello, world!
+[1]+ Terminated plz run //hello_service
+```
+
+## Building a Docker image
+Duration: 5
+
+Before we create a docker image for our service, it can be useful to create a base image that all our services share.
+This can be used to install language runtimes e.g. a python interpreter. If you're using a language that requires
+a runtime, this is where you should install it. In this case, we're using Go so this isn't strictly necessary. 
+ +Let's create a base docker file for our repo that all our services will use in `common/docker/Dockerfile-base`: +``` +FROM ubuntu:22.04 + +RUN apt update -y && apt upgrade -y +``` + +### Docker build rules + +To use the docker build rules, we need to install the docker plugin, as well as the shell plugin which it requires: + +```bash +plz init plugin shell && plz init plugin docker +``` + +We can then build a set of scripts that help us build, and push our docker images. Add the following to `common/docker/BUILD`: + +```python +docker_image( + name = "base", + dockerfile = "Dockerfile-base", + visibility = ["PUBLIC"], +) +``` + +And then let's build that: +``` +$ plz build //common/docker:base +Build finished; total time 80ms, incrementality 40.0%. Outputs: +//common/docker:base: + plz-out/bin/common/docker/base.sh +``` + +### So what's going on? +As promised, the output of the docker image rule is a script that can build the docker image for you. We can have a +look at what the script is doing: + +```bash +$ cat plz-out/bin/common/docker/base.sh +#!/bin/sh +docker build -t please-examples/base:0d45575ad71adea9861b079e5d56ff0bdc179a1868d06d6b3d102721824c1538 \ + -f Dockerfile-base - < plz-out/gen/common/docker/_base#docker_context.tar.gz +``` + +There's a couple key things to note: +- The image has been tagged with a hash based on the inputs to the rule. This means that we can always refer +back to this specific version of this image. +- It's generated us a `tar.gz` containing all the other files we might need to build the Docker image. + +We can run this script to build the image and push it to the docker daemon as set in our docker env: +```bash +plz run //common/docker:base +``` + +## Using our base image +Duration: 5 + +So now we have a base image, let's use it for our docker image. 
Create a `hello_service/k8s/Dockerfile` for our hello +service: + +``` +FROM //common/docker:base + +COPY /hello_service /hello_service + +ENTRYPOINT [ "/hello_service" ] +``` + +And then set up some build rules for that in `hello_service/k8s/BUILD`: + +``` +docker_image( + name = "image", + srcs = ["//hello_service"], + dockerfile = "Dockerfile", + base_image = "//common/docker:base", +) +``` + +Let's build this and have a look at the script it generates: + +``` +$ plz build //hello_service/k8s:image +Build finished; total time 100ms, incrementality 100.0%. Outputs: +//hello_service/k8s:image: + plz-out/bin/hello_service/k8s/image.sh + +$ cat plz-out/bin/hello_service/k8s/image.sh +#!/bin/sh +./plz-out/bin/common/docker/base.sh \ + && docker build -t please-example/image:0d45575ad71adea9861b079e5d56ff0bdc179a1868d06d6b3d102721824c1538 -f \ + Dockerfile - < plz-out/gen/hello_service/k8s/_image#docker_context.tar.gz +``` + +Note, this script takes care of building the base image for us, so we don't have to orchestrate this ourselves. + +## Creating a Kubernetes deployment +Duration: 5 + +Let's create `hello_service/k8s/deployment.yaml` for our service: +```yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: hello + labels: + app: hello +spec: + replicas: 3 + selector: + matchLabels: + app: hello + template: + metadata: + labels: + app: hello + spec: + containers: + - name: main + image: //hello_service/k8s:image + ports: + # This must match the port we start the server on in hello-service/main.py + - containerPort: 8000 +``` + +Let's also create `hello_service/k8s/service.yaml` for good measure: +```yaml +apiVersion: v1 +kind: Service +metadata: + name: hello-svc +spec: + selector: + app: hello + ports: + - protocol: TCP + port: 8000 + targetPort: 8000 +``` + +### Kubernetes rules +Note that we've referenced the image `//hello-service/k8s:image` in the deployment. 
The kubernetes rules are able to +template your yaml files substituting in the image with the correct label based on the version of the image we just +built! This ties all the images and kubernetes manifests together based on the current state of the repo making the +deployment much more reproducible! + +To add the kubernetes rules, run `plz init plugin k8s`. + +Let's update `hello_service/k8s/BUILD` to build these manifests: + +```python +docker_image( + name = "image", + srcs = ["//hello_service"], + dockerfile = "Dockerfile", + base_image = "//common/docker:base", +) + +k8s_config( + name = "k8s", + srcs = [ + "deployment.yaml", + "service.yaml", + ], + containers = [":image"], +) +``` + +And check that has done the right thing: +``` +$ plz build //hello_service/k8s +Build finished; total time 90ms, incrementality 90.9%. Outputs: +//hello_service/k8s:k8s: + plz-out/gen/hello_service/k8s/templated_deployment.yaml + plz-out/gen/hello_service/k8s/templated_service.yaml + + +$ cat plz-out/gen/hello_service/k8s/templated_deployment.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: hello + labels: + app: hello +spec: + replicas: 3 + selector: + matchLabels: + app: hello + template: + metadata: + labels: + app: hello + spec: + containers: + - name: main + image: please-example/image:0d45575ad71adea9861b079e5d56ff0bdc179a1868d06d6b3d102721824c1538 + ports: + # This must match the port we start the server on in hello-service/main.py + - containerPort: 8000 +``` + +As you can see, this image matches the image we built earlier! These rules also provide a useful script for pushing +the manifests to kubernetes: + +``` +$ plz build //hello_service/k8s:k8s_push +Build finished; total time 140ms, incrementality 100.0%. 
Outputs: +//hello_service/k8s:k8s_push: + plz-out/bin/hello_service/k8s/k8s_push.sh + +$ cat plz-out/bin/hello_service/k8s/k8s_push.sh +#!/bin/sh +kubectl apply -f plz-out/gen/hello_service/k8s/templated_deployment.yaml && \ +kubectl apply -f plz-out/gen/hello_service/k8s/templated_service.yaml +``` + +## Local testing with minikube +Duration: 5 + +Let's tie this all together by deploying our service to minikube! + +### Setting up minikube +We can get Please to download minikube for us. Let's create `third_party/binary/BUILD` to do so: + +``` +remote_file ( + name = "minikube", + url = f"https://storage.googleapis.com/minikube/releases/latest/minikube-{CONFIG.OS}-{CONFIG.ARCH}", + binary = True, +) +``` + +And then we can start the cluster like so: +```bash +plz run //third_party/binary:minikube -- start +``` + +### Deploying our service + +First we need to push our images to minikube's docker. To do this we need to point `docker` at minikube: + +```bash +eval $(plz run //third_party/binary:minikube -- docker-env) +``` + +Then we can run our deployment scripts: + +```bash +plz run //hello_service/k8s:image_load && plz run //hello_service/k8s:k8s_push +``` + +And check they're working as we expected: + +``` +$ kubectl port-forward service/hello-svc 8000:8000 & curl localhost:8000 + +[1] 25986 +Hello world! + +$ pkill kubectl +[1]+ Terminated kubectl kubectl port-forward service/hello-svc 8000:8000 +``` + +## Please deploy +Duration: 5 + +Here we have learnt about the provided targets we need to run to get our changes deployed to minikube, however it's a +bit of a ritual. Let's look at consolidating this into a single command. Luckily the generated targets are labeled so +this is as simple as: + +```bash +plz run sequential --include docker-build --include k8s-push //hello_service/... 
+
+```
+
+We can then set up an alias for this in `.plzconfig`:
+
+```
+[alias "deploy"]
+cmd = run sequential --include docker-build --include k8s-push
+; Enable tab completion for build labels
+positionallabels = true
+```
+
+This is used like:
+
+```bash
+plz deploy //hello_service/...
+```
+
+## Docker build and build systems
+Duration: 7
+
+To finish this off, it's worth talking about the challenges with building docker images from Docker files in a
+file based build system.
+
+Integrating a build system with `docker build` is notoriously difficult. Build systems have trouble building your image
+as `docker build` sends the image to a daemon running in the background. There's no easy way to get a file based artifact
+out of Docker without this extra infrastructure. The built in rules produce a number of scripts to help build, load,
+push and save images:
+
+```
+docker_image(
+    name = "image",
+    srcs = [":example"],
+    base_image = ":base",
+    run_args = "-p 8000:8000",
+    visibility = ["//k8s/example:all"],
+)
+```
+
+This single target produces the following sub-targets:
+
+- `:image_fqn` target contains the fully qualified name of the generated image. Each image gets tagged with the hash
+of its inputs so this can be relied upon to uniquely identify this image.
+- `:image` & `:image_load` are the same script. This script loads the image into the local docker daemon. It will
+also make sure the base image is built and loaded first.
+- `:image_push` will load and push the image to the docker registry as configured by your local machine's docker
+environment.
+- `:image_save` will load and then save the image to a `.tar` in `plz-out/gen`
+- `:image_run` will run the image in the local docker env
+
+There are two ways we anticipate these targets to be used as part of a CI/CD pipeline:
+
+- The build server can be given access to the docker registry, and the images can be loaded directly with `:image_push`. 
+- The build server can save the images out to an offline image tarball with `:image_save`. These can be exported as +artifacts from the build server. Another stage of the CI/CD pipeline can then push these to the docker registry via +`docker load`. diff --git a/docs/codelabs/plz_query.md b/docs/codelabs/plz_query.md index 097563242..96a21e1be 100644 --- a/docs/codelabs/plz_query.md +++ b/docs/codelabs/plz_query.md @@ -1,303 +1,310 @@ -summary: Tips and tricks - plz query -description: Tips and tricks to help you become productive with Please - using plz query to query the build graph -id: plz_query -categories: intermediate -tags: medium -status: Published -authors: Jon Poole -Feedback Link: https://github.com/thought-machine/please - -# Tips and tricks - plz query - -## Overview - -Duration: 2 - -### Prerequisites - -- You must have Please installed: [Install please](https://please.build/quickstart.html) -- You should have a basic understanding of using Please to build and test code - -### What you'll learn - -This codelab isn't exhaustive however it should give you an idea of the sort of -things the Please CLI is capable of: - -- Finding the dependencies of a target -- Including and excluding targets -- Printing information about targets as well as internal targets - -## Setting up - -Duration: 2 - -For this codelab we will be using the Please codelabs repo: - -```bash -$ git clone https://github.com/thought-machine/please-codelabs -Cloning into 'please-examples'... -remote: Enumerating objects: 228, done. -remote: Total 228 (delta 0), reused 0 (delta 0), pack-reused 228 -Receiving objects: 100% (228/228), 38.23 KiB | 543.00 KiB/s, done. -Resolving deltas: 100% (79/79), done. 
-``` - -We'll be using the getting started with go codelab for these examples: - -```bash -cd please-codelabs/getting_started_go -``` - -## Finding dependencies of a target - -Duration: 4 - -Please has a strict build graph representing each build target and their -dependencies on each other. Among many things, this graph can be interrogated -to determine the dependencies of a target: - -```bash -$ plz query deps //src/greetings:greetings_test -//src/greetings:greetings_test - //src/greetings:greetings - ///go//tools:please_go - //plugins:go - //_please:arcat - //third_party/go:toolchain - //third_party/go:testify - ///third_party/go/github.com_stretchr_testify//:installs - ///third_party/go/github.com_stretchr_testify//assert:assert - ///third_party/go/github.com_davecgh_go-spew//spew:spew - //third_party/go:github.com_davecgh_go-spew - ///third_party/go/github.com_pmezard_go-difflib//difflib:difflib - //third_party/go:github.com_pmezard_go-difflib - ///third_party/go/gopkg.in_yaml.v3//:yaml.v3 - //third_party/go:gopkg.in_yaml.v3 - ///third_party/go/github.com_stretchr_testify//require:require -``` - -This can be especially useful when trying to improve build performance. -Unnecessary dependencies between targets can cause certain rules to be rebuilt -when they don't need to be. - -### Subrepo rules - -Woah, what are these `///third_party/go/foo//:bar` targets? Targets that begin -with a `///` are subrepo targets. In this case, the third-party dependency -*testify* has been defined using a `go_repo()` rule, which downloads the go -module into plz-out, generates Please BUILD files for each of its packages, and -then builds it like any other Please project. So -`///third_party/go/github.com_stretchr_testify//assert:assert` is saying "look -in the subrepo called *third_party/go/github.com_stretchr_testify*, and retrieve -for me the build target `//assert:assert`. 
- -You can `plz query print` these targets just like you would any other target: - -```bash -plz query print ///third_party/go/github.com_stretchr_testify//assert:assert -``` - -This will show you the underlying build rule for that target. Or, if you prefer, -you could have a look in the plz-out directory at the generated build rule: - -```bash -$ cat plz-out/subrepos/third_party/go/github.com_stretchr_testify/assert/BUILD -subinclude("///go//build_defs:go") - -go_library( - name = "assert", - srcs = [ - "assertion_compare.go", - "assertion_compare_can_convert.go", - "assertion_format.go", - "assertion_forward.go", - "assertion_order.go", - "assertions.go", - "doc.go", - "errors.go", - "forward_assertions.go", - "http_assertions.go", - ], - visibility = ["PUBLIC"], - deps = [ - "///third_party/go/github.com_davecgh_go-spew//spew", - "///third_party/go/github.com_pmezard_go-difflib//difflib", - "///third_party/go/gopkg.in_yaml.v3//:yaml.v3", - ], -) -``` - -### Internal rules - -If you pass the `--hidden` flag to a `plz query` command, you'll come across -*internal* targets as well. These can be identified by the leading `_` in their -name. E.g. - -```bash -$ plz query deps //src/greetings:greetings --hidden -//src/greetings:greetings - //src/greetings:_greetings#import_config - //src/greetings:_greetings#pkg_info - ///go//tools:please_go - //plugins:go - //_please:arcat - //plugins:_go#download - //src/greetings:_greetings#srcs - //third_party/go:toolchain - //third_party/go:_toolchain#download -``` - -As always, we can inspect these with `plz query print`, e.g. 
- -```bash -$ plz query print //src/greetings:_greetings#srcs -# //src/greetings:_greetings#srcs: -filegroup( - name = '_greetings#srcs', - srcs = ['greetings.go'], - labels = [ - 'link:plz-out/go/src/${PKG}', - 'go_src', - 'go', - ], - visibility = ['//src/...'], - build_timeout = 600, - requires = ['go'], -) -``` - -This particular internal rule is a filegroup that was generated by -`go_library()` and is used to expose the Go source files that make up that -library. You shouldn't depend on these types of rules directly as they may -change between minor releases of Please. - -## Reverse dependencies - -Duration: 2 - -If you're changing a build rule that you know has a wide-reaching effect, it -might be good to run all the tests that will be affected by that change. Let's -find the reverse dependencies of our subrepo rules: - -```bash -$ plz query revdeps ///third_party/go/github.com_stretchr_testify//require:require -///third_party/go/github.com_stretchr_testify//:installs -``` - -Well that doesn't look quite right... We should see -`//src/greetings:greetings_test` too. - -Turns out finding reverse dependencies is quite a slow operation. Please limits -this to just one level so you don't accidentally lock up your terminal trying to -walk the whole build graph. You can set the level with `--level=2` or if you -want to get all reverse dependencies, you can set it to `-1`: - -```bash -$ plz query revdeps ///third_party/go/github.com_stretchr_testify//require:require --level -1 -//src/greetings:greetings_test -//third_party/go:testify -///third_party/go/github.com_stretchr_testify//:installs -``` - -Be careful, this can be slow on larger build graphs. You can use -`--include=//src/foo/...` to limit the search to a slice of your repository. -More on this later in this codelab! - -## Composing plz commands - -Duration: 2 - -So we've managed to determine which targets will be affected by our change. How -do we run these tests? 
Please can be instructed to listen for targets on -standard input: - -```bash -$ plz query revdeps ///third_party/go/github.com_stretchr_testify//require:require --level -1 | plz test - -//src/greetings:greetings_test 1 test run in 8ms; 1 passed -1 test target and 1 test run; 1 passed. -Total time: 6.62s real, 10ms compute. -``` - -The `-` at the end of `plz test -` indicates to Please that we will be -supplying the targets to build over standard input. - -## Including and excluding targets - -Duration: 2 - -Almost all Please commands can take in the `--include` and `--exclude` -arguments. These can be used to specifically exclude targets: - -```bash -$ plz query revdeps --exclude //src/greetings:greetings_test --level=-1 ///third_party/go/github.com_stretchr_testify//require:require | plz test - -0 test targets and 0 tests run; 0 passed. -Total time: 40ms real, 0s compute. -``` - -As you can see, we excluded the test from earlier so `plz test` didn't run it. -We can also exclude this on the test command: - -```bash -$ plz query revdeps --level=-1 ///third_party/go/github.com_stretchr_testify//require:require | plz test --exclude //src/greetings:greetings_test - -0 test targets and 0 tests run; 0 passed. -Total time: 40ms real, 0s compute. -``` - -### Including based on label - -Targets can be labeled in Please. Most of the built-in rules apply some basic -labels, e.g. the Go rules apply the `go` label to their targets. These can be -very useful to run all tests for a given language: - -```bash -plz build --include go --exclude //third_party/go/... -``` - -This will build all Go targets but will only build targets under -`//third_party/go/...` if they're a dependency of a target that needs to be built. - -You may also add custom labels to your targets. 
Update `srcs/greetings/BUILD` as such: - -### `src/greetings/BUILD` -```python -go_library( - name = "greetings", - srcs = ["greetings.go"], - visibility = ["//src/..."], - labels = ["my_label"], # Add a label to the library rule -) - -go_test( - name = "greetings_test", - srcs = ["greetings_test.go"], - deps = [ - ":greetings", - "//third_party/go:assert", - ], - external = True, -) -``` - -```bash -$ plz query alltargets --include=my_label -//src/greetings:greetings - -$ plz build --include=my_label -Build finished; total time 300ms, incrementality 100.0%. Outputs: -//src/greetings:greetings: - plz-out/gen/src/greetings/greetings.a -``` - -This can be especially useful for separating out slow running tests: - -```bash -plz test --exclude e2e -``` - -## What's next? - -Duration: 1 - -Hopefully this has given you a taster for what is possible with `plz query`, -however there's so much more. See the [cli](/commands.html#query) for an idea of -what's possible! +summary: Tips and tricks - plz query +description: Tips and tricks to help you become productive with Please - using plz query to query the build graph +id: plz_query +categories: intermediate +tags: medium +status: Published +authors: Jon Poole +Feedback Link: https://github.com/thought-machine/please + +# Tips and tricks - plz query + +## Overview + +Duration: 2 + +### Prerequisites + +- You must have Please installed: [Install please](https://please.build/quickstart.html) +- You should have a basic understanding of using Please to build and test code + +### What you'll learn + +This codelab isn't exhaustive however it should give you an idea of the sort of +things the Please CLI is capable of: + +- Finding the dependencies of a target +- Including and excluding targets +- Printing information about targets as well as internal targets + +## Setting up + +Duration: 2 + +For this codelab we will be using the Please codelabs repo: + +```bash +git clone https://github.com/thought-machine/please-codelabs +``` + 
+The output should look something like this: + +```bash +Cloning into 'please-codelabs'... +remote: Enumerating objects: 228, done. +remote: Total 228 (delta 0), reused 0 (delta 0), pack-reused 228 +Receiving objects: 100% (228/228), 38.23 KiB | 543.00 KiB/s, done. +Resolving deltas: 100% (79/79), done. +``` + +We'll be using the getting started with go codelab for these examples: + +```bash +cd please-codelabs/getting_started_go +``` + +## Finding dependencies of a target + +Duration: 4 + +Please has a strict build graph representing each build target and their +dependencies on each other. Among many things, this graph can be interrogated +to determine the dependencies of a target: + +```bash +$ plz query deps //src/greetings:greetings_test +//src/greetings:greetings_test + //src/greetings:greetings + ///go//tools:please_go + //plugins:go + //_please:arcat + //third_party/go:toolchain + //third_party/go:testify + ///third_party/go/github.com_stretchr_testify//:installs + ///third_party/go/github.com_stretchr_testify//assert:assert + ///third_party/go/github.com_davecgh_go-spew//spew:spew + //third_party/go:github.com_davecgh_go-spew + ///third_party/go/github.com_pmezard_go-difflib//difflib:difflib + //third_party/go:github.com_pmezard_go-difflib + ///third_party/go/gopkg.in_yaml.v3//:yaml.v3 + //third_party/go:gopkg.in_yaml.v3 + ///third_party/go/github.com_stretchr_testify//require:require +``` + +This can be especially useful when trying to improve build performance. +Unnecessary dependencies between targets can cause certain rules to be rebuilt +when they don't need to be. + +### Subrepo rules + +Woah, what are these `///third_party/go/foo//:bar` targets? Targets that begin +with a `///` are subrepo targets. In this case, the third-party dependency +*testify* has been defined using a `go_repo()` rule, which downloads the go +module into plz-out, generates Please BUILD files for each of its packages, and +then builds it like any other Please project. 
So +`///third_party/go/github.com_stretchr_testify//assert:assert` is saying "look +in the subrepo called *third_party/go/github.com_stretchr_testify*, and retrieve +for me the build target `//assert:assert`. + +You can `plz query print` these targets just like you would any other target: + +```bash +plz query print ///third_party/go/github.com_stretchr_testify//assert:assert +``` + +This will show you the underlying build rule for that target. Or, if you prefer, +you could have a look in the plz-out directory at the generated build rule: + +```bash +$ cat plz-out/subrepos/third_party/go/github.com_stretchr_testify/assert/BUILD +subinclude("///go//build_defs:go") + +go_library( + name = "assert", + srcs = [ + "assertion_compare.go", + "assertion_compare_can_convert.go", + "assertion_format.go", + "assertion_forward.go", + "assertion_order.go", + "assertions.go", + "doc.go", + "errors.go", + "forward_assertions.go", + "http_assertions.go", + ], + visibility = ["PUBLIC"], + deps = [ + "///third_party/go/github.com_davecgh_go-spew//spew", + "///third_party/go/github.com_pmezard_go-difflib//difflib", + "///third_party/go/gopkg.in_yaml.v3//:yaml.v3", + ], +) +``` + +### Internal rules + +If you pass the `--hidden` flag to a `plz query` command, you'll come across +*internal* targets as well. These can be identified by the leading `_` in their +name. E.g. + +```bash +$ plz query deps //src/greetings:greetings --hidden +//src/greetings:greetings + //src/greetings:_greetings#import_config + //src/greetings:_greetings#pkg_info + ///go//tools:please_go + //plugins:go + //_please:arcat + //plugins:_go#download + //src/greetings:_greetings#srcs + //third_party/go:toolchain + //third_party/go:_toolchain#download +``` + +As always, we can inspect these with `plz query print`, e.g. 
+ +```bash +$ plz query print //src/greetings:_greetings#srcs +# //src/greetings:_greetings#srcs: +filegroup( + name = '_greetings#srcs', + srcs = ['greetings.go'], + labels = [ + 'link:plz-out/go/src/${PKG}', + 'go_src', + 'go', + ], + visibility = ['//src/...'], + build_timeout = 600, + requires = ['go'], +) +``` + +This particular internal rule is a filegroup that was generated by +`go_library()` and is used to expose the Go source files that make up that +library. You shouldn't depend on these types of rules directly as they may +change between minor releases of Please. + +## Reverse dependencies + +Duration: 2 + +If you're changing a build rule that you know has a wide-reaching effect, it +might be good to run all the tests that will be affected by that change. Let's +find the reverse dependencies of our subrepo rules: + +```bash +$ plz query revdeps ///third_party/go/github.com_stretchr_testify//require:require +///third_party/go/github.com_stretchr_testify//:installs +``` + +Well that doesn't look quite right... We should see +`//src/greetings:greetings_test` too. + +Turns out finding reverse dependencies is quite a slow operation. Please limits +this to just one level so you don't accidentally lock up your terminal trying to +walk the whole build graph. You can set the level with `--level=2` or if you +want to get all reverse dependencies, you can set it to `-1`: + +```bash +$ plz query revdeps ///third_party/go/github.com_stretchr_testify//require:require --level -1 +//src/greetings:greetings_test +//third_party/go:testify +///third_party/go/github.com_stretchr_testify//:installs +``` + +Be careful, this can be slow on larger build graphs. You can use +`--include=//src/foo/...` to limit the search to a slice of your repository. +More on this later in this codelab! + +## Composing plz commands + +Duration: 2 + +So we've managed to determine which targets will be affected by our change. How +do we run these tests? 
Please can be instructed to listen for targets on +standard input: + +```bash +$ plz query revdeps ///third_party/go/github.com_stretchr_testify//require:require --level -1 | plz test - +//src/greetings:greetings_test 1 test run in 8ms; 1 passed +1 test target and 1 test run; 1 passed. +Total time: 6.62s real, 10ms compute. +``` + +The `-` at the end of `plz test -` indicates to Please that we will be +supplying the targets to build over standard input. + +## Including and excluding targets + +Duration: 2 + +Almost all Please commands can take in the `--include` and `--exclude` +arguments. These can be used to specifically exclude targets: + +```bash +$ plz query revdeps --exclude //src/greetings:greetings_test --level=-1 ///third_party/go/github.com_stretchr_testify//require:require | plz test - +0 test targets and 0 tests run; 0 passed. +Total time: 40ms real, 0s compute. +``` + +As you can see, we excluded the test from earlier so `plz test` didn't run it. +We can also exclude this on the test command: + +```bash +$ plz query revdeps --level=-1 ///third_party/go/github.com_stretchr_testify//require:require | plz test --exclude //src/greetings:greetings_test - +0 test targets and 0 tests run; 0 passed. +Total time: 40ms real, 0s compute. +``` + +### Including based on label + +Targets can be labeled in Please. Most of the built-in rules apply some basic +labels, e.g. the Go rules apply the `go` label to their targets. These can be +very useful to run all tests for a given language: + +```bash +plz build --include go --exclude //third_party/go/... +``` + +This will build all Go targets but will only build targets under +`//third_party/go/...` if they're a dependency of a target that needs to be built. + +You may also add custom labels to your targets. 
Update `src/greetings/BUILD` as such:
+
+### `src/greetings/BUILD`
+```python
+go_library(
+    name = "greetings",
+    srcs = ["greetings.go"],
+    visibility = ["//src/..."],
+    labels = ["my_label"], # Add a label to the library rule
+)
+
+go_test(
+    name = "greetings_test",
+    srcs = ["greetings_test.go"],
+    deps = [
+        ":greetings",
+        "//third_party/go:assert",
+    ],
+    external = True,
+)
+```
+
+```bash
+$ plz query alltargets --include=my_label
+//src/greetings:greetings
+
+$ plz build --include=my_label
+Build finished; total time 300ms, incrementality 100.0%. Outputs:
+//src/greetings:greetings:
+  plz-out/gen/src/greetings/greetings.a
+```
+
+This can be especially useful for separating out slow-running tests:
+
+```bash
+plz test --exclude my_label
+```
+
+This will run all tests except those labeled with `my_label`.
+
+## What's next?
+
+Duration: 1
+
+Hopefully this has given you a taster for what is possible with `plz query`,
+however there's so much more. See the [cli](/commands.html#query) for an idea of
+what's possible!
diff --git a/docs/codelabs/python_intro.md b/docs/codelabs/python_intro.md index 11c40c31b..2b06cd929 100644 --- a/docs/codelabs/python_intro.md +++ b/docs/codelabs/python_intro.md @@ -1,352 +1,355 @@ -summary: Getting started with Python -description: Building and testing with Python and Please, as well as managing third-party dependencies via pip -id: python_intro -categories: beginner -tags: medium -status: Published -authors: Jon Poole -Feedback Link: https://github.com/thought-machine/please - -# Getting started with Python -## Overview -Duration: 4 - -### Prerequisites -- You must have Please installed: [Install please](https://please.build/quickstart.html) -- Python must be installed: [Install Python](https://www.python.org/downloads/) - -### What you'll learn -- Configuring Please for Python using the Python plugin -- Creating an executable Python binary -- Authoring Python modules in your project -- Testing your code -- Including third-party libraries - -### What if I get stuck? - -The final result of running through this codelab can be found -[here](https://github.com/thought-machine/please-codelabs/tree/main/getting_started_python) for reference. If you really -get stuck you can find us on [gitter](https://gitter.im/please-build/Lobby)! - -## Initialising your project -Duration: 2 - -Let's create a new project: -``` -$ mkdir getting_started_python && cd getting_started_python -$ plz init --no_prompt -$ plz init plugin python -``` - -### A note about your Please PATH -Please doesn't use your host system's `PATH` variable. By default, Please uses `/usr/local/bin:/usr/bin:/bin`. If Python -isn't in this path, you will need to add the following to `.plzconfig`: -``` -[build] -path = $YOUR_PYTHON_INSTALL_HERE:/usr/local/bin:/usr/bin:/bin -``` - -### So what just happened? -You will see this has created a number of files in your working folder: -``` -$ tree -a -. 
-├── pleasew -├── plugins -│   └── BUILD -├── .plzconfig -└── plz-out - └── log - └── build.log - -``` - -The `pleasew` script is a wrapper script that will automatically install Please if it's not already! This -means Please projects are portable and can always be built via -`git clone https://... example_module && cd example_module && ./pleasew build`. - -The `plugins/BUILD` is a file generated by `plz init plugin python` which defines a build target for the python plugin. - -The file `.plzconfig` contains the project configuration for Please. - -### `.plzconfig` -``` -[parse] -preloadsubincludes = ///python//build_defs:python - -[Plugin "python"] -Target = //plugins:python -``` - -Read the [config](/config.html) documentation and [python plugin config](/plugins.html#python.config) for more information on configuration. - -Finally, the `plz-out` directory contains artifacts built by plz. - -## Hello, world! -Duration: 3 - -Now we have a Please project, it's time to start adding some code to it! Let's create a "hello world" program: - -### `src/main.py` -```python -print('Hello, world!') -``` - -We now need to tell Please about our Python code. Please projects define metadata about the targets that are available to be -built in `BUILD` files. Let's create a `BUILD` file to build this program: - -### `src/BUILD` -```python -python_binary( - name = "main", - main = "main.py", -) -``` - -That's it! You can now run this with: -``` -$ plz run //src:main -Hello, world! -``` - -There's a lot going on here; first off, `python_binary()` is one of the [python plugin functions](/plugins.html#python). -This build function creates a "build target" in the `src` package. A package, in the Please sense, is any directory that -contains a `BUILD` file. - -Each build target can be identified by a build label in the format `//path/to/package:label`, i.e. `//src:main`. -There are a number of things you can do with a build target such e.g. 
`plz build //src:main`, however, as you've seen, -if the target is a binary, you may run it with `plz run`. - -## Adding modules -Duration: 4 - -Let's add a `src/greetings` package to our Python project: - -### `src/greetings/greetings.py` -```python -import random - -def greeting(): - return random.choice(["Hello", "Bonjour", "Marhabaan"]) -``` - -We then need to tell Please how to compile this library: - -### `src/greetings/BUILD` -```python -python_library( - name = "greetings", - srcs = ["greetings.py"], - visibility = ["//src/..."], -) -``` -NB: Unlike many popular build systems, Please doesn't just have one metadata file in the root of the project. Please will -typically have one `BUILD` file per [Python package](https://docs.python.org/3/tutorial/modules.html#packages). - -We can then build it like so: - -``` -$ plz build //src/greetings -Build finished; total time 290ms, incrementality 50.0%. Outputs: -//src/greetings:greetings: - plz-out/gen/src/greetings/greetings.py -``` - -Here we can see that the output of a `python_library` rule is a `.py` file which is stored in -`plz-out/gen/src/greetings/greetings.py`. - -We have also provided a `visibility` list to this rule. This is used to control where this `python_library()` rule can be -used within our project. In this case, any rule under `src`, denoted by the `...` syntax. - -NB: This syntax can also be used on the command line, e.g. `plz build //src/...`. - -### A note about `python_binary()` -If you're used to Python, one thing that might trip you up is how we package Python. The `python_binary()` rule outputs -something called a `pex`. This is very similar to the concept of a `.jar` file from the java world. All the Python files -relating to that build target are zipped up into a self-executable `.pex` file. This makes deploying and distributing -Python simple as there's only one file to distribute. - -Check it out: -``` -$ plz build //src:main -Build finished; total time 50ms, incrementality 100.0%. 
Outputs: -//src:main: - plz-out/bin/src/main.pex - -$ plz-out/bin/src/main.pex -Bonjour, world! -``` - -## Using our new module -Duration: 2 - -To maintain a principled model for incremental and hermetic builds, Please requires that rules are explicit about their -inputs and outputs. To use this new package in our "hello world" program, we have to add it as a dependency: - -### `src/BUILD` -```python -python_binary( - name = "main", - main = "main.py", - # NB: if the package and rule name are the same, you may omit the name i.e. this could be just //src/greetings - deps = ["//src/greetings:greetings"], -) -``` - -You can see we use a build label to refer to another rule here. Please will make sure that this rule is built before -making its outputs available to our rule here. - -Then update src/main.py: -### `src/main.py` -```python -from src.greetings import greetings - -print(greetings.greeting() + ", world!") -``` - -Give it a whirl: - -``` -$ plz run //src:main -Bonjour, world! -``` - -## Testing our code -Duration: 5 - -Let's create a very simple test for our library: -### `src/greetings/greetings_test.py` -```python -import unittest -from src.greetings import greetings - -class GreetingTest(unittest.TestCase): - - def test_greeting(self): - self.assertTrue(greetings.greeting()) - -``` - -We then need to tell Please about our tests: -### `src/greetings/BUILD` -```python -python_library( - name = "greetings", - srcs = ["greetings.py"], - visibility = ["//src/..."], -) - -python_test( - name = "greetings_test", - srcs = ["greetings_test.py"], - # Here we have used the shorthand `:greetings` label format. This format can be used to refer to a rule in the same - # package and is shorthand for `//src/greetings:greetings`. - deps = [":greetings"], -) -``` - -We've used `python_test()` to define our test target. This is a special build rule that is considered a test. These -rules can be executed as such: -``` -$ plz test //src/... 
-//src/greetings:greetings_test 1 test run in 3ms; 1 passed -1 test target and 1 test run in 3ms; 1 passed. Total time 90ms. -``` - -Please will run all the tests it finds under `//src/...`, and aggregate the results up. This works even across -languages allowing you to test your whole project with a single command. - -## Third-party dependencies -Duration: 7 - -### Using `pip_library()` - -Eventually, most projects need to depend on third-party code. Let's include NumPy into our package. Conventionally, -third-party dependencies live under `//third_party/...` (although they don't have to), so let's create that package: - -### `third_party/python/BUILD` -```python -package(default_visibility = ["PUBLIC"]) - -pip_library( - name = "numpy", - version = "1.23.4", - zip_safe = False, # This is because NumPy has shared object files which can't be linked to them when zipped up -) -``` - -This will download NumPy for us to use in our project. We use the `package()` built-in function to set the default -visibility for this package. This can be very useful for third-party rules to avoid having to specify -`visibility = ["PUBLIC"]` on every `pip_library()` invocation. - -NB: The visibility "PUBLIC" is a special case. Typically, items in the visibility list are labels. "PUBLIC" is equivalent -to `//...`. - -### Setting up our module path -Importing Python modules is based on the import path. That means by default, we'd import NumPy as -`import third_party.python.numpy`. To fix this, we need to tell Please where our third-party module is. Add the -following to your `.plzconfig`: - -### `.plzconfig` -``` -[plugin "python"] -ModuleDir = third_party.python -``` - -NB: if you encounter an error eg. `no such option: --system` you are likely using an operating system where you need to disable vendor flags. This can be done by adding this config. 
- -### `.plzconfig` -``` -[plugin "python"] -DisableVendorFlags = true -``` - -### Updating our tests - -We can now use this library in our code: - -### `src/greetings/greetings.py` -```go -from numpy import random - -def greeting(): - return random.choice(["Hello", "Bonjour", "Marhabaan"]) - -``` - -And add NumPy as a dependency: -### `src/greetings/BUILD` -```python -python_library( - name = "greetings", - srcs = ["greetings.py"], - visibility = ["//src/..."], - deps = ["//third_party/python:numpy"], -) - -python_test( - name = "greetings_test", - srcs = ["greetings_test.py"], - deps = [":greetings"], -) -``` - -``` -$ plz run //src:main -Marhabaan, world! -``` - -## What next? -Duration: 1 - -Hopefully you now have an idea as to how to build Python with Please. Please is capable of so much more though! - -- [Please basics](/basics.html) - A more general introduction to Please. It covers a lot of what we have in this -tutorial in more detail. -- [Plugin rules](/plugin.html#python) - See the rest of the Python rules in the python plugin. -- [Config](/config.html#python) - See the available config options for Please, especially those relating to Python. -- [Command line interface](/commands.html) - Please has a powerful command line interface. Interrogate the build graph, -determine files changes since master, watch rules and build them automatically as things change and much more! Use -`plz help`, and explore this rich set of commands! - -Otherwise, why not try one of the other codelabs! 
+summary: Getting started with Python +description: Building and testing with Python and Please, as well as managing third-party dependencies via pip +id: python_intro +categories: beginner +tags: medium +status: Published +authors: Jon Poole +Feedback Link: https://github.com/thought-machine/please + +# Getting started with Python +## Overview +Duration: 4 + +### Prerequisites +- You must have Please installed: [Install please](https://please.build/quickstart.html) +- Python must be installed: [Install Python](https://www.python.org/downloads/) + +### What you'll learn +- Configuring Please for Python using the Python plugin +- Creating an executable Python binary +- Authoring Python modules in your project +- Testing your code +- Including third-party libraries + +### What if I get stuck? + +The final result of running through this codelab can be found +[here](https://github.com/thought-machine/please-codelabs/tree/main/getting_started_python) for reference. If you really +get stuck you can find us on [gitter](https://gitter.im/please-build/Lobby)! + +## Initialising your project +Duration: 2 + +Let's create a new project: +```bash +mkdir getting_started_python && cd getting_started_python +plz init --no_prompt +plz init plugin python +``` + +### A note about your Please PATH +Please doesn't use your host system's `PATH` variable. By default, Please uses `/usr/local/bin:/usr/bin:/bin`. If Python +isn't in this path, you will need to add the following to `.plzconfig`: +``` +[build] +path = $YOUR_PYTHON_INSTALL_HERE:/usr/local/bin:/usr/bin:/bin +``` + +### So what just happened? +You will see this has created a number of files in your working folder: +``` +$ tree -a +. +├── pleasew +├── plugins +│   └── BUILD +├── .plzconfig +└── plz-out + └── log + └── build.log + +``` + +The `pleasew` script is a wrapper script that will automatically install Please if it's not already! This +means Please projects are portable and can always be built via +`git clone https://... 
example_module && cd example_module && ./pleasew build`.
+
+The `plugins/BUILD` is a file generated by `plz init plugin python` which defines a build target for the python plugin.
+
+The file `.plzconfig` contains the project configuration for Please.
+
+### `.plzconfig`
+```
+[parse]
+preloadsubincludes = ///python//build_defs:python
+
+[Plugin "python"]
+Target = //plugins:python
+```
+
+Read the [config](/config.html) documentation and [python plugin config](/plugins.html#python.config) for more information on configuration.
+
+Finally, the `plz-out` directory contains artifacts built by plz.
+
+## Hello, world!
+Duration: 3
+
+Now we have a Please project, it's time to start adding some code to it! Let's create a "hello world" program:
+
+### `src/main.py`
+```python
+print('Hello, world!')
+```
+
+We now need to tell Please about our Python code. Please projects define metadata about the targets that are available to be
+built in `BUILD` files. Let's create a `BUILD` file to build this program:
+
+### `src/BUILD`
+```python
+python_binary(
+    name = "main",
+    main = "main.py",
+)
+```
+
+That's it! You can now run this with:
+```
+$ plz run //src:main
+Hello, world!
+```
+
+There's a lot going on here; first off, `python_binary()` is one of the [python plugin functions](/plugins.html#python).
+This build function creates a "build target" in the `src` package. A package, in the Please sense, is any directory that
+contains a `BUILD` file.
+
+Each build target can be identified by a build label in the format `//path/to/package:label`, i.e. `//src:main`.
+There are a number of things you can do with a build target such as, e.g. `plz build //src:main`, however, as you've seen,
+if the target is a binary, you may run it with `plz run`. 
+ +## Adding modules +Duration: 4 + +Let's add a `src/greetings` package to our Python project: + +### `src/greetings/greetings.py` +```python +import random + +def greeting(): + return random.choice(["Hello", "Bonjour", "Marhabaan"]) +``` + +We then need to tell Please how to compile this library: + +### `src/greetings/BUILD` +```python +python_library( + name = "greetings", + srcs = ["greetings.py"], + visibility = ["//src/..."], +) +``` +NB: Unlike many popular build systems, Please doesn't just have one metadata file in the root of the project. Please will +typically have one `BUILD` file per [Python package](https://docs.python.org/3/tutorial/modules.html#packages). + +We can then build it like so: + +``` +$ plz build //src/greetings +Build finished; total time 290ms, incrementality 50.0%. Outputs: +//src/greetings:greetings: + plz-out/gen/src/greetings/greetings.py +``` + +Here we can see that the output of a `python_library` rule is a `.py` file which is stored in +`plz-out/gen/src/greetings/greetings.py`. + +We have also provided a `visibility` list to this rule. This is used to control where this `python_library()` rule can be +used within our project. In this case, any rule under `src`, denoted by the `...` syntax. + +NB: This syntax can also be used on the command line, e.g. `plz build //src/...`. + +### A note about `python_binary()` +If you're used to Python, one thing that might trip you up is how we package Python. The `python_binary()` rule outputs +something called a `pex`. This is very similar to the concept of a `.jar` file from the java world. All the Python files +relating to that build target are zipped up into a self-executable `.pex` file. This makes deploying and distributing +Python simple as there's only one file to distribute. + +Check it out: +``` +$ plz build //src:main +Build finished; total time 50ms, incrementality 100.0%. Outputs: +//src:main: + plz-out/bin/src/main.pex + +$ plz-out/bin/src/main.pex +Bonjour, world! 
+``` + +## Using our new module +Duration: 2 + +To maintain a principled model for incremental and hermetic builds, Please requires that rules are explicit about their +inputs and outputs. To use this new package in our "hello world" program, we have to add it as a dependency: + +### `src/BUILD` +```python +python_binary( + name = "main", + main = "main.py", + # NB: if the package and rule name are the same, you may omit the name i.e. this could be just //src/greetings + deps = ["//src/greetings:greetings"], +) +``` + +You can see we use a build label to refer to another rule here. Please will make sure that this rule is built before +making its outputs available to our rule here. + +Then update src/main.py: +### `src/main.py` +```python +from src.greetings import greetings + +print(greetings.greeting() + ", world!") +``` + +Give it a whirl: + +``` +$ plz run //src:main +Bonjour, world! +``` + +## Testing our code +Duration: 5 + +Let's create a very simple test for our library: +### `src/greetings/greetings_test.py` +```python +import unittest +from src.greetings import greetings + +class GreetingTest(unittest.TestCase): + + def test_greeting(self): + self.assertTrue(greetings.greeting()) + +``` + +We then need to tell Please about our tests: +### `src/greetings/BUILD` +```python +python_library( + name = "greetings", + srcs = ["greetings.py"], + visibility = ["//src/..."], +) + +python_test( + name = "greetings_test", + srcs = ["greetings_test.py"], + # Here we have used the shorthand `:greetings` label format. This format can be used to refer to a rule in the same + # package and is shorthand for `//src/greetings:greetings`. + deps = [":greetings"], +) +``` + +We've used `python_test()` to define our test target. This is a special build rule that is considered a test. These +rules can be executed as such: +``` +$ plz test //src/... +//src/greetings:greetings_test 1 test run in 3ms; 1 passed +1 test target and 1 test run in 3ms; 1 passed. Total time 90ms. 
+``` + +Please will run all the tests it finds under `//src/...`, and aggregate the results up. This works even across +languages allowing you to test your whole project with a single command. + +## Third-party dependencies +Duration: 7 + +### Using `pip_library()` + +Eventually, most projects need to depend on third-party code. Let's include NumPy into our package. Conventionally, +third-party dependencies live under `//third_party/...` (although they don't have to), so let's create that package: + +### `third_party/python/BUILD` +```python +package(default_visibility = ["PUBLIC"]) + +pip_library( + name = "numpy", + version = "1.23.4", + zip_safe = False, # This is because NumPy has shared object files which can't be linked to them when zipped up +) +``` + +This will download NumPy for us to use in our project. We use the `package()` built-in function to set the default +visibility for this package. This can be very useful for third-party rules to avoid having to specify +`visibility = ["PUBLIC"]` on every `pip_library()` invocation. + +NB: The visibility "PUBLIC" is a special case. Typically, items in the visibility list are labels. "PUBLIC" is equivalent +to `//...`. + +### Setting up our module path +Importing Python modules is based on the import path. That means by default, we'd import NumPy as +`import third_party.python.numpy`. To fix this, we need to tell Please where our third-party module is. Add the +following to your `.plzconfig`: + +### `.plzconfig` +``` +[plugin "python"] +ModuleDir = third_party.python +``` + +NB: if you encounter an error eg. `no such option: --system` you are likely using an operating system where you need to disable vendor flags. This can be done by adding this config. 
+ +### `.plzconfig` +``` +[plugin "python"] +DisableVendorFlags = true +``` + +### Updating our tests + +We can now use this library in our code: + +### `src/greetings/greetings.py` +```python +from numpy import random + +def greeting(): + return random.choice(["Hello", "Bonjour", "Marhabaan"]) +``` + +And add NumPy as a dependency: +### `src/greetings/BUILD` +```python +python_library( + name = "greetings", + srcs = ["greetings.py"], + visibility = ["//src/..."], + deps = ["//third_party/python:numpy"], +) + +python_test( + name = "greetings_test", + srcs = ["greetings_test.py"], + deps = [":greetings"], +) +``` + +``` +$ plz run //src:main +Marhabaan, world! +``` + +## What next? +Duration: 1 + +Hopefully you now have an idea as to how to build Python with Please. Please is capable of so much more though! + +- [Please basics](/basics.html) - A more general introduction to Please. It covers a lot of what we have in this +tutorial in more detail. +- [Plugin rules](/plugin.html#python) - See the rest of the Python rules in the python plugin. +- [Config](/config.html#python) - See the available config options for Please, especially those relating to Python. +- [Command line interface](/commands.html) - Please has a powerful command line interface. Interrogate the build graph, +determine files changes since master, watch rules and build them automatically as things change and much more! Use +`plz help`, and explore this rich set of commands! + +Otherwise, why not try one of the other codelabs! +, watch rules and build them automatically as things change and much more! Use +`plz help`, and explore this rich set of commands! + +Otherwise, why not try one of the other codelabs! 
diff --git a/docs/codelabs/using_plugins.md b/docs/codelabs/using_plugins.md index 1ecf44c47..94df5360d 100644 --- a/docs/codelabs/using_plugins.md +++ b/docs/codelabs/using_plugins.md @@ -1,152 +1,159 @@ -summary: Using plugins -description: How to use Please's language plugins -id: using_plugins -categories: beginner -tags: medium -status: Published -authors: Sam Westmoreland -Feedback Link: https://github.com/thought-machine/please - -# Using Plugins - -## Overview - -Duration: 1 - -### Prerequisites - -- You must have Please installed: [Install please](https://please.build/quickstart.html) -- You should have a basic understanding of how to use Please to build and test code - -### What you'll learn - -Language plugins were introduced with the release of Please v17. Each plugin -contains build definitions specific to a particular language. In this codelab -we'll cover - -- Where to find plugins -- How to can install them in your project -- How to configure them to work for your repo - -## Initialising your Please repo - -Duration: 1 - -For this codelab we'll start with a clean repo. The first thing you'll need to -do is initialise it as a Please repo. We can do this with `plz init`. Now if we -check the directory we should have a config file, as well as the Please wrapper -script `pleasew`: - -```bash -$ plz init -$ tree -a -. -├── pleasew -└── .plzconfig - -1 directory, 2 files -``` - -## Where to find plugins - -Duration: 1 - -For a comprehensive list of available plugins, visit -[https://github.com/please-build/please-rules](https://github.com/please-build/please-rules). -There you'll find plugins for language build rules, plugins for various -technologies, plugins for generating protos, and tools to help you maintain -your Please project. - -### Can't find a plugin for your language? - -The plugin ecosystem was designed with extensibility in mind, so if there is a -language that you'd like to build with Please and no plugin, consider writing -one! 
The existing plugins should serve as helpful templates, and if you get -stuck, feel free to reach out to the Please team on Github or the Please -community on [Gitter](https://gitter.im/please-build/Lobby). There will also be -a codelab coming soon that will cover the basics of writing a new plugin. - -## How to install a plugin - -Duration: 4 - -The easy way to install a plugin in your project is to use `plz init`. We'll -use the Go plugin in this example: - -```bash -$ plz init plugin go - -$ tree -a -. -├── pleasew -├── plugins -│ └── BUILD -├── .plzconfig -└── plz-out - └── log - └── build.log - -4 directories, 4 files -``` - -### `.plzconfig` - -```ini -[parse] -preloadsubincludes = ///go//build_defs:go - -[Plugin "go"] -Target = //plugins:go -``` - -In the plzconfig, we can see that two things have been added for us. The first -is a preloaded subinclude. This will ensure that whichever package we're in in -our project, the rules defined in the plugin we just installed will be available. -This is completely optional. If the intention is to only use the rules in a few -places, it might make more sense to have an explicit subinclude in those -packages to avoid the plugin being a dependency of the entire repo. - -The second thing is a section for configuration of our new plugin. `Target` is -a required field in this section. This tells Please where to look for the build -target that defines the plugin. There may be other required fields depending on -the particular plugin we've installed. More information about the various config -options is available from the plugin repository itself (e.g. [https://github.com/ -please-build/go-rules](https://github.com/please-build/go-rules)), or via `plz -help [language]`. - -### `plugins/BUILD` - -```python -plugin_repo( - name = "go", - revision = "v1.17.2", - plugin = "go-rules", - owner = "please-build", -) -``` - -A new file has also been created for us called `plugins/BUILD`. 
This file should -contain a `plugin_repo()` target which will download our desired plugin for us. -The plugin is actually defined as a *subrepo* under the hood, which is why when -we want to depend on the build definitions in the plugin, we reference them with -a `///` (like in the preloaded subinclude in the `.plzconfig` file). The `//` is -then used to reference build targets within that subrepo, `//build_defs:go` for -example. - -Note the revision field will be set to the most recent available version of the -plugin, but can be set to any version tag or commit hash that you require. - -The use of `plz init plugin` is entirely optional. You might prefer -to manually add the `plugin_repo()` target somewhere else if putting it in -`plugins/` doesn't fit your needs. The only requirements for using a plugin are -that there is a `plugin_repo()` target *somewhere*, and that it is referenced -in the `Target` field of the plugin's config section in the `.plzconfig` file. - -## What's next? - -Duration: 0 - -You should now be set up with a language plugin. You now have access to all of -the build definitions provided by your chosen plugin. - -Go ahead and install any other plugins that you Please, and get building! +summary: Using plugins +description: How to use Please's language plugins +id: using_plugins +categories: beginner +tags: medium +status: Published +authors: Sam Westmoreland +Feedback Link: https://github.com/thought-machine/please + +# Using Plugins + +## Overview + +Duration: 1 + +### Prerequisites + +- You must have Please installed: [Install please](https://please.build/quickstart.html) +- You should have a basic understanding of how to use Please to build and test code + +### What you'll learn + +Language plugins were introduced with the release of Please v17. Each plugin +contains build definitions specific to a particular language. 
In this codelab +we'll cover + +- Where to find plugins +- How to can install them in your project +- How to configure them to work for your repo + +## Initialising your Please repo + +Duration: 1 + +For this codelab we'll start with a clean repo. The first thing you'll need to +do is initialise it as a Please repo. We can do this with `plz init`. Now if we +check the directory we should have a config file, as well as the Please wrapper +script `pleasew`: + +```bash +plz init +tree -a +``` + +The output should look like this: +```bash +. +├── pleasew +└── .plzconfig + +1 directory, 2 files +``` + +## Where to find plugins + +Duration: 1 + +For a comprehensive list of available plugins, visit +[https://github.com/please-build/please-rules](https://github.com/please-build/please-rules). +There you'll find plugins for language build rules, plugins for various +technologies, plugins for generating protos, and tools to help you maintain +your Please project. + +### Can't find a plugin for your language? + +The plugin ecosystem was designed with extensibility in mind, so if there is a +language that you'd like to build with Please and no plugin, consider writing +one! The existing plugins should serve as helpful templates, and if you get +stuck, feel free to reach out to the Please team on Github or the Please +community on [Gitter](https://gitter.im/please-build/Lobby). There will also be +a codelab coming soon that will cover the basics of writing a new plugin. + +## How to install a plugin + +Duration: 4 + +The easy way to install a plugin in your project is to use `plz init`. We'll +use the Go plugin in this example: + +```bash +plz init plugin go +tree -a +``` + +The output should look like this: +```bash +. 
+├── pleasew +├── plugins +│ └── BUILD +├── .plzconfig +└── plz-out + └── log + └── build.log + +4 directories, 4 files +``` + +### `.plzconfig` + +```ini +[parse] +preloadsubincludes = ///go//build_defs:go + +[Plugin "go"] +Target = //plugins:go +``` + +In the plzconfig, we can see that two things have been added for us. The first +is a preloaded subinclude. This will ensure that whichever package we're in in +our project, the rules defined in the plugin we just installed will be available. +This is completely optional. If the intention is to only use the rules in a few +places, it might make more sense to have an explicit subinclude in those +packages to avoid the plugin being a dependency of the entire repo. + +The second thing is a section for configuration of our new plugin. `Target` is +a required field in this section. This tells Please where to look for the build +target that defines the plugin. There may be other required fields depending on +the particular plugin we've installed. More information about the various config +options is available from the plugin repository itself (e.g. [https://github.com/ +please-build/go-rules](https://github.com/please-build/go-rules)), or via `plz +help [language]`. + +### `plugins/BUILD` + +```python +plugin_repo( + name = "go", + revision = "v1.17.2", + plugin = "go-rules", + owner = "please-build", +) +``` + +A new file has also been created for us called `plugins/BUILD`. This file should +contain a `plugin_repo()` target which will download our desired plugin for us. +The plugin is actually defined as a *subrepo* under the hood, which is why when +we want to depend on the build definitions in the plugin, we reference them with +a `///` (like in the preloaded subinclude in the `.plzconfig` file). The `//` is +then used to reference build targets within that subrepo, `//build_defs:go` for +example. 
+ +Note the revision field will be set to the most recent available version of the +plugin, but can be set to any version tag or commit hash that you require. + +The use of `plz init plugin` is entirely optional. You might prefer +to manually add the `plugin_repo()` target somewhere else if putting it in +`plugins/` doesn't fit your needs. The only requirements for using a plugin are +that there is a `plugin_repo()` target *somewhere*, and that it is referenced +in the `Target` field of the plugin's config section in the `.plzconfig` file. + +## What's next? + +Duration: 0 + +You should now be set up with a language plugin. You now have access to all of +the build definitions provided by your chosen plugin. + +Go ahead and install any other plugins that you Please, and get building! diff --git a/docs/commands.html b/docs/commands.html index b6b2fcfa7..92f46dd0a 100644 --- a/docs/commands.html +++ b/docs/commands.html @@ -1,1130 +1,1130 @@ -

    Please commands

    - -

    - Please has a rich command line interface that can be used to build and test - you code; interrogate the build graph; and much more! -

    - -
    -

    Tab completion

    - -

    - To get the most our of the Please command line interface, it is highly - recommended that you enable tab-completion. Please has a sophisticated - mechanism that is aware of your build graph, all the commands and flags, and - any aliases you may have - configured. To enable Please completions, add this line to your - .bashrc or .zshrc: -

    - -
    -    
    -    
    -    source <(plz --completion_script)
    -    
    -  
    -
    - -
    -

    Common flags

    - -

    These flags are common to all (or nearly all) operations.

    - -
    -

    - Options controlling what to build & how to build it: -

    - -
      -
    • -
      -

      - -c, --config -

      - -

      - The build config to use. The effect this has depends on the - language; typically it allows swapping between a debug or an - optimised build.
      - The default is - opt to build optimised code; - dbg is accepted for C++ and Go to build - code with debugging symbols.
      - This has no effect on Python or Java rules. -

      -
      -
    • -
    • -
      -

      - -r, --repo_root -

      - -

      - Sets the location of the repo root to use. Normally plz assumes it - is within the repo somewhere and locates the root itself, this - forces it to a specific location. -

      -
      -
    • -
    • -
      -

      - -n, --num_threads -

      - -

      - Sets the number of parallel workers to use while building. The - default is the number of logical CPUs of the current machine plus - two. -

      -
      -
    • -
    • -
      -

      - -i, --include -

      - -

      - Labels of targets to include when selecting multiple targets with - :all or /.... - These apply to labels which can be set on individual targets; a - number of them are predefined, most notably for each language (go, python, java, - cc, etc).
      - Only targets with this label will be built. -

      -
      -
    • -
    • -
      -

      - -e, --exclude -

      - -

      - The inverse of - --include; labels of targets to exclude - when selecting multiple targets with - :all or - /....
      - Takes priority over - --include.
      - You can also pass build expressions to - --exclude - to exclude targets as well as by label. -

      -
      -
    • -
    • -
      -

      - -a, --arch -

      - -

      - Architecture to compile for. By default Please will build for the - host architecture, but has some support for targeting others. See - the cross-compiling docs - for more information. -

      -
      -
    • -
    • -
      -

      - -o, --override -

      - -

      - Allows overriding individual config settings on a temporary basis; - for example - -o python.testrunner:pytest. See the - config reference - for more information on what can be overridden. -

      -
      -
    • -
    • -
      -

      - --profile -

      - -

      - Defines a profile of config file to load from the repo. For example, - --profile ci - will load - .plzconfig.ci. This can be useful to - canonicalise certain settings for non-common or scripted - configurations. -

      -
      -
    • -
    -
    - -
    -

    - Options controlling output & logging: -

    - -
      -
    • -
      -

      - -v, --verbosity -

      - -

      - Sets the amount of output logged from plz; a number between 0 and - 4.
      - Each number shows all messages at the given level and above: -

      - -
        -
      1. 0. Error
      2. -
      3. 1. Warning
      4. -
      5. 2. Notice
      6. -
      7. 3. Info
      8. -
      9. 4. Debug
      10. -
      - -

      - The default is 1, for warnings and errors only. If level 4 is - requested then it will suppress interactive output. -

      -
      -
    • -
    • -
      -

      - --log_file -

      - -

      Writes all logs out into the given file.

      -
      -
    • -
    • -
      -

      - --log_file_level -

      - -

      - Level of logging to write to the file. Defaults to 2 (notice, - warning and error). -

      -
      -
    • -
    • -
      -

      - --interactive_output -

      - -

      - Forces plz to show interactive output on stderr. By default it - autodetects based on whether stderr appears to be an interactive - terminal or not, but this flag can be used to force it on in cases - where it might get it wrong. -

      -
      -
    • -
    • -
      -

      - -p, --plain_output -

      - -

      - Forces plz not to show interactive output on stderr. Can be useful - in cases where it might obscure other messages or where the output - isn't capable of interpreting the escape codes correctly. -

      -
      -
    • -
    • -
      -

      - --colour -

      - -

      - Forces coloured output from logging & shell output. Again, this - is autodetected by default, but this can be used in cases where it - would normally detect false but it will later be consumed by - something that understands the codes (e.g. CI systems like Teamcity - or Jenkins). -

      -
      -
    • -
    • -
      -

      - --nocolour -

      - -

      - Inverse of above, forces colourless output from logging & the - shell. -

      -
      -
    • -
    • -
      -

      - --trace_file -

      - -

      - File to write Chrome tracing output into.
      - This is a JSON format that contains the actions taken by plz during - the build and their timings. You can load the file up in - about:tracing - and use that to see which parts of your build were slow. -

      -
      -
    • -
    • -
      -

      - --version -

      - -

      Prints the version of the tool and exits immediately.

      -
      -
    • -
    • -
      -

      - --show_all_output -

      - -

      - Prints all output of each building process as they run. Implies - --plain_output. -

      -
      -
    • -
    • -
      -

      - --completion_script -

      - -

      - Prints the bash / zsh completion script to stdout. This can be used - in a - .bashrc or - .zshrc, e.g. - source <(plz --completion_script). -

      -
      -
    • -
    -
    - -
    -

    - Options that enable / disable certain features: -

    - -
      -
    • -
      -

      - --noupdate -

      - -

      Disables Please attempting to auto-update itself.

      -
      -
    • -
    • -
      -

      - --nohash_verification -

      - -

      - Turns hash verification errors into non-fatal warnings.
      - Obviously this is only for local development & testing, not for - 'production' use. -

      -
      -
    • -
    • -
      -

      - --nolock -

      - -

      - Don't attempt to lock the repo exclusively while building.
      - Use with care - if two instances of plz start building the same - targets simultaneously they will likely fail with very strange - errors. -

      -
      -
    • -
    • -
      -

      - --keep_workdirs -

      - -

      - Don't clean directories in plz-out/tmp after successfully building - targets.
      - They're always left in cases where targets fail. -

      -
      -
    • -
    -
    -
    - -
    -

    - plz build -

    - -

    - This is the most common and obvious command; it builds one or more targets - and all their dependencies. A plain - plz build attempts to build everything, but more - usually you can tell it to build a particular target or targets by passing - them on the command line afterwards. For example: -

    - -
      -
    • - plz build //src/core:core builds just the one - target. -
    • -
    • - plz build //src/core:all builds every target - in. -
    • -
    • - src/core/BUILD. -
    • -
    • - plz build //src/... builds every target in - src and all subdirectories. -
    • -
    -
    - -
    -

    plz test

    - -

    - This is also a very commonly used command, it builds one or more targets and - then runs their tests. Which tests to run are specified by positional - arguments as described for - plz build. -

    - -

    - After successful completion a combined test output file will be written to - plz-out/log/test_results.xml - in something approximating xUnit XML format. -

    - -

    It takes a few special flags:

    -
      -
    • -
      -

      - --num_runs -

      - -

      - Determines how many times to run each test. The default is 1, but can - be more for tests marked as flaky. -

      -
      -
    • -
    • -
      -

      - --failing_tests_ok -

      - -

      - The return value is 0 regardless of whether any tests fail or not. It - will only be nonzero if they fail to build completely.
      - This is not commonly used, it's mostly useful for CI automation which - will parse the results file to determine ultimate success / failure. -

      -
      -
    • -
    • -
      -

      - --test_results_file -

      - -

      Specifies the location to write the combined test results to.

      -
      -
    • -
    • -
      -

      - -d, --debug -

      - -

      - Turns on interactive debug mode for this test. You can only specify - one test with this flag, because it attaches an interactive debugger - to catch failures.
      - It only works for some test types, currently python (with pytest as - the test runner), C and C++.
      - It implies - -c dbg unless that flag is explicitly - passed. -

      -
      -
    • -
    • -
      -

      - --rerun -

      - -

      - Forces the rerun of a test, even if the hash has not changed. -

      -
      -
    • -
    -
    - -
    -

    - plz cover -

    - -

    - Very similar to - plz test, but also instruments tests for coverage - and collects results. Tests normally run significantly slower in this mode - (the exact amount depends on the language). -

    - -

    Coverage isn't available for C++ tests at present.

    - -

    - All the same flags from - plz test apply here as well. In addition there are - several more: -

    - -
      -
    • -
      -

      - --nocoverage_report -

      - -

      Suppresses the coverage report output to the shell.

      -
      -
    • -
    • -
      -

      - --line_coverage_report -

      - -

      Produces a line-by-line coverage display for all source files.

      -
      -
    • -
    • -
      -

      - --include_all_files -

      - -

      - Includes any transitively dependent source files in the coverage - report (the default is just files from relevant packages). -

      -
      -
    • -
    • -
      -

      - --include_file -

      - -

      - Files to include in the coverage report (the flag can be passed more - than once for multiple). -

      -
      -
    • -
    • -
      -

      - --coverage_results_file -

      - -

      - Similar to - --test_results_file, determines where to - write the aggregated coverage results to. -

      -
      -
    • -
    • -
      -

      - -d, --debug -

      - -

      - Turns on interactive debug mode for this test. You can only specify - one test with this flag, because it attaches an interactive debugger - to catch failures.
      - It only works for some test types, currently python (with pytest as - the test runner), C and C++.
      - It implies - -c dbg unless that flag is explicitly - passed. -

      -
      -
    • -
    -
    - -
    -

    plz run

    - -

    - This is essentially shorthand for calling - plz build and then running the result of whatever - target was built. It's often handy for iterating on a single target such - that one command builds and reruns it. -

    - -

    - Because of the way the target is run after, you have to provide exactly one - target to this command. The target must be marked as - binary in its rule definition (this is implicit - for the various builtin _binary rules such as - go_binary etc). -

    - -

    - If you want to pass flags to the target rather than plz itself, you must - pass them last on the command line, after a - --. This tells Please not to attempt to parse them - as its own flags. -

    - -

    - There are two optional subcommands - sequential and - parallel which allow running multiple targets in - one go. As the names suggest, they run targets either one after the other or - all in parallel.
    - In either case, the semantics are a little different to running a single - target; arguments must be passed one by one via the - -a flag, and while stdout / stderr are connected - to the current terminal, stdin is not connected (because it'd not be clear - which process would consume it). -

    -
    - -
    -

    plz exec

    - -

    - This command executes the target in a hermetic build environment, as opposed - to the plz run command. This allows for uses cases, - such as: debugging/profiling programs that may require a predictable environment, - or running E2E tests reliant on external state which doesn't fit with Please's - caching approach. -

    - -

    - The --share_network and --share_mount flags are available (Linux only) for greater control over the sandboxed environment - where the target is run. The --share_network flag is useful - in situations where the host system might want to connect to a server that the command - started. -

    - -

    - The --output_path and --out flags allow for artifacts, produced by the command executed in the sandboxed environment, - to be copied onto the host system where plz exec is being - run from. -

    - -

    - Non-binary targets are also supported, but a custom command (see above) is required since - there isn't a binary produced that can be executed by default. These targets' results can - be accessed via the $OUTS environment variable. -

    - -

    - Only a single command is supported per execution with plz exec. - Multiple can be run with plz exec sequential or plz exec parallel, - which are analogous to their plz run equivalents. -

    -
    - -
    -

    - plz watch -

    - -

    - Watches a set of targets for changes. Whenever any one of their source files - (or that of any dependency) is changed, the targets will be rebuilt. If any - of them are tests, then they will be run as well. -

    - -

    - Optionally you can pass the - --run flag if you'd like the targets to be run - (using plz run) instead of just built / tested. -

    -
    - -
    -

    - plz query -

    - -

    - This allows you to introspect various aspects of the build graph. There are - a number of subcommands identifying what you want to query for: -

    - -
      -
    • - alltargets: Lists all targets in the - graph. -
    • -
    • - filter: Filter targets based on --include and --exclude. - This is commonly used with other commands. For example, to run e2e tests separately from other tests: - plz query changes --since master > plz-out/changes, then - cat plz-out/changes | plz query filter --include e2e - | plz test -. - -
    • -
    • - changes: Queries changed targets versus a - revision or from a set of files. -
    • -
    • - completions: Prints possible completions for - a string. -
    • -
    • - deps: Queries the dependencies of a - target. -
    • -
    • - graph: Prints a JSON representation of the - build graph. -
    • -
    • - input: Prints all transitive inputs of a - target. -
    • -
    • - output: Prints all outputs of a target. -
    • -
    • - print: Prints a representation of a single - target. -
    • -
    • - reverseDeps: Queries all the reverse - dependencies of a target. -
    • -
    • - somepath: Queries for a path between two - targets. -
    • -
    • - rules: Prints out a machine-parseable - description of all currently known build rules. -
    • -
    • - - whatinputs: Prints out target(s) with provided file(s) as inputs - -
    • -
    • - - whatoutputs: Prints out target(s) responsible for outputting provided file(s) - -
    • -
    - -

    - Note that this is not the same as the query language accepted by Bazel and - Buck, if you're familiar with those; generally this is lighter weight but - less flexible and powerful. We haven't ruled out adding that in the future - but have no concrete plans to do so at present. -

    -
    - -
    -

    - plz clean -

    - -

    Cleans up output build artifacts and caches.

    - -

    - This is not normally necessary since generally incrementality detection will - ensure that targets are rebuilt if needed. It's possible though for - particularly determined rules to do something they shouldn't in which case - this might be needed, or (inconceivable though it is) a bug might exist that - led to incorrect artifacts being cached. -

    - -

    - If given no arguments this cleans the entire plz-out directory and the - directory cache, if configured. It returns immediately with the actual - removal proceeding in the background; you can invoke other plz commands - freely while that continues.
    - You can pass the - --nobackground flag if you'd prefer to wait - though. -

    - -

    - If it's given targets to clean, it will need to perform a parse to work out - what to clean, and will not return until those targets have been cleaned. -

    -
    - -
    -

    plz hash

    - -

    - This command calculates the hash of outputs for one or more targets. These - can then be passed in the - hash or - hashes attributes of those targets to verify their - output is as expected - this is useful for fetching third-party dependencies - to ensure they are not changing between builds. -

    - -

    - The relevant targets will be built in order to calculate the hash, but if - they fail because it doesn't match the one recorded in the BUILD file plz - will still exit successfully (although the output files will still not be - created). -

    - -

    - One can of course achieve the same effect via running - plz build and reading the actual hash when it - fails, but this way is generally considered nicer. -

    - -

    - The --update flag will cause Please to rewrite the - BUILD file with any changed hashes that it can find. -

    -
    - -
    -

    plz fmt

    - -

    a.k.a. plz format

    - -

    - Auto-formats existing BUILD files. You can either provide a list of files to - reformat or, if none are given, it will discover all BUILD files in the - repository. -

    - -

    - The -w flag rewrites existing files in-place; if - not passed the formatted version will be printed to stdout. -

    - -

    - The implementation is currently based on a lightly modified version of - buildifier - which supports nearly a superset of the same dialect, but lacks one or two - features such as type annotations.
    - These are relatively rarely used in BUILD files though. -

    -
    - -
    -

    plz init

    - -

    - Creates an initial (and pretty empty) - .plzconfig file in the current directory (or, if - the --dir flag is passed, somewhere else). -

    - -

    You'll be warned before overwriting an existing file.

    - -

    - It will also create a wrapper script, - pleasew which runs plz if found on the local - machine, and otherwise attempts to download a copy. This can be handy for - users who don't have it installed already. -

    - -

    - There is a - --bazel_compat flag which initialises the config - file for Bazel compatibility mode. This changes behaviour in various ways to - make it easier to begin building an existing Bazel project - although more - complex projects will still likely find things that don't translate easily. -

    -
    - -
    -

    plz generate

    - -

    - This command can be used to build generated sources and link them back into - the source tree. This can be useful for tooling that expects generated sources - to be there like linters and IDEs. -

    - -

    - To build all generated sources, simply run plz generate. -

    - -

    - Please can also update a gitignore file, ignoring all the generated files automatically: - plz generate --update_gitignore .gitignore -

    - -

    To automatically link generated sources and update .gitignore files during normal builds, see the - LinkGeneratedSources, and - UpdateGitignore config values. -

    -
    - - -
    -

    - plz update -

    - -

    - Updates plz to the appropriate version. This is quite tightly governed by - the - .plzconfig file: -

    - -
      -
    • - If selfupdate is true, then it's not normally - necessary to run this since any invocation of plz will update before - running. It will still behave as normal though if invoked - explicitly. -
    • -
    • - If the version property is set then it will - attempt to download exactly that version, and fail if it can't for some - reason. - -
    • -
    • - Otherwise it will try to find the latest available version and update - to that. -
    • -
    • - The downloadlocation property determines - where it tries to download from; by default it's the central plz site, - but you could set this to a server of your own if you'd rather be more - independent. -
    • -
    -
    - -
    -

    plz export

    - -

    - Exports a subset of a please project based on a list of targets -

    - -

    - Example: plz export //cmd:main --output plz-out/export -

    - -

    There are a few flags controlling it:

    - -
      -
    • -
      -

      - -o, --output -

      - -

      - The directory to export into -

      -
      -
    • -
    • -
      -

      - --notrim -

      -

      Disables trimming unnecessary targets from exported packages. Normally targets in exported packages that - aren't dependencies of the originally exported targets are removed.

      -

      - This trimming syntax based, so doesn't always work depending on how the build definition is authored. Passing - this flag will disable this feature, avoiding cases where these rules will be erroneously trimmed. -

      -

      - To make sure a rule works without this flag, the rule must follow the naming convention, whereby children of - :name follow the format :_name#{some-tag}. This is the - format tag(name, tag) would produce. -

      -
      -
    • -
    -
    - -
    -

    plz gc

    - -

    - Runs a basic "garbage collection" step, which attempts to identify targets - that aren't in use. This is still fairly experimental since the definition - of "not used" isn't always very clear (for example, ideally simply having a - test on a library that isn't otherwise used would not be enough to keep both - of those). Because of this it suggests a set of targets that it's pretty - sure aren't used at all, and a secondary set that it's less sure on. -

    - -

    - Right now the name is a bit misleading since it finds but doesn't collect - the garbage; ideally it'd be able to rewrite the BUILD files itself. - Deleting sources is a little trickier since you'd often want to couple that - with a VC operation (i.e.git rm) and by design plz - is unaware of the VCS in use. -

    - -

    There are a few flags controlling it:

    - -
      -
    • -
      -

      - -c, --conservative -

      - -

      - Uses a more conservative algorithm (specifically any tests will keep - their targets). -

      -
      -
    • -
    • -
      -

      - -t, --targets_only -

      - -

      - Only prints the targets to be removed (not sources). Useful to pipe - them into another program. -

      -
      -
    • -
    • -
      -

      - -t, --srcs_only -

      - -

      - Only prints the sources to be removed (not targets). Useful to pipe - them into another program. -

      -
      -
    • -
    -
    - -
    -

    plz help

    - -

    - Displays help about a particular facet of Please. It knows about built-in - build rules, config settings and a few other things. Mostly this is useful - as an instant reference; you can run - plz help topics to get a list of all the topics - that it knows about. -

    -
    - -
    -

    plz op

    - -

    Re-runs whatever the previous command was.

    -
    +

    Please commands

    + +

    + Please has a rich command line interface that can be used to build and test + your code; interrogate the build graph; and much more! +

    + +
    +

    Tab completion

    + +

    + To get the most out of the Please command line interface, it is highly + recommended that you enable tab-completion. Please has a sophisticated + mechanism that is aware of your build graph, all the commands and flags, and + any aliases you may have + configured. To enable Please completions, add this line to your + .bashrc or .zshrc: +

    + +
    +    
    +    
    +    source <(plz --completion_script)
    +    
    +  
    +
    + +
    +

    Common flags

    + +

    These flags are common to all (or nearly all) operations.

    + +
    +

    + Options controlling what to build & how to build it: +

    + +
      +
    • +
      +

      + -c, --config +

      + +

      + The build config to use. The effect this has depends on the + language; typically it allows swapping between a debug or an + optimised build.
      + The default is + opt to build optimised code; + dbg is accepted for C++ and Go to build + code with debugging symbols.
      + This has no effect on Python or Java rules. +

      +
      +
    • +
    • +
      +

      + -r, --repo_root +

      + +

      + Sets the location of the repo root to use. Normally plz assumes it + is within the repo somewhere and locates the root itself, this + forces it to a specific location. +

      +
      +
    • +
    • +
      +

      + -n, --num_threads +

      + +

      + Sets the number of parallel workers to use while building. The + default is the number of logical CPUs of the current machine plus + two. +

      +
      +
    • +
    • +
      +

      + -i, --include +

      + +

      + Labels of targets to include when selecting multiple targets with + :all or /.... + These apply to labels which can be set on individual targets; a + number of them are predefined, most notably for each language (go, python, java, + cc, etc).
      + Only targets with this label will be built. +

      +
      +
    • +
    • +
      +

      + -e, --exclude +

      + +

      + The inverse of + --include; labels of targets to exclude + when selecting multiple targets with + :all or + /....
      + Takes priority over + --include.
      + You can also pass build expressions to + --exclude + to exclude targets as well as by label. +

      +
      +
    • +
    • +
      +

      + -a, --arch +

      + +

      + Architecture to compile for. By default Please will build for the + host architecture, but has some support for targeting others. See + the cross-compiling docs + for more information. +

      +
      +
    • +
    • +
      +

      + -o, --override +

      + +

      + Allows overriding individual config settings on a temporary basis; + for example + -o python.testrunner:pytest. See the + config reference + for more information on what can be overridden. +

      +
      +
    • +
    • +
      +

      + --profile +

      + +

      + Defines a profile of config file to load from the repo. For example, + --profile ci + will load + .plzconfig.ci. This can be useful to + canonicalise certain settings for non-common or scripted + configurations. +

      +
      +
    • +
    +
    + +
    +

    + Options controlling output & logging: +

    + +
      +
    • +
      +

      + -v, --verbosity +

      + +

      + Sets the amount of output logged from plz; a number between 0 and + 4.
      + Each number shows all messages at the given level and above: +

      + +
        +
      1. 0. Error
      2. +
      3. 1. Warning
      4. +
      5. 2. Notice
      6. +
      7. 3. Info
      8. +
      9. 4. Debug
      10. +
      + +

      + The default is 1, for warnings and errors only. If level 4 is + requested then it will suppress interactive output. +

      +
      +
    • +
    • +
      +

      + --log_file +

      + +

      Writes all logs out into the given file.

      +
      +
    • +
    • +
      +

      + --log_file_level +

      + +

      + Level of logging to write to the file. Defaults to 2 (notice, + warning and error). +

      +
      +
    • +
    • +
      +

      + --interactive_output +

      + +

      + Forces plz to show interactive output on stderr. By default it + autodetects based on whether stderr appears to be an interactive + terminal or not, but this flag can be used to force it on in cases + where it might get it wrong. +

      +
      +
    • +
    • +
      +

      + -p, --plain_output +

      + +

      + Forces plz not to show interactive output on stderr. Can be useful + in cases where it might obscure other messages or where the output + isn't capable of interpreting the escape codes correctly. +

      +
      +
    • +
    • +
      +

      + --colour +

      + +

      + Forces coloured output from logging & shell output. Again, this + is autodetected by default, but this can be used in cases where it + would normally detect false but it will later be consumed by + something that understands the codes (e.g. CI systems like Teamcity + or Jenkins). +

      +
      +
    • +
    • +
      +

      + --nocolour +

      + +

      + Inverse of above, forces colourless output from logging & the + shell. +

      +
      +
    • +
    • +
      +

      + --trace_file +

      + +

      + File to write Chrome tracing output into.
      + This is a JSON format that contains the actions taken by plz during + the build and their timings. You can load the file up in + about:tracing + and use that to see which parts of your build were slow. +

      +
      +
    • +
    • +
      +

      + --version +

      + +

      Prints the version of the tool and exits immediately.

      +
      +
    • +
    • +
      +

      + --show_all_output +

      + +

      + Prints all output of each building process as they run. Implies + --plain_output. +

      +
      +
    • +
    • +
      +

      + --completion_script +

      + +

      + Prints the bash / zsh completion script to stdout. This can be used + in a + .bashrc or + .zshrc, e.g. + source <(plz --completion_script). +

      +
      +
    • +
    +
    + +
    +

    + Options that enable / disable certain features: +

    + +
      +
    • +
      +

      + --noupdate +

      + +

      Disables Please attempting to auto-update itself.

      +
      +
    • +
    • +
      +

      + --nohash_verification +

      + +

      + Turns hash verification errors into non-fatal warnings.
      + Obviously this is only for local development & testing, not for + 'production' use. +

      +
      +
    • +
    • +
      +

      + --nolock +

      + +

      + Don't attempt to lock the repo exclusively while building.
      + Use with care - if two instances of plz start building the same + targets simultaneously they will likely fail with very strange + errors. +

      +
      +
    • +
    • +
      +

      + --keep_workdirs +

      + +

      + Don't clean directories in plz-out/tmp after successfully building + targets.
      + They're always left in cases where targets fail. +

      +
      +
    • +
    +
    +
    + +
    +

    + plz build +

    + +

    + This is the most common and obvious command; it builds one or more targets + and all their dependencies. A plain + plz build attempts to build everything, but more + usually you can tell it to build a particular target or targets by passing + them on the command line afterwards. For example: +

    + +
      +
    • + plz build //src/core:core builds just the one + target. +
    • +
    • + plz build //src/core:all builds every target + in src/core/BUILD. +
    • +
    • + plz build //src/... builds every target in + src and all subdirectories. +
    • +
    +
    + +
    +

    plz test

    + +

    + This is also a very commonly used command, it builds one or more targets and + then runs their tests. Which tests to run are specified by positional + arguments as described for + plz build. +

    + +

    + After successful completion a combined test output file will be written to + plz-out/log/test_results.xml + in something approximating xUnit XML format. +

    + +

    It takes a few special flags:

    +
      +
    • +
      +

      + --num_runs +

      + +

      + Determines how many times to run each test. The default is 1, but can + be more for tests marked as flaky. +

      +
      +
    • +
    • +
      +

      + --failing_tests_ok +

      + +

      + The return value is 0 regardless of whether any tests fail or not. It + will only be nonzero if they fail to build completely.
      + This is not commonly used, it's mostly useful for CI automation which + will parse the results file to determine ultimate success / failure. +

      +
      +
    • +
    • +
      +

      + --test_results_file +

      + +

      Specifies the location to write the combined test results to.

      +
      +
    • +
    • +
      +

      + -d, --debug +

      + +

      + Turns on interactive debug mode for this test. You can only specify + one test with this flag, because it attaches an interactive debugger + to catch failures.
      + It only works for some test types, currently python (with pytest as + the test runner), C and C++.
      + It implies + -c dbg unless that flag is explicitly + passed. +

      +
      +
    • +
    • +
      +

      + --rerun +

      + +

      + Forces the rerun of a test, even if the hash has not changed. +

      +
      +
    • +
    +
    + +
    +

    + plz cover +

    + +

    + Very similar to + plz test, but also instruments tests for coverage + and collects results. Tests normally run significantly slower in this mode + (the exact amount depends on the language). +

    + +

    Coverage isn't available for C++ tests at present.

    + +

    + All the same flags from + plz test apply here as well. In addition there are + several more: +

    + +
      +
    • +
      +

      + --nocoverage_report +

      + +

      Suppresses the coverage report output to the shell.

      +
      +
    • +
    • +
      +

      + --line_coverage_report +

      + +

      Produces a line-by-line coverage display for all source files.

      +
      +
    • +
    • +
      +

      + --include_all_files +

      + +

      + Includes any transitively dependent source files in the coverage + report (the default is just files from relevant packages). +

      +
      +
    • +
    • +
      +

      + --include_file +

      + +

      + Files to include in the coverage report (the flag can be passed more + than once for multiple). +

      +
      +
    • +
    • +
      +

      + --coverage_results_file +

      + +

      + Similar to + --test_results_file, determines where to + write the aggregated coverage results to. +

      +
      +
    • +
    • +
      +

      + -d, --debug +

      + +

      + Turns on interactive debug mode for this test. You can only specify + one test with this flag, because it attaches an interactive debugger + to catch failures.
      + It only works for some test types, currently python (with pytest as + the test runner), C and C++.
      + It implies + -c dbg unless that flag is explicitly + passed. +

      +
      +
    • +
    +
    + +
    +

    plz run

    + +

    + This is essentially shorthand for calling + plz build and then running the result of whatever + target was built. It's often handy for iterating on a single target such + that one command builds and reruns it. +

    + +

    + Because of the way the target is run after, you have to provide exactly one + target to this command. The target must be marked as + binary in its rule definition (this is implicit + for the various builtin _binary rules such as + go_binary etc). +

    + +

    + If you want to pass flags to the target rather than plz itself, you must + pass them last on the command line, after a + --. This tells Please not to attempt to parse them + as its own flags. +

    + +

    + There are two optional subcommands + sequential and + parallel which allow running multiple targets in + one go. As the names suggest, they run targets either one after the other or + all in parallel.
    + In either case, the semantics are a little different to running a single + target; arguments must be passed one by one via the + -a flag, and while stdout / stderr are connected + to the current terminal, stdin is not connected (because it'd not be clear + which process would consume it). +

    +
    + +
    +

    plz exec

    + +

    + This command executes the target in a hermetic build environment, as opposed + to the plz run command. This allows for use cases, + such as: debugging/profiling programs that may require a predictable environment, + or running E2E tests reliant on external state which doesn't fit with Please's + caching approach. +

    + +

    + The --share_network and --share_mount flags are available (Linux only) for greater control over the sandboxed environment + where the target is run. The --share_network flag is useful + in situations where the host system might want to connect to a server that the command + started. +

    + +

    + The --output_path and --out flags allow for artifacts, produced by the command executed in the sandboxed environment, + to be copied onto the host system where plz exec is being + run from. +

    + +

    + Non-binary targets are also supported, but a custom command (see above) is required since + there isn't a binary produced that can be executed by default. These targets' results can + be accessed via the $OUTS environment variable. +

    + +

    + Only a single command is supported per execution with plz exec. + Multiple can be run with plz exec sequential or plz exec parallel, + which are analogous to their plz run equivalents. +

    +
    + +
    +

    + plz watch +

    + +

    + Watches a set of targets for changes. Whenever any one of their source files + (or that of any dependency) is changed, the targets will be rebuilt. If any + of them are tests, then they will be run as well. +

    + +

    + Optionally you can pass the + --run flag if you'd like the targets to be run + (using plz run) instead of just built / tested. +

    +
    + +
    +

    + plz query +

    + +

    + This allows you to introspect various aspects of the build graph. There are + a number of subcommands identifying what you want to query for: +

    + +
      +
    • + alltargets: Lists all targets in the + graph. +
    • +
    • + filter: Filter targets based on --include and --exclude. + This is commonly used with other commands. For example, to run e2e tests separately from other tests: + plz query changes --since master > plz-out/changes, then + cat plz-out/changes | plz query filter --include e2e - | plz test -. + +
    • +
    • + changes: Queries changed targets versus a + revision or from a set of files. +
    • +
    • + completions: Prints possible completions for + a string. +
    • +
    • + deps: Queries the dependencies of a + target. +
    • +
    • + graph: Prints a JSON representation of the + build graph. +
    • +
    • + input: Prints all transitive inputs of a + target. +
    • +
    • + output: Prints all outputs of a target. +
    • +
    • + print: Prints a representation of a single + target. +
    • +
    • + reverseDeps: Queries all the reverse + dependencies of a target. +
    • +
    • + somepath: Queries for a path between two + targets. +
    • +
    • + rules: Prints out a machine-parseable + description of all currently known build rules. +
    • +
    • + + whatinputs: Prints out target(s) with provided file(s) as inputs + +
    • +
    • + + whatoutputs: Prints out target(s) responsible for outputting provided file(s) + +
    • +
    + +

    + Note that this is not the same as the query language accepted by Bazel and + Buck, if you're familiar with those; generally this is lighter weight but + less flexible and powerful. We haven't ruled out adding that in the future + but have no concrete plans to do so at present. +

    +
    + +
    +

    + plz clean +

    + +

    Cleans up output build artifacts and caches.

    + +

    + This is not normally necessary since generally incrementality detection will + ensure that targets are rebuilt if needed. It's possible though for + particularly determined rules to do something they shouldn't in which case + this might be needed, or (inconceivable though it is) a bug might exist that + led to incorrect artifacts being cached. +

    + +

    + If given no arguments this cleans the entire plz-out directory and the + directory cache, if configured. It returns immediately with the actual + removal proceeding in the background; you can invoke other plz commands + freely while that continues.
    + You can pass the + --nobackground flag if you'd prefer to wait + though. +

    + +

    + If it's given targets to clean, it will need to perform a parse to work out + what to clean, and will not return until those targets have been cleaned. +

    +
    + +
    +

    plz hash

    + +

    + This command calculates the hash of outputs for one or more targets. These + can then be passed in the + hash or + hashes attributes of those targets to verify their + output is as expected - this is useful for fetching third-party dependencies + to ensure they are not changing between builds. +

    + +

    + The relevant targets will be built in order to calculate the hash, but if + they fail because it doesn't match the one recorded in the BUILD file plz + will still exit successfully (although the output files will still not be + created). +

    + +

    + One can of course achieve the same effect via running + plz build and reading the actual hash when it + fails, but this way is generally considered nicer. +

    + +

    + The --update flag will cause Please to rewrite the + BUILD file with any changed hashes that it can find. +

    +
    + +
    +

    plz fmt

    + +

    a.k.a. plz format

    + +

    + Auto-formats existing BUILD files. You can either provide a list of files to + reformat or, if none are given, it will discover all BUILD files in the + repository. +

    + +

    + The -w flag rewrites existing files in-place; if + not passed the formatted version will be printed to stdout. +

    + +

    + The implementation is currently based on a lightly modified version of + buildifier + which supports nearly a superset of the same dialect, but lacks one or two + features such as type annotations.
    + These are relatively rarely used in BUILD files though. +

    +
    + +
    +

    plz init

    + +

    + Creates an initial (and pretty empty) + .plzconfig file in the current directory (or, if + the --dir flag is passed, somewhere else). +

    + +

    You'll be warned before overwriting an existing file.

    + +

    + It will also create a wrapper script, + pleasew which runs plz if found on the local + machine, and otherwise attempts to download a copy. This can be handy for + users who don't have it installed already. +

    + +

    + There is a + --bazel_compat flag which initialises the config + file for Bazel compatibility mode. This changes behaviour in various ways to + make it easier to begin building an existing Bazel project - although more + complex projects will still likely find things that don't translate easily. +

    +
    + +
    +

    plz generate

    + +

    + This command can be used to build generated sources and link them back into + the source tree. This can be useful for tooling that expects generated sources + to be there like linters and IDEs. +

    + +

    + To build all generated sources, simply run plz generate. +

    + +

    + Please can also update a gitignore file, ignoring all the generated files automatically: + plz generate --update_gitignore .gitignore +

    + +

    To automatically link generated sources and update .gitignore files during normal builds, see the + LinkGeneratedSources and + UpdateGitignore config values. +

    +
    + + +
    +

    + plz update +

    + +

    + Updates plz to the appropriate version. This is quite tightly governed by + the + .plzconfig file: +

    + +
      +
    • + If selfupdate is true, then it's not normally + necessary to run this since any invocation of plz will update before + running. It will still behave as normal though if invoked + explicitly. +
    • +
    • + If the version property is set then it will + attempt to download exactly that version, and fail if it can't for some + reason. + +
    • +
    • + Otherwise it will try to find the latest available version and update + to that. +
    • +
    • + The downloadlocation property determines + where it tries to download from; by default it's the central plz site, + but you could set this to a server of your own if you'd rather be more + independent. +
    • +
    +
    + +
    +

    plz export

    + +

    + Exports a subset of a please project based on a list of targets +

    + +

    + Example: plz export //cmd:main --output plz-out/export +

    + +

    There are a few flags controlling it:

    + +
      +
    • +
      +

      + -o, --output +

      + +

      + The directory to export into +

      +
      +
    • +
    • +
      +

      + --notrim +

      +

      Disables trimming unnecessary targets from exported packages. Normally targets in exported packages that + aren't dependencies of the originally exported targets are removed.

      +

      + This trimming is syntax-based, so doesn't always work depending on how the build definition is authored. Passing + this flag will disable this feature, avoiding cases where these rules will be erroneously trimmed. +

      +

      + To make sure a rule works without this flag, the rule must follow the naming convention, whereby children of + :name follow the format :_name#{some-tag}. This is the + format tag(name, tag) would produce. +

      +
      +
    • +
    +
    + +
    +

    plz gc

    + +

    + Runs a basic "garbage collection" step, which attempts to identify targets + that aren't in use. This is still fairly experimental since the definition + of "not used" isn't always very clear (for example, ideally simply having a + test on a library that isn't otherwise used would not be enough to keep both + of those). Because of this it suggests a set of targets that it's pretty + sure aren't used at all, and a secondary set that it's less sure on. +

    + +

    + Right now the name is a bit misleading since it finds but doesn't collect + the garbage; ideally it'd be able to rewrite the BUILD files itself. + Deleting sources is a little trickier since you'd often want to couple that + with a VC operation (i.e. git rm) and by design plz + is unaware of the VCS in use. +

    + +

    There are a few flags controlling it:

    + +
      +
    • +
      +

      + -c, --conservative +

      + +

      + Uses a more conservative algorithm (specifically any tests will keep + their targets). +

      +
      +
    • +
    • +
      +

      + -t, --targets_only +

      + +

      + Only prints the targets to be removed (not sources). Useful to pipe + them into another program. +

      +
      +
    • +
    • +
      +

      + -s, --srcs_only +

      + +

      + Only prints the sources to be removed (not targets). Useful to pipe + them into another program. +

      +
      +
    • +
    +
    + +
    +

    plz help

    + +

    + Displays help about a particular facet of Please. It knows about built-in + build rules, config settings and a few other things. Mostly this is useful + as an instant reference; you can run + plz help topics to get a list of all the topics + that it knows about. +

    +
    + +
    +

    plz op

    + +

    Re-runs whatever the previous command was.

    +
    From 93f5d8655c7249f8312f665073db302b6233ef52 Mon Sep 17 00:00:00 2001 From: Anya Xiao <73641458+scyyx5@users.noreply.github.com> Date: Fri, 24 Oct 2025 13:19:16 +0100 Subject: [PATCH 32/38] Revise genrule documentation --- docs/codelabs/genrule.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/codelabs/genrule.md b/docs/codelabs/genrule.md index f73642749..d85f19e49 100644 --- a/docs/codelabs/genrule.md +++ b/docs/codelabs/genrule.md @@ -273,9 +273,10 @@ wc -w $@ ### `tools/BUILD` ```python -sh_binary( +filegroup( name = "wc", - main = "wc.sh", + srcs = ["wc.sh"], + binary = True, visibility = ["PUBLIC"], ) ``` @@ -404,3 +405,4 @@ If you create something you believe will be useful to the wider world, we might [pleasings](https://github.com/thought-machine/pleasings) repo! If you get stuck, jump on [gitter](https://gitter.im/please-build/Lobby) and we'll do our best to help you! + From e24fa5bb6a9d454cdd9e2dcdc42d54260a385bb8 Mon Sep 17 00:00:00 2001 From: Anya Xiao <73641458+scyyx5@users.noreply.github.com> Date: Sat, 25 Oct 2025 13:59:30 +0100 Subject: [PATCH 33/38] Update codelab for managing Go dependencies with Puku --- docs/codelabs/go_module.md | 586 ++++++++++++++++++++++--------------- 1 file changed, 346 insertions(+), 240 deletions(-) diff --git a/docs/codelabs/go_module.md b/docs/codelabs/go_module.md index 10d1d28d2..a4e5cbfb2 100644 --- a/docs/codelabs/go_module.md +++ b/docs/codelabs/go_module.md @@ -1,5 +1,5 @@ -summary: Third-party dependencies with go_module() -description: Set up gRPC and learn how to manage third party dependencies with Please +summary: Third-party dependencies with Puku +description: Add, update, pin, and remove Go third-party dependencies using go get and plz puku (no go_module()) id: go_module categories: beginner tags: medium @@ -7,18 +7,38 @@ status: Published authors: Jon Poole Feedback Link: https://github.com/thought-machine/please -# Third-party dependencies with `go_module()` +# 
Third-party dependencies with Puku + ## Overview -Duration: 1 +Duration: 2 -### Prerequisites -- You must have Please installed: [Install Please](https://please.build/quickstart.html) +Notes: `go_module()` is deprecated in Core3. This codelab teaches a practical workflow that uses standard Go tooling (`go get` / `go mod`) together with Puku to generate and maintain third-party go targets (`go_repo`). + +### Goals +- Add a new third‑party dependency with `go get` +- Sync the dependency into Please with `plz puku sync` +- Let puku update BUILD deps with `plz puku fmt` +- Upgrade, pin/exclude, and remove modules safely +- Diagnose missing import / missing subrepo issues -### What you'll learn -In this codelab, we'll be setting up Please to compile third party go modules. You'll learn how to: -- Use go_module() to download and compile third party go modules -- Download and compile the library and binary parts of a module separately -- Resolving cyclical dependencies between modules +You will not use `go_module()` in this guide. + +### Prerequisites +- Please installed and configured: https://please.build/quickstart.html +- Go 1.20+ installed and on PATH +- Puku available in one of the following ways: + - Via Please alias: add an alias to `.plzconfig` (see below), or + - Installed locally (if the first doesn't work, try the second): + - `go install github.com/please-build/puku/cmd/puku@latest` + - `go get github.com/please-build/puku/cmd/puku` + +### What you’ll learn +- Add and upgrade dependencies with `go get` +- Sync `go.mod` into `third_party/go/BUILD` with `plz puku sync` +- Let `plz puku fmt` add third-party deps to your BUILD targets +- Diagnose missing imports and missing subrepos +- Pin or exclude dependency versions with `go mod edit` +- Remove third-party modules safely ### What if I get stuck? 
@@ -26,310 +46,396 @@ The final result of running through this codelab can be found [here](https://github.com/thought-machine/please-codelabs/tree/main/go_modules) for reference. If you really get stuck you can find us on [gitter](https://gitter.im/please-build/Lobby)! -## Initialising your project -Duration: 2 +## Initialising your project and running puku with please +Duration: 5 The easiest way to get started is from an existing Go module: -``` -$ mkdir go_module && cd go_module -$ go mod init example_module -$ plz init --no_prompt -$ plz init plugin go +```bash +mkdir puku_sync && cd puku_sync +go mod init example_module +plz init --no_prompt +plz init plugin go ``` -### A note about your Please PATH -Please doesn't use your host system's `PATH` variable. If where you installed Go isn't in this default path, you will -need to add the following to `.plzconfig`: +Define a valid Puku version number as a build configuration string in `.plzconfig`: + ``` -[build] -path = $YOUR_GO_INSTALL_HERE:/usr/local/bin:/usr/bin:/bin +[BuildConfig] +puku-version = "1.17.0" ``` -You can find out where Go is installed with `dirname $(which go)`. +Uncomment and edit the following lines in your `.plzconfig` to set up `please` version: -## Dependencies in Please vs. go build -Duration: 3 +``` +[please] +version = 17.22.0 +``` + +Configure a Please alias for Puku (optional but convenient): -If you're coming from a language specific build system like `go build`, Please can feel a bit alien. Please is language -agnostic so can't parse you source code to automatically update its BUILD files when you add a new import like -`go mod edit` would for `go build`. +``` +[Alias "puku"] +Cmd = run //third_party/binary:puku -- +PositionalLabels = true +Desc = A tool to update BUILD files in Go packages +``` -Instead, you must strictly define all the dependencies of each module. 
This allows Please to build go modules in a -controlled and reproducible way without actually having to understand go itself. However, it does take a little more -work to set up. +With the alias, you can use `plz puku` instead of `plz run //third_party/binary:puku`. -A basic `go_module()` usage might look like: +Then download that version of Puku in `third_party/binary/BUILD`: -### `third_party/go/BUILD` ```python -go_module( - name = "protobuf_go", - # By default, we only install the top level package i.e. golang.org/x/sys. To - # compile everything, use this wildcard. - install = ["..."], - module = "google.golang.org/protobuf", - version = "v1.25.0", - # We must tell Please that :protobuf_go depends on :cmp so we can link to it. - deps = [":cmp"], +remote_file( + name = "puku", + url = f"https://github.com/please-build/puku/releases/download/v{CONFIG.PUKU_VERSION}/puku-{CONFIG.PUKU_VERSION}-{CONFIG.OS}_{CONFIG.ARCH}", + binary = True, ) +``` + +Configure the Go plugin to point at your go.mod (recommended). Create a repo-root `BUILD` with a filegroup for go.mod: -go_module( - name = "cmp", - install = ["cmp/..."], - module = "github.com/google/go-cmp", - version = "v0.5.5", +1) Add a filegroup for go.mod at `BUILD` in repo root: +```python +filegroup( + name = "gomod", + srcs = ["go.mod"], + visibility = ["PUBLIC"], ) ``` -### A note on install -We talk about installing a package. This nomenclature comes from `go install` which would compile a package and -install it in the go path. In Please terms, this means compiling and storing the result in `plz-out`. We're not -installing anything system wide. +2) Update your `.plzconfig`: +``` +[Plugin "go"] +Target = //plugins:go +ModFile = //:gomod +``` -The install list can contain exact packages, or could contain wildcards: +This lets Puku use standard `go get` to resolve modules, then sync them into `third_party/go/BUILD`. 
-### `third_party/go/BUILD` -```python -go_module( - name = "module", - module = "example.com/some/module", - version = "v1.0.0", - install = [ - ".", # Refers to the root package of the module. This is the default if no install list is provided. - "...", # Refers to everything in the module - "foo/...", # installs example.com/some/module/foo and everything under it - "foo/bar", # installs example.com/some/module/foo/bar only - ] -) +### Configuring the PATH for Go + +By default, Please looks for Go in the following locations: +``` +/usr/local/bin:/usr/bin:/bin +``` + +If you installed Go elsewhere (e.g., via Homebrew on macOS, or a custom location), you must configure the path in `.plzconfig`. + +First, find where your Go binary is located: +```bash +which go ``` -## go_mod_download() +Then add the path to `.plzconfig`. For example, if Go is at `/opt/homebrew/bin/go`: + +```ini +[Build] +Path = /opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin +``` + +Or if it's at `/usr/local/go/bin/go`: + +```ini +[Build] +Path = /usr/local/go/bin:/usr/local/bin:/usr/bin:/bin +``` + +**Note:** On Windows, use `where.exe go` to find the Go installation path. + +### Installing the Go standard library (Go 1.20+) + +From Go version 1.20 onwards, the standard library is no longer included by default with the Go distribution. You must install it manually: + +```bash +GODEBUG="installgoroot=all" go install std +``` + +## Adding and updating modules Duration: 5 -For most modules, you can get away with compiling them in one pass. Sometimes it can be useful to split this out into -separate rules. There are many reasons to do this, for example: to resolve cyclic dependencies; download from a fork -of a repo; or to vendor a module. +Let's add a new third-party dependency using `go get` and sync it with Puku. -Another common case is when modules have a `main` package but can also act as a library. 
One example of this is -`github.com/golang/protobuf` which contains the protobuf library, as well as the protoc plugin for go. We might want to -have a binary rule for the protoc plugin, so we can refer to that in our proto config in our `.plzconfig`. +### Adding a new module -To do this, we create a `go_mod_download()` rule that will download our sources for us: +First, let's create a simple Go program that uses a third-party library. Create a file `src/hello/hello.go`: -### `third_party/go/BUILD` -```python -go_mod_download( - name = "protobuf_download", - module = "github.com/golang/protobuf", - version = "v1.4.3", +```go +package main + +import ( + "fmt" + "github.com/google/uuid" ) + +func main() { + id := uuid.New() + fmt.Printf("Generated UUID: %s\n", id.String()) +} ``` -We can then create a rule to compile the library like so: -```python -go_module( - name = "protobuf", - # Depend on our download rule instead of providing a version - download = ":protobuf_download", - install = ["..."], - module = "github.com/golang/protobuf", - # Let's skip compiling this package which as we're compiling this separately. - strip = ["protoc-gen-go"], - deps = [":protobuf_download"], -) +Now add the dependency with `go get`: + +```bash +GOTOOLCHAIN=local go get github.com/google/uuid ``` -And then compile the main package under `github.com/golang/protobuf/protoc-gen-go` like so: +Sync the changes to `third_party/go/BUILD`: + +```bash +plz puku sync -w +``` + +This creates a `go_repo()` rule in `third_party/go/BUILD` for the `uuid` module. You may need to create the `third_party/go/BUILD` file if it doesn't exist. 
+ +### Creating the BUILD file + +Create `src/hello/BUILD`: + ```python -go_module( - name = "protoc-gen-go", - # Mark this as binary so Please knows it can be executed - binary = True, - # Depend on our download rule instead of providing a version - download = ":protobuf_download", - install = ["protoc-gen-go"], - module = "github.com/golang/protobuf", - deps = [":protobuf_go"], +go_binary( + name = "hello", + srcs = ["hello.go"], ) ``` -## Resolving cyclic dependencies -Duration: 5 +Now let Puku automatically add the dependency: -While go packages can't be cyclically dependent on each other, go modules can. For the most part, this is considered -bad practice and is quite rare, however the `google.golang.org/grpc` and `google.golang.org/genproto` modules are one -such example. +```bash +plz puku fmt //src/hello +``` -In order to solve this, we need to figure out what parts of the modules actually depend on each other. We can then -download that module and compile these two parts separately. We will use `go_mod_download()` to achieve this. +Puku will update your BUILD file to include the dependency on `//third_party/go:google-uuid` (or the subrepo format). -N.B. To run a gRPC service written in go, you will have to install almost all of `google.golang.org/grpc`. For the sake -of brevity, this example only install the subset that `google.golang.org/genproto` needs. You may want to complete this -by adding `go_module()` rules for the rest of the modules `google.golang.org/grpc` depends on. 
+Build and run your program: -### Installing gRPC's deps `third_party/go/BUILD` -First we must install the dependencies of `google.golang.org/grpc`: -```python -go_module( - name = "xsys", - module = "golang.org/x/sys", - install = ["..."], - version = "v0.0.0-20210415045647-66c3f260301c", -) +```bash +plz run //src/hello +``` -go_module( - name = "net", - install = ["..."], - module = "golang.org/x/net", - version = "136a25c244d3019482a795d728110278d6ba09a4", - deps = [ - ":crypto", - ":text", - ], -) +### Updating an existing module -go_module( - name = "text", - install = [ - "secure/...", - "unicode/...", - "transform", - "encoding/...", - ], - module = "golang.org/x/text", - version = "v0.3.5", -) +To update a module to a specific version: -go_module( - name = "crypto", - install = [ - "ssh/terminal", - "cast5", - ], - module = "golang.org/x/crypto", - version = "7b85b097bf7527677d54d3220065e966a0e3b613", -) +```bash +GOTOOLCHAIN=local go get github.com/google/uuid@v1.6.0 +plz puku sync -w ``` -### Finding out what gRPC needs `third_party/go/BUILD` +To update to the latest version: -Next let's try and compile gRPC. We know it has a dependency on some of genproto, but let's set that aside for now: -```python -go_module( - name = "grpc", - module = "google.golang.org/grpc", - version = "v1.34.0", - # Installing just a subset of stuff to reduce the complexity of this example. You may want to just install "...", - # and add the rest of the dependencies. - install = [ - ".", - "codes", - "status", - ], - deps = [ - # ":genproto", - ":cmp", - ":protobuf", - ":xsys", - ":net", - ":protobuf_go", - ], -) +```bash +GOTOOLCHAIN=local go get -u github.com/google/uuid +plz puku sync -w ``` -If we attempt to compile this, we will get an exception along the lines of: +After syncing, rebuild your targets to use the updated version. + +### Troubleshooting + +**Missing import error?** If you see `could not import ... 
(open : no such file or directory)`, the module providing that package is missing. Add it with: + +```bash +go get <module-path> +plz puku sync -w ``` + +**Missing subrepo error?** If you see `Subrepo ... is not defined`, you need to add or migrate the module: + +```bash +go get <module-path> +plz puku sync -w ``` + +## Stop a module from updating +Duration: 3 + +Sometimes you need to prevent a module from being updated due to breaking changes or compatibility issues. + +### Excluding a specific version + +Use the `exclude` directive to prevent a specific version from being used: + +```bash +go mod edit -exclude github.com/example/module@v2.0.0 +plz puku sync -w +``` + +This prevents version `v2.0.0` from being selected. Go will use the next highest non-excluded version. + +To remove an exclusion: + +```bash +go mod edit -dropexclude github.com/example/module@v2.0.0 +plz puku sync -w +``` + +### Pinning to a specific version + +Use the `replace` directive to pin a module to a specific version: + +```bash +go mod edit -replace github.com/example/module=github.com/example/module@v1.5.0 +plz puku sync -w +``` + +This pins the module to `v1.5.0` regardless of what other dependencies require. 
+ +To unpin (and upgrade at the same time): + +```bash +go mod edit -dropreplace github.com/example/module +go get -u github.com/example/module +plz puku sync -w +``` + +**Warning:** Pinning modules can cause compatibility issues with other dependencies. Use sparingly and resolve as soon as possible. + +### Example scenario + +Let's say a new version of `uuid` has a breaking change. Pin it to a working version: + +```bash +go mod edit -replace github.com/google/uuid=github.com/google/uuid@v1.3.0 +plz puku sync -w +plz build //src/hello +``` + +## Removing modules +Duration: 3 + +Before removing a module, ensure it's not used anywhere in your codebase. + +### Steps to remove a module + +1. **Verify no dependencies exist:** + +```bash +plz query revdeps //third_party/go:module_name --level=-1 | grep -v //third_party/go +``` + +If this returns no results, the module is safe to remove. + +2. **Remove the `go_repo()` target from `third_party/go/BUILD`:** + +Open `third_party/go/BUILD` and delete the corresponding `go_repo()` rule. + +3. **Remove from `go.mod` and `go.sum`:** + +```bash +go mod edit -droprequire github.com/example/module +go mod tidy +``` + +4. **Sync the changes:** + +```bash +plz puku sync -w ``` -And update our `:grpc` rule to add `:genproto_rpc` as a dependency: +**Note:** Puku does not currently automate module removal, so this process is manual. + +### Example + +Let's say we want to remove an unused module: + +```bash +# Check for dependencies +plz query revdeps //third_party/go:unused_module --level=-1 | grep -v //third_party/go + +# If safe, remove from go.mod +go mod edit -droprequire github.com/unused/module +go mod tidy + +# Manually delete the go_repo() rule from third_party/go/BUILD +# Then sync +plz puku sync -w +``` + +## Using new modules +Duration: 4 + +Once you've added a module with `go get` and `plz puku sync`, you can use it in your code. 
+ +### Automatic dependency management + +The easiest way is to let Puku handle dependencies automatically: + +1. Import the package in your `.go` file +2. Run `plz puku fmt //your/package` + +Puku will parse your imports and add the necessary dependencies to your BUILD file. + +### Manual dependency specification + +There are two ways to specify dependencies on third-party packages: + +**1. Subrepo convention (recommended):** + ```python -go_module( - name = "grpc", - module = "google.golang.org/grpc", - version = "v1.34.0", - # Installing just a subset of stuff to reduce the complexity of this example. You may want to just install "...", - # and add the rest of the dependencies. - install = [ - ".", - "codes", - "status", - ], +go_library( + name = "mylib", + srcs = ["mylib.go"], deps = [ - ":genproto_rpc", - ":cmp", - ":protobuf", - ":xsys", - ":net", - ":protobuf_go", + "///third_party/go/github.com_google_uuid//", ], ) ``` -And if we compile that with `plz build //third_party/go:grpc //third_party/go:genproto_api` we should see they build -now. +The subrepo format is: `///third_party/go/<module>//<package>` + +**2. Install list (go_module style):** -## Using third party libraries -Third party dependencies can be depended on in the same way as `go_library()` rules: +Add packages to the `install` list on the `go_repo()` target: + +```python +go_repo( + name = "google-uuid", + module = "github.com/google/uuid", + version = "v1.6.0", + install = ["."], # Installs the root package +) +``` + +Then depend on it like: -### `third_party/go/BUILD` ```python go_library( - name = "service", - srcs = ["service.go"], - deps = ["//third_party/go:net"], + name = "mylib", + srcs = ["mylib.go"], + deps = ["//third_party/go:google-uuid"], ) ``` -For more information on writing go code with Please, check out the [go](/codelabs/go_intro) codelab. +### Watch mode + +For active development, use watch mode to automatically update BUILD files as you code: + +```bash +plz puku watch //src/... 
+``` + +This watches for changes to `.go` files and updates dependencies automatically. + +### Best practices + +- Use `plz puku fmt` to keep dependencies up to date +- Use the subrepo format for better build incrementality +- Review changes before committing to avoid unexpected version changes +- Run `plz test` after adding/updating dependencies to catch issues early ## What's next? Duration: 1 -Hopefully you now have an idea as to how to build Go modules with Please. Please is capable of so much more though! +Congratulations! You now know how to manage Go third-party dependencies using `go get` and Puku. + +### Learn more -- [Please basics](/basics.html) - A more general introduction to Please. It covers a lot of what we have in this -tutorial in more detail. -- [go plugin rules](/plugins.html#go) - See the rest of the Go plugin rules and config. +- [Puku GitHub repository](https://github.com/please-build/puku) - Complete Puku reference +- [Please basics](/basics.html) - A more general introduction to Please. It covers a lot of what we have in this tutorial in more detail. +- [Go plugin rules](/plugins.html#go) - See the rest of the Go plugin rules and config. - [Built-in rules](/lexicon.html#go) - See the rest of the built in rules. - [Config](/config.html) - See the available config options for Please. -- [Command line interface](/commands.html) - Please has a powerful command line interface. Interrogate the build graph, -determine file changes since master, watch rules and build them automatically as things change, and much more! Use -`plz help`, and explore this rich set of commands! +- [Command line interface](/commands.html) - Please has a powerful command line interface. Interrogate the build graph, determine file changes since master, watch rules and build them automatically as things change, and much more! Use `plz help`, and explore this rich set of commands! Otherwise, why not try one of the other codelabs! 
From 4ea14169978affc1bc29b4b92f22c96654a86ce8 Mon Sep 17 00:00:00 2001 From: Anya Xiao <73641458+scyyx5@users.noreply.github.com> Date: Sat, 25 Oct 2025 14:54:50 +0100 Subject: [PATCH 34/38] Normalize line endings --- docs/codelabs/genrule.md | 816 +++++------ docs/codelabs/github_actions.md | 352 ++--- docs/codelabs/go_intro.md | 1012 +++++++------- docs/codelabs/k8s.md | 912 ++++++------- docs/codelabs/plz_query.md | 620 ++++----- docs/codelabs/python_intro.md | 710 +++++----- docs/codelabs/using_plugins.md | 318 ++--- docs/commands.html | 2260 +++++++++++++++---------------- 8 files changed, 3500 insertions(+), 3500 deletions(-) diff --git a/docs/codelabs/genrule.md b/docs/codelabs/genrule.md index d85f19e49..8b9f996b4 100644 --- a/docs/codelabs/genrule.md +++ b/docs/codelabs/genrule.md @@ -1,408 +1,408 @@ -summary: Writing custom build definitions -description: Start here to learn how to write custom build rules to automate nearly anything in your build -id: genrule -categories: intermediate -tags: medium -status: Published -authors: Jon Poole -Feedback Link: https://github.com/thought-machine/please - -# Custom build rules with `genrule()` -## Overview -Duration: 1 - -### Prerequisites -- You must have Please installed: [Install Please](https://please.build/quickstart.html) -- You should be comfortable using the existing build rules. - -### What you'll learn -We'll be working through a contrived example writing a build definition for -[wc](https://www.gnu.org/software/coreutils/manual/html_node/wc-invocation.html#wc-invocation) from core utils. -In doing so you'll: -- Be introduced to genrule(), the generic build rule -- Explore the build environment with `--shell` -- Write and use custom build rule definitions -- Manage and write custom tools for your build definition -- Add configuration for your build definitions - -### What if I get stuck? 
- -The final result of running through this codelab can be found -[here](https://github.com/thought-machine/please-codelabs/tree/main/custom_rules) for reference. If you really get stuck -you can find us on [gitter](https://gitter.im/please-build/Lobby)! - -## genrule() -Duration: 3 - -Before we jump into writing custom build definitions, let me introduce you to `genrule()`, the generic build rule. Let's -just create a new project and initialise Please in it: -```bash -mkdir custom_rules && cd custom_rules -plz init --no_prompt -``` - -Then create a `BUILD` file in the root of the repository like so: -### `BUILD` -```python -genrule( - name = "word_count", - srcs = ["file.txt"], - deps = [], - cmd = "wc $SRC > $OUT", - outs = ["file.wc"], -) -``` - -Then create file.txt: -```bash -echo "the quick brown fox jumped over the lazy dog" > file.txt -``` - -and build it: - -```bash -$ plz build //:word_count -Build finished; total time 70ms, incrementality 0.0%. Outputs: -//:word_count: - plz-out/gen/file.wc - -$ cat plz-out/gen/file.wc - 1 9 45 file.txt -``` - -### Troubleshooting: "can't store data at section "scm"" - -This message means the runner is using an older Please release that doesn’t understand the `[scm]` section in your `.plzconfig`, so parsing fails before any build work begins. - -**How to fix** -- Upgrade the Please version invoked in CI (pin the same version locally via `pleasew`, `setup-please`, or `PLZ_VERSION`). -- If upgrading immediately is impractical, temporarily remove or comment the `[scm]` block until the runner is updated. - -### So what's going on? -Here we've used one of the built-in rules, `genrule()`, to run a custom command. `genrule()` can take a number of -parameters, most notably: the name of the rule, the inputs (sources and dependencies), its outputs, and the command -we want to run. The full list of available arguments can be found on the [`genrule()`](/lexicon.html#genrule) -documentation. 
- -Here we've used it to count the number of words in `file.txt`. Please has helpfully set up some environment variables -that help us find our inputs, as well as where to put our outputs: - -- `$SRC` - Set when there's only one item in the `srcs` list. Contains the path to that source file. -- `$SRCS` - Contains a space-separated list of the sources of the rule. -- `$OUT` - Set when there's only one item in the `outs` list. Contains the expected path of that output. -- `$OUTS` - Contains a space-separated list of the expected paths of the outputs of the rule. - -For a complete list of available variables, see the [build env](/build_rules.html#build-env) docs. - -The command `wc $SRC > $OUT` is therefore translated into `wc file.txt > file.wc` and we can see that the output of the -rule has been saved to `plz-out/gen/file.wc`. - -## The build directory -Duration: 7 - -One of the key features of Please is that builds are hermetic, that is, commands are executed in an isolated and -controlled environment. Rules can't access files or env vars that are not explicitly made available to them. As a -result, incremental builds very rarely break when using Please. - -Considering this, debugging builds would be quite hard if we couldn't play around in this build environment. Luckily, -Please makes this trivial with the `--shell` flag: - -``` -$ plz build --shell :word_count -Temp directories prepared, total time 50ms: - //:word_count: plz-out/tmp/word_count._build - Command: wc $SRC > $OUT - -bash-4.4$ pwd -/plz-out/tmp/word_count._build - -bash-4.4$ wc $SRC > $OUT - -bash-4.4$ cat $OUT - 1 9 45 file.txt -``` - -As we can see, Please has prepared a temporary directory for us under `plz-out/tmp`, and put us in a true-to-life bash -environment. 
You may run `printenv`, to see the environment variables that Please has made available to us: - -``` -bash-4.4$ printenv -OS=linux -ARCH=amd64 -LANG=en_GB.UTF-8 -TMP_DIR=/plz-out/tmp/word_count._build -CMD=wc $SRC > $OUT -OUT=/plz-out/tmp/word_count._build/file.wc -TOOLS= -SRCS=file.txt -PKG= -CONFIG=opt -PYTHONHASHSEED=42 -SRC=file.txt -OUTS=file.wc -PWD=/plz-out/tmp/word_count._build -HOME=/plz-out/tmp/word_count._build -NAME=word_count -TMPDIR=/plz-out/tmp/word_count._build -BUILD_CONFIG=opt -XOS=linux -XARCH=x86_64 -SHLVL=1 -PATH=/.please:/usr/local/bin:/usr/bin:/bin -GOOS=linux -PKG_DIR=. -GOARCH=amd64 -_=/usr/bin/printenv -``` - -As you can see, the rule doesn't have access to any of the variables from the host machine. Even `$PATH` has been set -based on configuration in `.plzconfig`: - -The `--shell` flag works for all targets (except filegroups), which of course means any of the built-in rules! Note, -`--shell` also works on `plz test`. You can `plz build --shell //my:test` to see how the test is built, and then -`plz test --shell //my:test` to see how it will be run. - -## Build definitions -Duration: 5 - -We've managed to write a custom rule to count the number of words in `file.txt`, however, we have no way of reusing this, -so let's create a `wordcount()` build definition! - -A build definition is just a function that creates one or more build targets which define how to build something. These -are typically defined inside `.build_def` files within your repository. Let's just create a folder for our definition: - -### `build_defs/word_count.build_defs` -```python -def word_count(name:str, file:str) -> str: - return genrule( - name = name, - srcs = [file], - outs = [f"{name}.wc"], - cmd = "wc $SRC > $OUT", - ) -``` - -We then need some way to access these build definitions from other packages. 
To do this, we typically use a filegroup: - -### `build_defs/BUILD` -```python -filegroup( - name = "word_count", - srcs = ["word_count.build_defs"], - visibility = ["PUBLIC"], -) -``` - -We can then use this in place of our `genrule()`: - -### `BUILD` -```python -subinclude("//build_defs:word_count") - -word_count( - name = "word_count", - file = "file.txt", -) -``` - -And check it still works: - -```bash -plz build //:word_count -``` -The output: - -``` -Build finished; total time 30ms, incrementality 100.0%. Outputs: -//:word_count: - plz-out/gen/word_count.wc -``` - -### `subinclude()` -Subinclude is primarily used for including build definitions into your `BUILD` file. It can be thought of like a -Python import except it operates on a build target instead. Under the hood, subinclude parses the output of the target -and makes the top-level declarations available in the current package's scope. - -The build target is usually a filegroup, however, this doesn't have to be the case. In fact, the build target can be -anything that produces parsable outputs. - -It's almost always a bad idea to build anything as part of a subinclude. These rules will be built at parse time, -which can be hard to debug, but more importantly, will block the parser while it waits for that rule to build. Use -non-filegroup subincludes under very careful consideration! - -## Managing tools -Duration: 7 - -Right now we're relying on `wc` to be available on the configured path. This is a pretty safe bet, however, Please -provides a powerful mechanism for managing tools, so let's over-engineer this: - -### `build_defs/word_count.build_defs` -```python -def word_count(name:str, file:str, wc_tool:str="wc") -> str: - return genrule( - name = name, - srcs = [file], - outs = [f"{name}.wc"], - cmd = "$TOOLS_WC $SRC > $OUT", - tools = { - "WC": [wc_tool], - } - ) -``` - -Here we've configured our build definition to take the word count tool in as a parameter. 
This is then passed to -`genrule()` via the `tools` parameter. Please has set up the `$TOOLS_WC` environment variable which we can used to -locate our tool. The name of this variable is based on the key in this dictionary. - -In this contrived example, this may not seem very useful, however, Please will perform some important tasks for us: - -- If the tool is a program, Please will check it's available on the path at parse time. -- If the tool is a build rule, Please will build this rule and configure `$TOOLS_WC` so it can be invoked. Whether the -tool is on the path or a build rule is transparent to you, the rule's author! - -### Custom word count tool -Currently, our word count rule doesn't just get the word count: it also gets the character and line count as well. I -mentioned that these can be build rules so let's create a true word count tool that counts just words: - -### `tools/wc.sh` -```shell script -#!/bin/bash - -wc -w $@ -``` - -### `tools/BUILD` -```python -filegroup( - name = "wc", - srcs = ["wc.sh"], - binary = True, - visibility = ["PUBLIC"], -) -``` - -and let's test that out: - -``` -$ plz run //tools:wc -- file.txt -9 file.txt -``` - -Brilliant! We can now use this in our build rule like so: - -### `BUILD` -```python -subinclude("//build_defs:word_count") - -word_count( - name = "lines_words_and_chars", - file = "file.txt", -) - -word_count( - name = "just_words", - file = "file.txt", - wc_tool = "//tools:wc", -) -``` - -and check it all works: - -``` -$ plz build //:lines_words_and_chars //:just_words -Build finished; total time 30ms, incrementality 100.0%. Outputs: -//:lines_words_and_chars: - plz-out/gen/lines_words_and_chars.wc -//:just_words: - plz-out/gen/just_words.wc - -$ cat plz-out/gen/lines_words_and_chars.wc -1 9 45 file.txt - -$ cat plz-out/gen/just_words.wc -9 file.txt -``` - -## Configuration -Duration: 6 - -Right now, we have to specify the new word count tool each time we use our build definition! 
Let's have a look at how we -can configure this in our `.plzconfig` instead: - -### `.plzconfig` -``` -[Buildconfig] -word-count-tool = //tools:wc -``` - -The `[buildconfig]` section can be used to add configuration specific to your project. By adding the `word-count-tool` -config option here, we can use this in our build definition: - -### `build_defs/word_count.build_defs` -```python -def word_count(name:str, file:str, wc_tool:str=CONFIG.WORD_COUNT_TOOL) -> str: - return genrule( - name = name, - srcs = [file], - outs = [f"{name}.wc"], - cmd = "$TOOLS_WC $SRC > $OUT", - tools = { - "WC": [wc_tool], - } - ) - -CONFIG.setdefault('WORD_COUNT_TOOL', 'wc') -``` - -Here we've set the default value for `wc_tool` to `CONFIG.WORD_COUNT_TOOL`, which will contain our config value from -`.plzconfig`. What if that's not set though? That's why we also set a sensible default configuration value with -`CONFIG.setdefault('WORD_COUNT_TOOL', 'wc')`! - - -We then need to update our build rules: - -### `BUILD` -```python -subinclude("//build_defs:word_count") - -word_count( - name = "lines_words_and_chars", - file = "file.txt", - wc_tool = "wc", -) - -word_count( - name = "just_words", - file = "file.txt", -) -``` - -and check it all works: - -``` -$ plz build //:lines_words_and_chars //:just_words -Build finished; total time 30ms, incrementality 100.0%. Outputs: -//:lines_words_and_chars: - plz-out/gen/lines_words_and_chars.wc -//:just_words: - plz-out/gen/just_words.wc - -$ cat plz-out/gen/lines_words_and_chars.wc -1 9 45 file.txt - -$ cat plz-out/gen/just_words.wc -9 file.txt -``` - -## Conclusion -Duration: 2 - -Congratulations! You've written your first build definition! While contrived, this example demonstrates most of the -mechanisms used to create a rich set of build definitions for a new language or technology. To get a better understanding -of build rules, I recommend reading through the advanced topics on [please.build](/build_rules.html). 
- -If you create something you believe will be useful to the wider world, we might be able to find a home for it in the -[pleasings](https://github.com/thought-machine/pleasings) repo! - -If you get stuck, jump on [gitter](https://gitter.im/please-build/Lobby) and we'll do our best to help you! - +summary: Writing custom build definitions +description: Start here to learn how to write custom build rules to automate nearly anything in your build +id: genrule +categories: intermediate +tags: medium +status: Published +authors: Jon Poole +Feedback Link: https://github.com/thought-machine/please + +# Custom build rules with `genrule()` +## Overview +Duration: 1 + +### Prerequisites +- You must have Please installed: [Install Please](https://please.build/quickstart.html) +- You should be comfortable using the existing build rules. + +### What you'll learn +We'll be working through a contrived example writing a build definition for +[wc](https://www.gnu.org/software/coreutils/manual/html_node/wc-invocation.html#wc-invocation) from core utils. +In doing so you'll: +- Be introduced to genrule(), the generic build rule +- Explore the build environment with `--shell` +- Write and use custom build rule definitions +- Manage and write custom tools for your build definition +- Add configuration for your build definitions + +### What if I get stuck? + +The final result of running through this codelab can be found +[here](https://github.com/thought-machine/please-codelabs/tree/main/custom_rules) for reference. If you really get stuck +you can find us on [gitter](https://gitter.im/please-build/Lobby)! + +## genrule() +Duration: 3 + +Before we jump into writing custom build definitions, let me introduce you to `genrule()`, the generic build rule. 
Let's +just create a new project and initialise Please in it: +```bash +mkdir custom_rules && cd custom_rules +plz init --no_prompt +``` + +Then create a `BUILD` file in the root of the repository like so: +### `BUILD` +```python +genrule( + name = "word_count", + srcs = ["file.txt"], + deps = [], + cmd = "wc $SRC > $OUT", + outs = ["file.wc"], +) +``` + +Then create file.txt: +```bash +echo "the quick brown fox jumped over the lazy dog" > file.txt +``` + +and build it: + +```bash +$ plz build //:word_count +Build finished; total time 70ms, incrementality 0.0%. Outputs: +//:word_count: + plz-out/gen/file.wc + +$ cat plz-out/gen/file.wc + 1 9 45 file.txt +``` + +### Troubleshooting: "can't store data at section "scm"" + +This message means the runner is using an older Please release that doesn’t understand the `[scm]` section in your `.plzconfig`, so parsing fails before any build work begins. + +**How to fix** +- Upgrade the Please version invoked in CI (pin the same version locally via `pleasew`, `setup-please`, or `PLZ_VERSION`). +- If upgrading immediately is impractical, temporarily remove or comment the `[scm]` block until the runner is updated. + +### So what's going on? +Here we've used one of the built-in rules, `genrule()`, to run a custom command. `genrule()` can take a number of +parameters, most notably: the name of the rule, the inputs (sources and dependencies), its outputs, and the command +we want to run. The full list of available arguments can be found on the [`genrule()`](/lexicon.html#genrule) +documentation. + +Here we've used it to count the number of words in `file.txt`. Please has helpfully set up some environment variables +that help us find our inputs, as well as where to put our outputs: + +- `$SRC` - Set when there's only one item in the `srcs` list. Contains the path to that source file. +- `$SRCS` - Contains a space-separated list of the sources of the rule. +- `$OUT` - Set when there's only one item in the `outs` list. 
Contains the expected path of that output. +- `$OUTS` - Contains a space-separated list of the expected paths of the outputs of the rule. + +For a complete list of available variables, see the [build env](/build_rules.html#build-env) docs. + +The command `wc $SRC > $OUT` is therefore translated into `wc file.txt > file.wc` and we can see that the output of the +rule has been saved to `plz-out/gen/file.wc`. + +## The build directory +Duration: 7 + +One of the key features of Please is that builds are hermetic, that is, commands are executed in an isolated and +controlled environment. Rules can't access files or env vars that are not explicitly made available to them. As a +result, incremental builds very rarely break when using Please. + +Considering this, debugging builds would be quite hard if we couldn't play around in this build environment. Luckily, +Please makes this trivial with the `--shell` flag: + +``` +$ plz build --shell :word_count +Temp directories prepared, total time 50ms: + //:word_count: plz-out/tmp/word_count._build + Command: wc $SRC > $OUT + +bash-4.4$ pwd +/plz-out/tmp/word_count._build + +bash-4.4$ wc $SRC > $OUT + +bash-4.4$ cat $OUT + 1 9 45 file.txt +``` + +As we can see, Please has prepared a temporary directory for us under `plz-out/tmp`, and put us in a true-to-life bash +environment. You may run `printenv`, to see the environment variables that Please has made available to us: + +``` +bash-4.4$ printenv +OS=linux +ARCH=amd64 +LANG=en_GB.UTF-8 +TMP_DIR=/plz-out/tmp/word_count._build +CMD=wc $SRC > $OUT +OUT=/plz-out/tmp/word_count._build/file.wc +TOOLS= +SRCS=file.txt +PKG= +CONFIG=opt +PYTHONHASHSEED=42 +SRC=file.txt +OUTS=file.wc +PWD=/plz-out/tmp/word_count._build +HOME=/plz-out/tmp/word_count._build +NAME=word_count +TMPDIR=/plz-out/tmp/word_count._build +BUILD_CONFIG=opt +XOS=linux +XARCH=x86_64 +SHLVL=1 +PATH=/.please:/usr/local/bin:/usr/bin:/bin +GOOS=linux +PKG_DIR=. 
+GOARCH=amd64
+_=/usr/bin/printenv
+```
+
+As you can see, the rule doesn't have access to any of the variables from the host machine. Even `$PATH` has been set
+based on configuration in `.plzconfig`.
+
+The `--shell` flag works for all targets (except filegroups), which of course means any of the built-in rules! Note,
+`--shell` also works on `plz test`. You can `plz build --shell //my:test` to see how the test is built, and then
+`plz test --shell //my:test` to see how it will be run.
+
+## Build definitions
+Duration: 5
+
+We've managed to write a custom rule to count the number of words in `file.txt`, however, we have no way of reusing this,
+so let's create a `word_count()` build definition!
+
+A build definition is just a function that creates one or more build targets which define how to build something. These
+are typically defined inside `.build_defs` files within your repository. Let's just create a folder for our definition:
+
+### `build_defs/word_count.build_defs`
+```python
+def word_count(name:str, file:str) -> str:
+    return genrule(
+        name = name,
+        srcs = [file],
+        outs = [f"{name}.wc"],
+        cmd = "wc $SRC > $OUT",
+    )
+```
+
+We then need some way to access these build definitions from other packages. To do this, we typically use a filegroup:
+
+### `build_defs/BUILD`
+```python
+filegroup(
+    name = "word_count",
+    srcs = ["word_count.build_defs"],
+    visibility = ["PUBLIC"],
+)
+```
+
+We can then use this in place of our `genrule()`:
+
+### `BUILD`
+```python
+subinclude("//build_defs:word_count")
+
+word_count(
+    name = "word_count",
+    file = "file.txt",
+)
+```
+
+And check it still works:
+
+```bash
+plz build //:word_count
+```
+The output:
+
+```
+Build finished; total time 30ms, incrementality 100.0%. Outputs:
+//:word_count:
+  plz-out/gen/word_count.wc
+```
+
+### `subinclude()`
+Subinclude is primarily used for including build definitions into your `BUILD` file. It can be thought of like a
+Python import except it operates on a build target instead. Under the hood, subinclude parses the output of the target
+and makes the top-level declarations available in the current package's scope.
+
+The build target is usually a filegroup, however, this doesn't have to be the case. In fact, the build target can be
+anything that produces parsable outputs.
+
+It's almost always a bad idea to build anything as part of a subinclude. These rules will be built at parse time,
+which can be hard to debug, but more importantly, will block the parser while it waits for that rule to build. Use
+non-filegroup subincludes under very careful consideration!
+
+## Managing tools
+Duration: 7
+
+Right now we're relying on `wc` to be available on the configured path. This is a pretty safe bet, however, Please
+provides a powerful mechanism for managing tools, so let's over-engineer this:
+
+### `build_defs/word_count.build_defs`
+```python
+def word_count(name:str, file:str, wc_tool:str="wc") -> str:
+    return genrule(
+        name = name,
+        srcs = [file],
+        outs = [f"{name}.wc"],
+        cmd = "$TOOLS_WC $SRC > $OUT",
+        tools = {
+            "WC": [wc_tool],
+        }
+    )
+```
+
+Here we've configured our build definition to take the word count tool in as a parameter. This is then passed to
+`genrule()` via the `tools` parameter. Please has set up the `$TOOLS_WC` environment variable which we can use to
+locate our tool. The name of this variable is based on the key in this dictionary.
+
+In this contrived example, this may not seem very useful, however, Please will perform some important tasks for us:
+
+- If the tool is a program, Please will check it's available on the path at parse time.
+- If the tool is a build rule, Please will build this rule and configure `$TOOLS_WC` so it can be invoked. Whether the
+tool is on the path or a build rule is transparent to you, the rule's author!
+ +### Custom word count tool +Currently, our word count rule doesn't just get the word count: it also gets the character and line count as well. I +mentioned that these can be build rules so let's create a true word count tool that counts just words: + +### `tools/wc.sh` +```shell script +#!/bin/bash + +wc -w $@ +``` + +### `tools/BUILD` +```python +filegroup( + name = "wc", + srcs = ["wc.sh"], + binary = True, + visibility = ["PUBLIC"], +) +``` + +and let's test that out: + +``` +$ plz run //tools:wc -- file.txt +9 file.txt +``` + +Brilliant! We can now use this in our build rule like so: + +### `BUILD` +```python +subinclude("//build_defs:word_count") + +word_count( + name = "lines_words_and_chars", + file = "file.txt", +) + +word_count( + name = "just_words", + file = "file.txt", + wc_tool = "//tools:wc", +) +``` + +and check it all works: + +``` +$ plz build //:lines_words_and_chars //:just_words +Build finished; total time 30ms, incrementality 100.0%. Outputs: +//:lines_words_and_chars: + plz-out/gen/lines_words_and_chars.wc +//:just_words: + plz-out/gen/just_words.wc + +$ cat plz-out/gen/lines_words_and_chars.wc +1 9 45 file.txt + +$ cat plz-out/gen/just_words.wc +9 file.txt +``` + +## Configuration +Duration: 6 + +Right now, we have to specify the new word count tool each time we use our build definition! Let's have a look at how we +can configure this in our `.plzconfig` instead: + +### `.plzconfig` +``` +[Buildconfig] +word-count-tool = //tools:wc +``` + +The `[buildconfig]` section can be used to add configuration specific to your project. 
By adding the `word-count-tool` +config option here, we can use this in our build definition: + +### `build_defs/word_count.build_defs` +```python +def word_count(name:str, file:str, wc_tool:str=CONFIG.WORD_COUNT_TOOL) -> str: + return genrule( + name = name, + srcs = [file], + outs = [f"{name}.wc"], + cmd = "$TOOLS_WC $SRC > $OUT", + tools = { + "WC": [wc_tool], + } + ) + +CONFIG.setdefault('WORD_COUNT_TOOL', 'wc') +``` + +Here we've set the default value for `wc_tool` to `CONFIG.WORD_COUNT_TOOL`, which will contain our config value from +`.plzconfig`. What if that's not set though? That's why we also set a sensible default configuration value with +`CONFIG.setdefault('WORD_COUNT_TOOL', 'wc')`! + + +We then need to update our build rules: + +### `BUILD` +```python +subinclude("//build_defs:word_count") + +word_count( + name = "lines_words_and_chars", + file = "file.txt", + wc_tool = "wc", +) + +word_count( + name = "just_words", + file = "file.txt", +) +``` + +and check it all works: + +``` +$ plz build //:lines_words_and_chars //:just_words +Build finished; total time 30ms, incrementality 100.0%. Outputs: +//:lines_words_and_chars: + plz-out/gen/lines_words_and_chars.wc +//:just_words: + plz-out/gen/just_words.wc + +$ cat plz-out/gen/lines_words_and_chars.wc +1 9 45 file.txt + +$ cat plz-out/gen/just_words.wc +9 file.txt +``` + +## Conclusion +Duration: 2 + +Congratulations! You've written your first build definition! While contrived, this example demonstrates most of the +mechanisms used to create a rich set of build definitions for a new language or technology. To get a better understanding +of build rules, I recommend reading through the advanced topics on [please.build](/build_rules.html). + +If you create something you believe will be useful to the wider world, we might be able to find a home for it in the +[pleasings](https://github.com/thought-machine/pleasings) repo! 
+ +If you get stuck, jump on [gitter](https://gitter.im/please-build/Lobby) and we'll do our best to help you! + diff --git a/docs/codelabs/github_actions.md b/docs/codelabs/github_actions.md index e959a6782..13e635a63 100644 --- a/docs/codelabs/github_actions.md +++ b/docs/codelabs/github_actions.md @@ -1,176 +1,176 @@ -summary: Running Please on GitHub Actions -description: GitHub Actions is an extensible CI/CD platform provided by GitHub -id: github_actions -categories: intermediate -tags: medium -status: Published -authors: Márk Sági-Kazár -Feedback Link: https://github.com/thought-machine/please - -# Running Please on GitHub Actions -## Overview -Duration: 2 - -### Prerequisites -- A repository on [GitHub](https://github.com) -- A project with Please initialized in that repository - -### What you'll learn -- How to setup [GitHub Actions](https://github.com/features/actions) -- How to use Please in a GitHub Actions build -- How to use the [setup-please](https://github.com/sagikazarmark/setup-please-action) action for better integration - -### What if I get stuck? -If you get stuck with GitHub Actions, check out the [official documentation](https://docs.github.com/en/free-pro-team@latest/actions). - -You can find usage examples of the [setup-please](https://github.com/sagikazarmark/setup-please-action) action in [this](https://github.com/sagikazarmark/todobackend-go-kit/blob/20292fc09e25196e751e087da7c5e659cd6c452f/.github/workflows/ci.yaml) repository. - -If you really get -stuck you can find us on [gitter](https://gitter.im/please-build/Lobby)! - -## GitHub Actions -Duration: 5 - -GitHub Actions is GitHub's built-in automation platform for CI/CD and other workflows. It runs workflows defined as YAML files in the .github/workflows directory, triggered by events (push, pull_request, schedule, manual, etc.). Workflows consist of jobs (run on hosted or self‑hosted runners) and steps that execute shell commands or reusable actions from the marketplace. 
Key benefits include tight GitHub integration, flexible triggers and matrices, a large action marketplace, and caching for faster builds. - -### Setting up GitHub Actions - -Workflow definitions are simple YAML files stored in the `.github/workflows` directory of your repository. - -The following snippet triggers a workflow named `CI` whenever commits are pushed to the `master` branch: - -```yaml -name: CI - -on: - push: - branches: - - master - pull_request: - -jobs: - test: - name: Test - runs-on: ubuntu-latest - - steps: - - name: Checkout code - uses: actions/checkout@v2 - - - name: Test - run: echo "Tests passed" -``` - -Go ahead and add the above snippet to `.github/workflows/ci.yaml` in your project. Then go to `https://github.com/YOU/YOUR-PROJECT/actions` and observe the workflow. - -## Please build -Duration: 4 - -Now we have a project setup with GitHub Actions, it's time to start building with Please! Let's change `ci.yaml` a little: - -```yaml -name: CI - -on: - push: - branches: - - master - pull_request: - -jobs: - test: - name: Test - runs-on: ubuntu-latest - - steps: - # Setup your language of choice here: - # https://github.com/actions/?q=setup-&type=&language= - - - name: Checkout code - uses: actions/checkout@v2 - - # Run please build - - name: Test - run: ./pleasew build //... -``` - -Compared to the example earlier, this workflow uses the `pleasew` script to download Please and build the project. - -Notice the `//...` bit at the end of the command: it's necessary on GitHub Actions. -Check [this](https://github.com/thought-machine/please/issues/1174) issue for more details. - -## setup-please action -Duration: 10 - -The [setup-please](https://github.com/sagikazarmark/setup-please-action) action provides better integration for Please. - -### What is an _action_? - -As you've seen in the previous examples, workflows consist of _steps_. -A workflow step can be as simple as a shell script: - -```yaml -- name: Test - run: ./pleasew build //... 
-``` - -Shell scripts (no matter how awesome they are) are not always the right tool for the job. Complex build steps might require a more expressive language which takes us to the second type of workflow steps, called _actions_: - -```yaml -- name: Checkout code - uses: actions/checkout@v2 -``` - -An _action_ can be written in any language (distributed as Docker images), but JavaScript is supported natively. - -### Why not just use ./pleasew? - -The above section about _actions_ begs the question: why not just use `pleasew`? Why do we need an action for running Please. - -Please itself can perfectly run on GitHub Actions on its own, so you don't need an _action_ per se. That being said, there are a couple issues when using `pleasew`: - -- The wrapper script does not understand Please configuration which can lead to multiple downloads of different versions to different locations which takes time and time is expensive in CI. -- When using self-hosted runners, GitHub Actions offers a cache specifically for tools (like Please) that can further speed up workflows, but it requires a custom action. - -The [setup-please](https://github.com/sagikazarmark/setup-please-action) action provides better integration for Please solving the above issues (and a lot more). - -### Using the setup-please action - -Adding the [setup-please](https://github.com/sagikazarmark/setup-please-action) action to your workflow is simply adding two lines: - -```yaml -name: CI - -on: - push: - branches: - - master - pull_request: - -jobs: - test: - name: Test - runs-on: ubuntu-latest - - steps: - # Setup your language of choice here: - # https://github.com/actions/?q=setup-&type=&language= - - - name: Checkout code - uses: actions/checkout@v2 - - # Make sure it's added after the checkout step - - name: Set up Please - uses: sagikazarmark/setup-please-action@v0 - - # Run please build - # You can use plz thanks to the setup action - - name: Test - run: plz test //... 
-``` - -The readme of [setup-please](https://github.com/sagikazarmark/setup-please-action) explains more use cases and configuration options: - -- global include/exclude labels -- global profile -- saving logs as artifacts +summary: Running Please on GitHub Actions +description: GitHub Actions is an extensible CI/CD platform provided by GitHub +id: github_actions +categories: intermediate +tags: medium +status: Published +authors: Márk Sági-Kazár +Feedback Link: https://github.com/thought-machine/please + +# Running Please on GitHub Actions +## Overview +Duration: 2 + +### Prerequisites +- A repository on [GitHub](https://github.com) +- A project with Please initialized in that repository + +### What you'll learn +- How to setup [GitHub Actions](https://github.com/features/actions) +- How to use Please in a GitHub Actions build +- How to use the [setup-please](https://github.com/sagikazarmark/setup-please-action) action for better integration + +### What if I get stuck? +If you get stuck with GitHub Actions, check out the [official documentation](https://docs.github.com/en/free-pro-team@latest/actions). + +You can find usage examples of the [setup-please](https://github.com/sagikazarmark/setup-please-action) action in [this](https://github.com/sagikazarmark/todobackend-go-kit/blob/20292fc09e25196e751e087da7c5e659cd6c452f/.github/workflows/ci.yaml) repository. + +If you really get +stuck you can find us on [gitter](https://gitter.im/please-build/Lobby)! + +## GitHub Actions +Duration: 5 + +GitHub Actions is GitHub's built-in automation platform for CI/CD and other workflows. It runs workflows defined as YAML files in the .github/workflows directory, triggered by events (push, pull_request, schedule, manual, etc.). Workflows consist of jobs (run on hosted or self‑hosted runners) and steps that execute shell commands or reusable actions from the marketplace. 
Key benefits include tight GitHub integration, flexible triggers and matrices, a large action marketplace, and caching for faster builds. + +### Setting up GitHub Actions + +Workflow definitions are simple YAML files stored in the `.github/workflows` directory of your repository. + +The following snippet triggers a workflow named `CI` whenever commits are pushed to the `master` branch: + +```yaml +name: CI + +on: + push: + branches: + - master + pull_request: + +jobs: + test: + name: Test + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v2 + + - name: Test + run: echo "Tests passed" +``` + +Go ahead and add the above snippet to `.github/workflows/ci.yaml` in your project. Then go to `https://github.com/YOU/YOUR-PROJECT/actions` and observe the workflow. + +## Please build +Duration: 4 + +Now we have a project setup with GitHub Actions, it's time to start building with Please! Let's change `ci.yaml` a little: + +```yaml +name: CI + +on: + push: + branches: + - master + pull_request: + +jobs: + test: + name: Test + runs-on: ubuntu-latest + + steps: + # Setup your language of choice here: + # https://github.com/actions/?q=setup-&type=&language= + + - name: Checkout code + uses: actions/checkout@v2 + + # Run please build + - name: Test + run: ./pleasew build //... +``` + +Compared to the example earlier, this workflow uses the `pleasew` script to download Please and build the project. + +Notice the `//...` bit at the end of the command: it's necessary on GitHub Actions. +Check [this](https://github.com/thought-machine/please/issues/1174) issue for more details. + +## setup-please action +Duration: 10 + +The [setup-please](https://github.com/sagikazarmark/setup-please-action) action provides better integration for Please. + +### What is an _action_? + +As you've seen in the previous examples, workflows consist of _steps_. +A workflow step can be as simple as a shell script: + +```yaml +- name: Test + run: ./pleasew build //... 
+```
+
+Shell scripts (no matter how awesome they are) are not always the right tool for the job. Complex build steps might require a more expressive language which takes us to the second type of workflow steps, called _actions_:
+
+```yaml
+- name: Checkout code
+  uses: actions/checkout@v2
+```
+
+An _action_ can be written in any language (distributed as Docker images), but JavaScript is supported natively.
+
+### Why not just use ./pleasew?
+
+The above section about _actions_ begs the question: why not just use `pleasew`? Why do we need an action for running Please?
+
+Please itself can perfectly run on GitHub Actions on its own, so you don't need an _action_ per se. That being said, there are a couple of issues when using `pleasew`:
+
+- The wrapper script does not understand Please configuration which can lead to multiple downloads of different versions to different locations which takes time and time is expensive in CI.
+- When using self-hosted runners, GitHub Actions offers a cache specifically for tools (like Please) that can further speed up workflows, but it requires a custom action.
+
+The [setup-please](https://github.com/sagikazarmark/setup-please-action) action provides better integration for Please solving the above issues (and a lot more).
+
+### Using the setup-please action
+
+Adding the [setup-please](https://github.com/sagikazarmark/setup-please-action) action to your workflow is simply adding two lines:
+
+```yaml
+name: CI
+
+on:
+  push:
+    branches:
+      - master
+  pull_request:
+
+jobs:
+  test:
+    name: Test
+    runs-on: ubuntu-latest
+
+    steps:
+      # Setup your language of choice here:
+      # https://github.com/actions/?q=setup-&type=&language=
+
+      - name: Checkout code
+        uses: actions/checkout@v2
+
+      # Make sure it's added after the checkout step
+      - name: Set up Please
+        uses: sagikazarmark/setup-please-action@v0
+
+      # Run please build
+      # You can use plz thanks to the setup action
+      - name: Test
+        run: plz test //...
+``` + +The readme of [setup-please](https://github.com/sagikazarmark/setup-please-action) explains more use cases and configuration options: + +- global include/exclude labels +- global profile +- saving logs as artifacts diff --git a/docs/codelabs/go_intro.md b/docs/codelabs/go_intro.md index a68a80c91..a439b3fc0 100644 --- a/docs/codelabs/go_intro.md +++ b/docs/codelabs/go_intro.md @@ -1,506 +1,506 @@ -summary: Getting started with Go -description: Building and testing with Go and Please, as well as managing third-party dependencies via go_repo -id: go_intro -categories: beginner -tags: medium -status: Published -authors: Jon Poole -Feedback Link: https://github.com/thought-machine/please - -# Getting started with Go -## Overview -Duration: 4 - -### Prerequisites -- You must have Please installed: [Install please](https://please.build/quickstart.html) -- Go must be installed: [Install Go](https://golang.org/doc/install#install) - -### What you'll learn -- Configuring Please for Go using the Go plugin -- Creating an executable Go binary -- Adding Go packages to your project -- Testing your code -- Including third-party libraries - -### What if I get stuck? - -The final result of running through this codelab can be found -[here](https://github.com/thought-machine/please-codelabs/tree/main/getting_started_go) for reference. If you really get -stuck you can find us on [gitter](https://gitter.im/please-build/Lobby)! - -## Initialising your project -Duration: 2 - -The easiest way to get started is from an existing Go module: - -```bash -mkdir getting_started_go && cd getting_started_go -plz init -plz init plugin go -go mod init github.com/example/module -``` - - -### So what just happened? -You will see this has created a number of files in your working folder: -```text -$ tree -a - . - ├── go.mod - ├── pleasew - ├── plugins - │ └── BUILD - └── .plzconfig -``` - -The `go.mod` file was generated by `go` and contains information about the Go module. 
While Please doesn't directly use -this file, it can be useful for integrating your project with the Go ecosystem and IDEs. You may remove it if you wish. - -The `pleasew` script is a wrapper script that will automatically install Please if it's not already! This -means Please projects are portable and can always be built via -`git clone https://... example_module && cd example_module && ./pleasew build`. - -The `plugins/BUILD` is a file generated by `plz init plugin go` which defines a build target for the Go plugin. - -The file `.plzconfig` contains the project configuration for Please. Please will have initialised this with the Go -plugin configuration for us: - -### `.plzconfig` -``` -[parse] -preloadsubincludes = ///go//build_defs:go ; Makes the Go rules available automatically in BUILD files - -[Plugin "go"] -Target = //plugins:go -``` - -This configures the Go plugin, and makes the build definitions available in the parse context throughout the repo -automatically. Alternatively, if you're not using Go everywhere, you can remove the `preloadsubincludes` config and add -`subinclude("///go//build_defs:go")` to each `BUILD` file that needs access to Go rules. - -### Troubleshooting: "unknown rule go_binary" -Duration: 1 - -Seeing `unknown rule go_binary` (or similar for other Go rules) means the plugin was not loaded. Confirm the plugin target exists and re-run the init script if needed. - -**Fix checklist** -- `plz query config Plugin.go.Target` should report `//plugins:go`. -- Ensure `plugins/BUILD` is present and contains the Go plugin target. -- If `.plzconfig` was edited manually, re-run `plz init plugin go` or restore the snippet above. - -Read the [config](/config.html) and [go plugin config](/plugins.html#go.config) docs for more information on -configuration. - -Finally, the `plz-out` directory contains artifacts built by plz. 
- -## Setting up our import path -Duration: 1 - -As we've initialised a Go module, all imports should be resolved relative to the module name. To instruct Please to -use this import path, we have to configure the Go plugin as such: - -### `.plzconfig` -```text -[Plugin "go"] -Target = //plugins:go -ImportPath = github.com/example/module ; Should match the module name in go.mod -``` - -## Setting up your toolchain -Duration: 2 - -If you have followed the [Golang quickstart guide](https://go.dev/doc/tutorial/getting-started), or if you're using -1.20 or newer, there's a good chance additional configuration is required. There are two options for configuring your -Go toolchain with Please. - -### Recommended: managed toolchain - -The simplest way is to let Please manage your toolchain for you. The `go_toolchain()` rule will download the Go -toolchain, compiling the standard library if necessary. Simply add the following rule to your project: - -### `third_party/go/BUILD` -```python -go_toolchain( - name = "toolchain", - version = "1.20", -) -``` - -And then configure the Go plugin to use it like so: -### `.plzconfig` -```text -[Plugin "go"] -Target = //plugins:go -ImportPath = github.com/example/module -GoTool = //third_party/go:toolchain|go -``` - -### Using Go from the system PATH - -By default, Please will look for Go in the following locations: -``` -/usr/local/bin:/usr/bin:/bin -``` - -If you have Please installed elsewhere, you must configure the path like so: - -### `.plzconfig` -```text -[Build] -Path = /usr/local/go/bin:/usr/local/bin:/usr/bin:/bin -``` - -Additionally, from version 1.20, golang no longer includes the standard library with its distribution. To use 1.20 from -the path with Please, you must install it. This can be done like so: - -```bash -GODEBUG="installgoroot=all" go install std -``` - -## Hello, world! -Duration: 4 - -Now we have a Please project, it's time to start adding some code to it! 
Let's create a "hello world" Go program: - -### `src/main.go` -```go -package main - -import "fmt" - -func main() { - fmt.Println("Hello, world!") -} -``` - -We now need to tell Please about our Go code. Please projects define metadata about the targets that are available to be -built in `BUILD` files. Let's create a `BUILD` file to build this program: - -### `src/BUILD` -```python -go_binary( - name = "main", - srcs = ["main.go"], -) -``` - -That's it! You can now run this with: - -```bash -plz run //src:main -``` - -You should see the output: - -```text -Hello, world! -``` - -There's a lot going on here; first off, `go_binary()` is one of the [go plugin functions](/plugins.html#go). This build -function creates a "build target" in the `src` package. A package, in the Please sense, is any directory that contains a -`BUILD` file. - -Each build target can be identified by a build label in the format `//path/to/package:label`, i.e. `//src:main`. -There are a number of things you can do with a build target such as `plz build //src:main`, however, as you've seen, -if the target is a binary, you may run it with `plz run`. - -## Adding packages -Duration: 5 - -Let's add a `src/greetings` package to our Go project: - -### `src/greetings/greetings.go` -```go -package greetings - -import ( - "math/rand" -) - -var greetings = []string{ - "Hello", - "Bonjour", - "Marhabaan", -} - -func Greeting() string { - return greetings[rand.Intn(len(greetings))] -} -``` - -We then need to tell Please how to compile this library: - -### `src/greetings/BUILD` -```python -go_library( - name = "greetings", - srcs = ["greetings.go"], - visibility = ["//src/..."], -) -``` - -Then run the following command to build the greetings package: - -```bash -plz build //src/greetings -``` - -You should see output similar to: - -```text -Build finished; total time 290ms, incrementality 50.0%. 
Outputs: -//src/greetings:greetings: - plz-out/gen/src/greetings/greetings.a -``` - -Here we can see that the output of a `go_library` rule is a `.a` file which is stored in -`plz-out/gen/src/greetings/greetings.a`. This is a [static library archive](https://en.wikipedia.org/wiki/Static_library) -representing the compiled output of our package. - -We have also provided a `visibility` list to this rule. This is used to control where this `go_library()` rule can be -used within our project. In this case, any rule under `src`, denoted by the `...` syntax. - -NB: This syntax can also be used on the command line e.g. `plz build //src/...` - -## Using our new package -Duration: 2 -To maintain a principled model for incremental and hermetic builds, Please requires that rules are explicit about their -inputs and outputs. To use this new package in our "hello world" program, we have to add it as a dependency of our binary rule: - -### `src/BUILD` -```python -go_binary( - name = "main", - srcs = ["main.go"], - # NB: if the package and rule name are the same, you may omit the name i.e. this could be just //src/greetings - deps = ["//src/greetings:greetings"], -) -``` - -You can see we use a build label to refer to another rule here. Please will make sure that this rule is built before -making its outputs available to our rule here. - -Then update `src/main.go`: -### `src/main.go` -```go -package main - -import ( - "fmt" - - "github.com/example/module/src/greetings" -) - -func main() { - fmt.Printf("%s, world!\n", greetings.Greeting()) -} -``` - -Give it a whirl by running the following command: - -```text -$ plz run //src:main -Bonjour, world! -``` - -The greeting is selected at random, so your output may vary each time you run the command. 
- -## Testing our code -Duration: 5 - -Let's create a very simple test for our library: -### `src/greetings/greetings_test.go` -```go -package greetings - -import "testing" - -func TestGreeting(t *testing.T) { - if Greeting() == "" { - panic("Greeting failed to produce a result") - } -} -``` - -We then need to tell Please about our tests: -### `src/greetings/BUILD` -```python -go_library( - name = "greetings", - srcs = ["greetings.go"], - visibility = ["//src/..."], -) - -go_test( - name = "greetings_test", - srcs = ["greetings_test.go"], - # Here we have used the shorthand `:greetings` label format. This format can be used to refer to a rule in the same - # package and is shorthand for `//src/greetings:greetings`. - deps = [":greetings"], -) -``` - -We've used `go_test()`. This is a special build rule that is considered a test. These rules can be executed as such: -```text -$ plz test //src/... -//src/greetings:greetings_test 1 test run in 3ms; 1 passed -1 test target and 1 test run in 3ms; 1 passed. Total time 90ms. -``` - -Please will run all the tests it finds under `//src/...`, and aggregate the results up. This works even across -languages allowing you to test your whole project with a single command. - -### External tests - -Go has a concept of "external" tests. This means that tests can exist in the same folder as the production code, but -they have a different package. 
Please supports this through the `external = True` argument on `go_test()`: - -### `src/greetings/greetings_test.go` -```go -package greetings_test - -import ( - "testing" - - // We now need to import the "production" package - "github.com/example/module/src/greetings" -) - -func TestGreeting(t *testing.T) { - if greetings.Greeting() == "" { - panic("Greeting failed to produce a result") - } -} -``` - -### `src/greetings/BUILD` -```python -go_library( - name = "greetings", - srcs = ["greetings.go"], - visibility = ["//src/..."], -) - -go_test( - name = "greetings_test", - srcs = ["greetings_test.go"], - deps = [":greetings"], - external = True, -) -``` - -Check if it works: -```text -$ plz test //src/... -//src/greetings:greetings_test 1 test run in 3ms; 1 passed - 1 test target and 1 test run in 3ms; 1 passed. Total time 90ms. -``` -## Third-party dependencies -Duration: 7 - -To add third party dependencies to Please, the easiest way is to use `///go//tools:please_go` to resolve them, and then -add them to `third_party/go/BUILD`. 
Let's add `github.com/stretchr/testify`: - -```text -$ plz run ///go//tools:please_go -- get github.com/stretchr/testify@v1.8.2 -go_repo(module="github.com/stretchr/objx", version="v0.5.0") -go_repo(module="gopkg.in/yaml.v3", version="v3.0.1") -go_repo(module="gopkg.in/check.v1", version="v0.0.0-20161208181325-20d25e280405") -go_repo(module="github.com/stretchr/testify", version="v1.8.2") -go_repo(module="github.com/davecgh/go-spew", version="v1.1.1") -go_repo(module="github.com/pmezard/go-difflib", version="v1.0.0") -``` - -We can then add them to `third_party/go/BUILD`: -```python -# We give direct modules a name and install list so we can reference them nicely -go_repo( - name = "testify", - module = "github.com/stretchr/testify", - version="v1.8.2", - # We add the subset of packages we actually depend on here - install = [ - "assert", - "require", - ] -) - -# Indirect modules are referenced internally, so we don't have to name them if we don't want to. They can still be -# referenced by the following build label naming convention: ///third_party/go/github.com_owner_repo//package. -# -# NB: Any slashes in the module name will be replaced by _ -go_repo(module="github.com/davecgh/go-spew", version="v1.1.1") -go_repo(module="github.com/pmezard/go-difflib", version="v1.0.0") -go_repo(module="github.com/stretchr/objx", version="v0.5.0") -go_repo(module="gopkg.in/yaml.v3", version="v3.0.1") -go_repo(module="gopkg.in/check.v1", version="v0.0.0-20161208181325-20d25e280405") -``` - -More information as to how `go_repo` works can be found -[here](/plugins.html#go_repo). - -NB: This build label looks a little different. That's because it's referencing a build target in a subrepo. 
-### Updating our tests - -We can now use this library in our tests: - -### `src/greetings/greetings_test.go` -```go -package greetings_test - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/example/module/src/greetings" -) - -func TestGreeting(t *testing.T) { - assert.NotEqual(t, greetings.Greeting(), "") -} -``` - -### `src/greetings/BUILD` -```python -go_library( - name = "greetings", - srcs = ["greetings.go"], - visibility = ["//src/..."], -) - -go_test( - name = "greetings_test", - srcs = ["greetings_test.go"], - deps = [ - ":greetings", - # Could use a subrepo label i.e. ///third_party/go/github.com_stretchr_testify//assert instead if we want - "//third_party/go:testify", - ], - external = True, -) -``` - -And then we can check it all works: -```text -$ plz test -//src/greetings:greetings_test 1 test run in 3ms; 1 passed -1 test target and 1 test run; 1 passed. -Total time: 480ms real, 0s compute. -``` - -## What next? -Duration: 1 - -Hopefully you now have an idea as to how to build Go with Please. Please is capable of so much more though! - -- [Please basics](/basics.html) - A more general introduction to Please. It covers a lot of what we have in this -tutorial in more detail. -- [Go plugin rules](/plugins.html#go) - See the rest of the Go plugin rules and config. -- [Built-in rules](/lexicon.html#go) - See the rest of the built in rules. -- [Config](/config.html) - See the available config options for Please. -- [Command line interface](/commands.html) - Please has a powerful command line interface. Interrogate the build graph, -determine files changes since master, watch rules and build them automatically as things change and much more! Use -`plz help`, and explore this rich set of commands! - -Otherwise, why not try one of the other codelabs! 
+summary: Getting started with Go +description: Building and testing with Go and Please, as well as managing third-party dependencies via go_repo +id: go_intro +categories: beginner +tags: medium +status: Published +authors: Jon Poole +Feedback Link: https://github.com/thought-machine/please + +# Getting started with Go +## Overview +Duration: 4 + +### Prerequisites +- You must have Please installed: [Install please](https://please.build/quickstart.html) +- Go must be installed: [Install Go](https://golang.org/doc/install#install) + +### What you'll learn +- Configuring Please for Go using the Go plugin +- Creating an executable Go binary +- Adding Go packages to your project +- Testing your code +- Including third-party libraries + +### What if I get stuck? + +The final result of running through this codelab can be found +[here](https://github.com/thought-machine/please-codelabs/tree/main/getting_started_go) for reference. If you really get +stuck you can find us on [gitter](https://gitter.im/please-build/Lobby)! + +## Initialising your project +Duration: 2 + +The easiest way to get started is from an existing Go module: + +```bash +mkdir getting_started_go && cd getting_started_go +plz init +plz init plugin go +go mod init github.com/example/module +``` + + +### So what just happened? +You will see this has created a number of files in your working folder: +```text +$ tree -a + . + ├── go.mod + ├── pleasew + ├── plugins + │ └── BUILD + └── .plzconfig +``` + +The `go.mod` file was generated by `go` and contains information about the Go module. While Please doesn't directly use +this file, it can be useful for integrating your project with the Go ecosystem and IDEs. You may remove it if you wish. + +The `pleasew` script is a wrapper script that will automatically install Please if it's not already! This +means Please projects are portable and can always be built via +`git clone https://... example_module && cd example_module && ./pleasew build`. 
+ +The `plugins/BUILD` is a file generated by `plz init plugin go` which defines a build target for the Go plugin. + +The file `.plzconfig` contains the project configuration for Please. Please will have initialised this with the Go +plugin configuration for us: + +### `.plzconfig` +``` +[parse] +preloadsubincludes = ///go//build_defs:go ; Makes the Go rules available automatically in BUILD files + +[Plugin "go"] +Target = //plugins:go +``` + +This configures the Go plugin, and makes the build definitions available in the parse context throughout the repo +automatically. Alternatively, if you're not using Go everywhere, you can remove the `preloadsubincludes` config and add +`subinclude("///go//build_defs:go")` to each `BUILD` file that needs access to Go rules. + +### Troubleshooting: "unknown rule go_binary" +Duration: 1 + +Seeing `unknown rule go_binary` (or similar for other Go rules) means the plugin was not loaded. Confirm the plugin target exists and re-run the init script if needed. + +**Fix checklist** +- `plz query config Plugin.go.Target` should report `//plugins:go`. +- Ensure `plugins/BUILD` is present and contains the Go plugin target. +- If `.plzconfig` was edited manually, re-run `plz init plugin go` or restore the snippet above. + +Read the [config](/config.html) and [go plugin config](/plugins.html#go.config) docs for more information on +configuration. + +Finally, the `plz-out` directory contains artifacts built by plz. + +## Setting up our import path +Duration: 1 + +As we've initialised a Go module, all imports should be resolved relative to the module name. 
To instruct Please to
+use this import path, we have to configure the Go plugin as such:
+
+### `.plzconfig`
+```text
+[Plugin "go"]
+Target = //plugins:go
+ImportPath = github.com/example/module ; Should match the module name in go.mod
+```
+
+## Setting up your toolchain
+Duration: 2
+
+If you have followed the [Golang quickstart guide](https://go.dev/doc/tutorial/getting-started), or if you're using
+1.20 or newer, there's a good chance additional configuration is required. There are two options for configuring your
+Go toolchain with Please.
+
+### Recommended: managed toolchain
+
+The simplest way is to let Please manage your toolchain for you. The `go_toolchain()` rule will download the Go
+toolchain, compiling the standard library if necessary. Simply add the following rule to your project:
+
+### `third_party/go/BUILD`
+```python
+go_toolchain(
+    name = "toolchain",
+    version = "1.20",
+)
+```
+
+And then configure the Go plugin to use it like so:
+### `.plzconfig`
+```text
+[Plugin "go"]
+Target = //plugins:go
+ImportPath = github.com/example/module
+GoTool = //third_party/go:toolchain|go
+```
+
+### Using Go from the system PATH
+
+By default, Please will look for Go in the following locations:
+```
+/usr/local/bin:/usr/bin:/bin
+```
+
+If you have Go installed elsewhere, you must configure the path like so:
+
+### `.plzconfig`
+```text
+[Build]
+Path = /usr/local/go/bin:/usr/local/bin:/usr/bin:/bin
+```
+
+Additionally, from version 1.20, Go no longer includes a pre-compiled standard library with its distribution. To use
+Go 1.20 or newer from the path with Please, you must install it. This can be done like so:
+
+```bash
+GODEBUG="installgoroot=all" go install std
+```
+
+## Hello, world!
+Duration: 4
+
+Now we have a Please project, it's time to start adding some code to it! 
Let's create a "hello world" Go program: + +### `src/main.go` +```go +package main + +import "fmt" + +func main() { + fmt.Println("Hello, world!") +} +``` + +We now need to tell Please about our Go code. Please projects define metadata about the targets that are available to be +built in `BUILD` files. Let's create a `BUILD` file to build this program: + +### `src/BUILD` +```python +go_binary( + name = "main", + srcs = ["main.go"], +) +``` + +That's it! You can now run this with: + +```bash +plz run //src:main +``` + +You should see the output: + +```text +Hello, world! +``` + +There's a lot going on here; first off, `go_binary()` is one of the [go plugin functions](/plugins.html#go). This build +function creates a "build target" in the `src` package. A package, in the Please sense, is any directory that contains a +`BUILD` file. + +Each build target can be identified by a build label in the format `//path/to/package:label`, i.e. `//src:main`. +There are a number of things you can do with a build target such as `plz build //src:main`, however, as you've seen, +if the target is a binary, you may run it with `plz run`. + +## Adding packages +Duration: 5 + +Let's add a `src/greetings` package to our Go project: + +### `src/greetings/greetings.go` +```go +package greetings + +import ( + "math/rand" +) + +var greetings = []string{ + "Hello", + "Bonjour", + "Marhabaan", +} + +func Greeting() string { + return greetings[rand.Intn(len(greetings))] +} +``` + +We then need to tell Please how to compile this library: + +### `src/greetings/BUILD` +```python +go_library( + name = "greetings", + srcs = ["greetings.go"], + visibility = ["//src/..."], +) +``` + +Then run the following command to build the greetings package: + +```bash +plz build //src/greetings +``` + +You should see output similar to: + +```text +Build finished; total time 290ms, incrementality 50.0%. 
Outputs: +//src/greetings:greetings: + plz-out/gen/src/greetings/greetings.a +``` + +Here we can see that the output of a `go_library` rule is a `.a` file which is stored in +`plz-out/gen/src/greetings/greetings.a`. This is a [static library archive](https://en.wikipedia.org/wiki/Static_library) +representing the compiled output of our package. + +We have also provided a `visibility` list to this rule. This is used to control where this `go_library()` rule can be +used within our project. In this case, any rule under `src`, denoted by the `...` syntax. + +NB: This syntax can also be used on the command line e.g. `plz build //src/...` + +## Using our new package +Duration: 2 +To maintain a principled model for incremental and hermetic builds, Please requires that rules are explicit about their +inputs and outputs. To use this new package in our "hello world" program, we have to add it as a dependency of our binary rule: + +### `src/BUILD` +```python +go_binary( + name = "main", + srcs = ["main.go"], + # NB: if the package and rule name are the same, you may omit the name i.e. this could be just //src/greetings + deps = ["//src/greetings:greetings"], +) +``` + +You can see we use a build label to refer to another rule here. Please will make sure that this rule is built before +making its outputs available to our rule here. + +Then update `src/main.go`: +### `src/main.go` +```go +package main + +import ( + "fmt" + + "github.com/example/module/src/greetings" +) + +func main() { + fmt.Printf("%s, world!\n", greetings.Greeting()) +} +``` + +Give it a whirl by running the following command: + +```text +$ plz run //src:main +Bonjour, world! +``` + +The greeting is selected at random, so your output may vary each time you run the command. 
+ +## Testing our code +Duration: 5 + +Let's create a very simple test for our library: +### `src/greetings/greetings_test.go` +```go +package greetings + +import "testing" + +func TestGreeting(t *testing.T) { + if Greeting() == "" { + panic("Greeting failed to produce a result") + } +} +``` + +We then need to tell Please about our tests: +### `src/greetings/BUILD` +```python +go_library( + name = "greetings", + srcs = ["greetings.go"], + visibility = ["//src/..."], +) + +go_test( + name = "greetings_test", + srcs = ["greetings_test.go"], + # Here we have used the shorthand `:greetings` label format. This format can be used to refer to a rule in the same + # package and is shorthand for `//src/greetings:greetings`. + deps = [":greetings"], +) +``` + +We've used `go_test()`. This is a special build rule that is considered a test. These rules can be executed as such: +```text +$ plz test //src/... +//src/greetings:greetings_test 1 test run in 3ms; 1 passed +1 test target and 1 test run in 3ms; 1 passed. Total time 90ms. +``` + +Please will run all the tests it finds under `//src/...`, and aggregate the results up. This works even across +languages allowing you to test your whole project with a single command. + +### External tests + +Go has a concept of "external" tests. This means that tests can exist in the same folder as the production code, but +they have a different package. 
Please supports this through the `external = True` argument on `go_test()`: + +### `src/greetings/greetings_test.go` +```go +package greetings_test + +import ( + "testing" + + // We now need to import the "production" package + "github.com/example/module/src/greetings" +) + +func TestGreeting(t *testing.T) { + if greetings.Greeting() == "" { + panic("Greeting failed to produce a result") + } +} +``` + +### `src/greetings/BUILD` +```python +go_library( + name = "greetings", + srcs = ["greetings.go"], + visibility = ["//src/..."], +) + +go_test( + name = "greetings_test", + srcs = ["greetings_test.go"], + deps = [":greetings"], + external = True, +) +``` + +Check if it works: +```text +$ plz test //src/... +//src/greetings:greetings_test 1 test run in 3ms; 1 passed + 1 test target and 1 test run in 3ms; 1 passed. Total time 90ms. +``` +## Third-party dependencies +Duration: 7 + +To add third party dependencies to Please, the easiest way is to use `///go//tools:please_go` to resolve them, and then +add them to `third_party/go/BUILD`. 
Let's add `github.com/stretchr/testify`: + +```text +$ plz run ///go//tools:please_go -- get github.com/stretchr/testify@v1.8.2 +go_repo(module="github.com/stretchr/objx", version="v0.5.0") +go_repo(module="gopkg.in/yaml.v3", version="v3.0.1") +go_repo(module="gopkg.in/check.v1", version="v0.0.0-20161208181325-20d25e280405") +go_repo(module="github.com/stretchr/testify", version="v1.8.2") +go_repo(module="github.com/davecgh/go-spew", version="v1.1.1") +go_repo(module="github.com/pmezard/go-difflib", version="v1.0.0") +``` + +We can then add them to `third_party/go/BUILD`: +```python +# We give direct modules a name and install list so we can reference them nicely +go_repo( + name = "testify", + module = "github.com/stretchr/testify", + version="v1.8.2", + # We add the subset of packages we actually depend on here + install = [ + "assert", + "require", + ] +) + +# Indirect modules are referenced internally, so we don't have to name them if we don't want to. They can still be +# referenced by the following build label naming convention: ///third_party/go/github.com_owner_repo//package. +# +# NB: Any slashes in the module name will be replaced by _ +go_repo(module="github.com/davecgh/go-spew", version="v1.1.1") +go_repo(module="github.com/pmezard/go-difflib", version="v1.0.0") +go_repo(module="github.com/stretchr/objx", version="v0.5.0") +go_repo(module="gopkg.in/yaml.v3", version="v3.0.1") +go_repo(module="gopkg.in/check.v1", version="v0.0.0-20161208181325-20d25e280405") +``` + +More information as to how `go_repo` works can be found +[here](/plugins.html#go_repo). + +NB: This build label looks a little different. That's because it's referencing a build target in a subrepo. 
+### Updating our tests + +We can now use this library in our tests: + +### `src/greetings/greetings_test.go` +```go +package greetings_test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/example/module/src/greetings" +) + +func TestGreeting(t *testing.T) { + assert.NotEqual(t, greetings.Greeting(), "") +} +``` + +### `src/greetings/BUILD` +```python +go_library( + name = "greetings", + srcs = ["greetings.go"], + visibility = ["//src/..."], +) + +go_test( + name = "greetings_test", + srcs = ["greetings_test.go"], + deps = [ + ":greetings", + # Could use a subrepo label i.e. ///third_party/go/github.com_stretchr_testify//assert instead if we want + "//third_party/go:testify", + ], + external = True, +) +``` + +And then we can check it all works: +```text +$ plz test +//src/greetings:greetings_test 1 test run in 3ms; 1 passed +1 test target and 1 test run; 1 passed. +Total time: 480ms real, 0s compute. +``` + +## What next? +Duration: 1 + +Hopefully you now have an idea as to how to build Go with Please. Please is capable of so much more though! + +- [Please basics](/basics.html) - A more general introduction to Please. It covers a lot of what we have in this +tutorial in more detail. +- [Go plugin rules](/plugins.html#go) - See the rest of the Go plugin rules and config. +- [Built-in rules](/lexicon.html#go) - See the rest of the built in rules. +- [Config](/config.html) - See the available config options for Please. +- [Command line interface](/commands.html) - Please has a powerful command line interface. Interrogate the build graph, +determine files changes since master, watch rules and build them automatically as things change and much more! Use +`plz help`, and explore this rich set of commands! + +Otherwise, why not try one of the other codelabs! 
diff --git a/docs/codelabs/k8s.md b/docs/codelabs/k8s.md index 7ae13be8d..85bd922c3 100644 --- a/docs/codelabs/k8s.md +++ b/docs/codelabs/k8s.md @@ -1,456 +1,456 @@ -id: k8s -summary: Kubernetes and Docker -description: Learn about using Please to build and deploy Docker images and Kubernetes manifests -categories: intermediate -tags: medium -status: Published -authors: Jon Poole -Feedback Link: https://github.com/thought-machine/please - -# Kubernetes and Docker -## Overview -Duration: 1 - -### Prerequisites -- You must have Please installed: [Install Please](https://please.build/quickstart.html) -- You should be comfortable using the existing build rules. -- You should be familiar with [Docker](https://docs.docker.com/get-started/) - and [Kubernetes](https://kubernetes.io/docs/tutorials/kubernetes-basics/) - -This codelab uses Golang for the example service however the language used for this service isn't that important. Just -make sure you're able to build a binary in whatever your preferred language is. - -### What you'll learn -This codelab is quite long and tries to give an idea of what a complete build pipeline might look like for a docker and -kubernetes based project. You'll learn: - -- How to build a service and bake that into docker image -- How to build a kubernetes deployment for that docker image -- Starting minikube and testing your deployment out -- Setting up aliases to streamline your dev workflow - -### What if I get stuck? - -The final result of running through this codelab can be found -[here](https://github.com/thought-machine/please-codelabs/tree/main/kubernetes_and_docker) for reference. If you really get stuck -you can find us on [gitter](https://gitter.im/please-build/Lobby)! - -## Creating a service -Duration: 5 - -First up, let's create a service to deploy. It's not really important what it does or what language we implement it in. -For this codelab, we'll make a simple hello world HTTP service in go. 
- -### Initialising the project -```bash -plz init -go mod init github.com/example/module -plz init plugin go -``` - -### Set up the Go plugin - -Add a go toolchain to `third_party/go/BUILD` -```go -go_toolchain( - name = "toolchain", - version = "1.20", -) -``` - -And configure the plugin: -``` -[Plugin "go"] -Target = //plugins:go -ImportPath = github.com/example/module -GoTool = //third_party/go:toolchain|go -``` - -For more information on this, check out the Go codelab. - -### Creating a Go service -Create a file `hello_service/service.go`: - -```golang -package main - -import ( - "fmt" - "log" - "net/http" -) - -func main() { - http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { - fmt.Fprintln(w, "This is my website!") - }) - - http.HandleFunc("/hello", func(w http.ResponseWriter, r *http.Request) { - fmt.Fprintln(w, "Hello, HTTP!") - }) - - if err := http.ListenAndServe(":8000", nil); err != nil { - log.Fatal("Error starting the server: ", err) - } -} -``` - -Then create a `hello_service/BUILD` file like so: -```python -go_binary( - name = "hello_service", - srcs = ["service.go"], - visibility = ["//hello_service/k8s:all"], -) -``` - -And test it works: - -```bash -plz run //hello_service:hello_service & -curl localhost:8000 -pkill hello_service -``` - -The output should look like this: -```bash -[1] 28694 -Hello, world! -[1]+ Terminated plz run //hello_service -``` - -## Building a Docker image -Duration: 5 - -Before we create a docker image for our service, it can be useful to create a base image that all our services share. -This can be used this to install language runtimes e.g. a python interpreter. If you're using a language that requires -a runtime, this is where you should install it. In this case, we're using Go so this isn't strictly necessary. 
- -Let's create a base docker file for our repo that all our services will use in `common/docker/Dockerfile-base`: -``` -FROM ubuntu:22.04 - -RUN apt update -y && apt upgrade -y -``` - -### Docker build rules - -To use the docker build rules, we need to install the docker plugin, as well as the shell plugin which it requires: - -```bash -plz init plugin shell && plz init plugin docker -``` - -We can then build a set of scripts that help us build, and push our docker images. Add the following to `common/docker/BUILD`: - -```python -docker_image( - name = "base", - dockerfile = "Dockerfile-base", - visibility = ["PUBLIC"], -) -``` - -And then let's build that: -``` -$ plz build //common/docker:base -Build finished; total time 80ms, incrementality 40.0%. Outputs: -//common/docker:base: - plz-out/bin/common/docker/base.sh -``` - -### So what's going on? -As promised, the output of the docker image rule is a script that can build the docker image for you. We can have a -look at what the script is doing: - -```bash -$ cat plz-out/bin/common/docker/base.sh -#!/bin/sh -docker build -t please-examples/base:0d45575ad71adea9861b079e5d56ff0bdc179a1868d06d6b3d102721824c1538 \ - -f Dockerfile-base - < plz-out/gen/common/docker/_base#docker_context.tar.gz -``` - -There's a couple key things to note: -- The image has been tagged with a hash based on the inputs to the rule. This means that we can always refer -back to this specific version of this image. -- It's generated us a `tar.gz` containing all the other files we might need to build the Docker image. - -We can run this script to build the image and push it to the docker daemon as set in our docker env: -```bash -plz run //common/docker:base -``` - -## Using our base image -Duration: 5 - -So now we have a base image, let's use it for our docker image. 
Create a `hello_service/k8s/Dockerfile` for our hello -service: - -``` -FROM //common/docker:base - -COPY /hello_service /hello_service - -ENTRYPOINT [ "/hello_service" ] -``` - -And then set up some build rules for that in `hello_service/k8s/BUILD`: - -``` -docker_image( - name = "image", - srcs = ["//hello_service"], - dockerfile = "Dockerfile", - base_image = "//common/docker:base", -) -``` - -Let's build this and have a look at the script it generates: - -``` -$ plz build //hello_service/k8s:image -Build finished; total time 100ms, incrementality 100.0%. Outputs: -//hello_service/k8s:image: - plz-out/bin/hello_service/k8s/image.sh - -$ cat plz-out/bin/hello_service/k8s/image.sh -#!/bin/sh -./plz-out/bin/common/docker/base.sh \ - && docker build -t please-example/image:0d45575ad71adea9861b079e5d56ff0bdc179a1868d06d6b3d102721824c1538 -f \ - Dockerfile - < plz-out/gen/hello_service/k8s/_image#docker_context.tar.gz -``` - -Note, this script takes care of building the base image for us, so we don't have to orchestrate this ourselves. - -## Creating a Kubernetes deployment -Duration: 5 - -Let's create `hello_service/k8s/deployment.yaml` for our service: -```yaml -apiVersion: apps/v1 -kind: Deployment -metadata: - name: hello - labels: - app: hello -spec: - replicas: 3 - selector: - matchLabels: - app: hello - template: - metadata: - labels: - app: hello - spec: - containers: - - name: main - image: //hello_service/k8s:image - ports: - # This must match the port we start the server on in hello-service/main.py - - containerPort: 8000 -``` - -Let's also create `hello_service/k8s/service.yaml` for good measure: -```yaml -apiVersion: v1 -kind: Service -metadata: - name: hello-svc -spec: - selector: - app: hello - ports: - - protocol: TCP - port: 8000 - targetPort: 8000 -``` - -### Kubernetes rules -Note that we've referenced the image `//hello-service/k8s:image` in the deployment. 
The kubernetes rules are able to -template your yaml files substituting in the image with the correct label based on the version of the image we just -built! This ties all the images and kubernetes manifests together based on the current state of the repo making the -deployment much more reproducible! - -To add the kubernetes rules, run `plz init plugin k8s`. - -Let's update `hello_service/k8s/BUILD` to build these manifests: - -```python -docker_image( - name = "image", - srcs = ["//hello_service"], - dockerfile = "Dockerfile", - base_image = "//common/docker:base", -) - -k8s_config( - name = "k8s", - srcs = [ - "deployment.yaml", - "service.yaml", - ], - containers = [":image"], -) -``` - -And check that has done the right thing: -``` -$ plz build //hello_service/k8s -Build finished; total time 90ms, incrementality 90.9%. Outputs: -//hello_service/k8s:k8s: - plz-out/gen/hello_service/k8s/templated_deployment.yaml - plz-out/gen/hello_service/k8s/templated_service.yaml - - -$ cat plz-out/gen/hello_service/k8s/templated_deployment.yaml -apiVersion: apps/v1 -kind: Deployment -metadata: - name: hello - labels: - app: hello -spec: - replicas: 3 - selector: - matchLabels: - app: hello - template: - metadata: - labels: - app: hello - spec: - containers: - - name: main - image: please-example/image:0d45575ad71adea9861b079e5d56ff0bdc179a1868d06d6b3d102721824c1538 - ports: - # This must match the port we start the server on in hello-service/main.py - - containerPort: 8000 -``` - -As you can see, this image matches the image we built earlier! These rules also provide a useful script for pushing -the manifests to kubernetes: - -``` -$ plz build //hello_service/k8s:k8s_push -Build finished; total time 140ms, incrementality 100.0%. 
Outputs: -//hello_service/k8s:k8s_push: - plz-out/bin/hello_service/k8s/k8s_push.sh - -$ cat plz-out/bin/hello_service/k8s/k8s_push.sh -#!/bin/sh -kubectl apply -f plz-out/gen/hello_service/k8s/templated_deployment.yaml && \ -kubectl apply -f plz-out/gen/hello_service/k8s/templated_service.yaml -``` - -## Local testing with minikube -Duration: 5 - -Let's tie this all together by deploying our service to minikube! - -### Setting up minikube -We can get Please to download minikube for us. Let's create `third_party/binary/BUILD` to do so: - -``` -remote_file ( - name = "minikube", - url = f"https://storage.googleapis.com/minikube/releases/latest/minikube-{CONFIG.OS}-{CONFIG.ARCH}", - binary = True, -) -``` - -And then we can start the cluster like so: -```bash -plz run //third_party/binary:minikube -- start -``` - -### Deploying our service - -First we need to push our images to minikube's docker. To do this we need to point `docker` at minikube: - -```bash -eval $(plz run //third_party/binary:minikube -- docker-env) -``` - -Then we can run our deployment scripts: - -```bash -plz run //hello_service/k8s:image_load && plz run //hello_service/k8s:k8s_push -``` - -And check they're working as we expected: - -``` -$ kubectl port-forward service/hello-svc 8000:8000 & curl localhost:8000 - -[1] 25986 -Hello world! - -$ pkill kubectl -[1]+ Terminated kubectl kubectl port-forward service/hello-svc 8000:8000 -``` - -## Please deploy -Duration: 5 - -Here we have learnt about the provided targets we need to run to get our changes deployed to minikube, however it's a -bit of a ritual. Let's look at consolidating this into a single command. Luckily the generated targets are labeled so -this is as simple as: - -```bash -plz run sequential --include docker-build --include k8s-push //hello_service/... 
-``` - -We can then set up an alias for this in `.plzconfig`: - -``` -[alias "deploy"] -cmd = run sequential --include docker-build --include k8s-push -; Enable tab completion for build labels -positionallabels = true -``` - -This is used like: - -```bash -plz deploy //hello_service/... -``` - -## Docker build and build systems -Duration: 7 - -To finish this off, it's worth talking about the challenges with building docker images from Docker files in a -file based build system. - -Integrating a build system with `docker build` is notoriously difficult. Build systems have trouble building your image -as `docker build` sends the image to a daemon running in the background. There's no easy way to get a file based artifact -out of Docker without this extra infrastructure. The built in rules produce a number of scripts to help build, load, -push and save images: - -``` -docker_image( - name = "image", - srcs = [":example"], - base_image = ":base", - run_args = "-p 8000:8000", - visibility = ["//k8s/example:all"], -) -``` - -This single target produces the following sub-targets: - -- `:image_fqn` target contains the fully qualified name of the generated image. Each image gets tagged with the hash -of its inputs so this can be relied upon to uniquely identify this image. -- `:image` & `:image_load` are the same script. This script loads the image into the local docker daemon. It will -also make sure the base image is build and loaded first. -- `:image_push` will load and push the image to the docker registry as configured by your local machines docker -environment. -- `:image_save` will load and then save the image to a `.tar` in `plz-out/gen` -- `:image_run` will run the image in the local docker env - -There are two ways we anticipate these targets to be used as part of a CI/CD pipeline: - -- The build server can be given access to the docker registry, and the images can be loaded directly with `:image_push`. 
-- The build server can save the images out to an offline image tarball with `:image_save`. These can be exported as -artifacts from the build server. Another stage of the CI/CD pipeline can then push these to the docker registry via -`docker load`. +id: k8s +summary: Kubernetes and Docker +description: Learn about using Please to build and deploy Docker images and Kubernetes manifests +categories: intermediate +tags: medium +status: Published +authors: Jon Poole +Feedback Link: https://github.com/thought-machine/please + +# Kubernetes and Docker +## Overview +Duration: 1 + +### Prerequisites +- You must have Please installed: [Install Please](https://please.build/quickstart.html) +- You should be comfortable using the existing build rules. +- You should be familiar with [Docker](https://docs.docker.com/get-started/) + and [Kubernetes](https://kubernetes.io/docs/tutorials/kubernetes-basics/) + +This codelab uses Golang for the example service however the language used for this service isn't that important. Just +make sure you're able to build a binary in whatever your preferred language is. + +### What you'll learn +This codelab is quite long and tries to give an idea of what a complete build pipeline might look like for a docker and +kubernetes based project. You'll learn: + +- How to build a service and bake that into docker image +- How to build a kubernetes deployment for that docker image +- Starting minikube and testing your deployment out +- Setting up aliases to streamline your dev workflow + +### What if I get stuck? + +The final result of running through this codelab can be found +[here](https://github.com/thought-machine/please-codelabs/tree/main/kubernetes_and_docker) for reference. If you really get stuck +you can find us on [gitter](https://gitter.im/please-build/Lobby)! + +## Creating a service +Duration: 5 + +First up, let's create a service to deploy. It's not really important what it does or what language we implement it in. 
+For this codelab, we'll make a simple hello world HTTP service in go. + +### Initialising the project +```bash +plz init +go mod init github.com/example/module +plz init plugin go +``` + +### Set up the Go plugin + +Add a go toolchain to `third_party/go/BUILD` +```go +go_toolchain( + name = "toolchain", + version = "1.20", +) +``` + +And configure the plugin: +``` +[Plugin "go"] +Target = //plugins:go +ImportPath = github.com/example/module +GoTool = //third_party/go:toolchain|go +``` + +For more information on this, check out the Go codelab. + +### Creating a Go service +Create a file `hello_service/service.go`: + +```golang +package main + +import ( + "fmt" + "log" + "net/http" +) + +func main() { + http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { + fmt.Fprintln(w, "This is my website!") + }) + + http.HandleFunc("/hello", func(w http.ResponseWriter, r *http.Request) { + fmt.Fprintln(w, "Hello, HTTP!") + }) + + if err := http.ListenAndServe(":8000", nil); err != nil { + log.Fatal("Error starting the server: ", err) + } +} +``` + +Then create a `hello_service/BUILD` file like so: +```python +go_binary( + name = "hello_service", + srcs = ["service.go"], + visibility = ["//hello_service/k8s:all"], +) +``` + +And test it works: + +```bash +plz run //hello_service:hello_service & +curl localhost:8000 +pkill hello_service +``` + +The output should look like this: +```bash +[1] 28694 +Hello, world! +[1]+ Terminated plz run //hello_service +``` + +## Building a Docker image +Duration: 5 + +Before we create a docker image for our service, it can be useful to create a base image that all our services share. +This can be used this to install language runtimes e.g. a python interpreter. If you're using a language that requires +a runtime, this is where you should install it. In this case, we're using Go so this isn't strictly necessary. 
+ +Let's create a base docker file for our repo that all our services will use in `common/docker/Dockerfile-base`: +``` +FROM ubuntu:22.04 + +RUN apt update -y && apt upgrade -y +``` + +### Docker build rules + +To use the docker build rules, we need to install the docker plugin, as well as the shell plugin which it requires: + +```bash +plz init plugin shell && plz init plugin docker +``` + +We can then build a set of scripts that help us build, and push our docker images. Add the following to `common/docker/BUILD`: + +```python +docker_image( + name = "base", + dockerfile = "Dockerfile-base", + visibility = ["PUBLIC"], +) +``` + +And then let's build that: +``` +$ plz build //common/docker:base +Build finished; total time 80ms, incrementality 40.0%. Outputs: +//common/docker:base: + plz-out/bin/common/docker/base.sh +``` + +### So what's going on? +As promised, the output of the docker image rule is a script that can build the docker image for you. We can have a +look at what the script is doing: + +```bash +$ cat plz-out/bin/common/docker/base.sh +#!/bin/sh +docker build -t please-examples/base:0d45575ad71adea9861b079e5d56ff0bdc179a1868d06d6b3d102721824c1538 \ + -f Dockerfile-base - < plz-out/gen/common/docker/_base#docker_context.tar.gz +``` + +There's a couple key things to note: +- The image has been tagged with a hash based on the inputs to the rule. This means that we can always refer +back to this specific version of this image. +- It's generated us a `tar.gz` containing all the other files we might need to build the Docker image. + +We can run this script to build the image and push it to the docker daemon as set in our docker env: +```bash +plz run //common/docker:base +``` + +## Using our base image +Duration: 5 + +So now we have a base image, let's use it for our docker image. 
Create a `hello_service/k8s/Dockerfile` for our hello +service: + +``` +FROM //common/docker:base + +COPY /hello_service /hello_service + +ENTRYPOINT [ "/hello_service" ] +``` + +And then set up some build rules for that in `hello_service/k8s/BUILD`: + +``` +docker_image( + name = "image", + srcs = ["//hello_service"], + dockerfile = "Dockerfile", + base_image = "//common/docker:base", +) +``` + +Let's build this and have a look at the script it generates: + +``` +$ plz build //hello_service/k8s:image +Build finished; total time 100ms, incrementality 100.0%. Outputs: +//hello_service/k8s:image: + plz-out/bin/hello_service/k8s/image.sh + +$ cat plz-out/bin/hello_service/k8s/image.sh +#!/bin/sh +./plz-out/bin/common/docker/base.sh \ + && docker build -t please-example/image:0d45575ad71adea9861b079e5d56ff0bdc179a1868d06d6b3d102721824c1538 -f \ + Dockerfile - < plz-out/gen/hello_service/k8s/_image#docker_context.tar.gz +``` + +Note, this script takes care of building the base image for us, so we don't have to orchestrate this ourselves. + +## Creating a Kubernetes deployment +Duration: 5 + +Let's create `hello_service/k8s/deployment.yaml` for our service: +```yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: hello + labels: + app: hello +spec: + replicas: 3 + selector: + matchLabels: + app: hello + template: + metadata: + labels: + app: hello + spec: + containers: + - name: main + image: //hello_service/k8s:image + ports: + # This must match the port we start the server on in hello-service/main.py + - containerPort: 8000 +``` + +Let's also create `hello_service/k8s/service.yaml` for good measure: +```yaml +apiVersion: v1 +kind: Service +metadata: + name: hello-svc +spec: + selector: + app: hello + ports: + - protocol: TCP + port: 8000 + targetPort: 8000 +``` + +### Kubernetes rules +Note that we've referenced the image `//hello-service/k8s:image` in the deployment. 
The kubernetes rules are able to +template your yaml files substituting in the image with the correct label based on the version of the image we just +built! This ties all the images and kubernetes manifests together based on the current state of the repo making the +deployment much more reproducible! + +To add the kubernetes rules, run `plz init plugin k8s`. + +Let's update `hello_service/k8s/BUILD` to build these manifests: + +```python +docker_image( + name = "image", + srcs = ["//hello_service"], + dockerfile = "Dockerfile", + base_image = "//common/docker:base", +) + +k8s_config( + name = "k8s", + srcs = [ + "deployment.yaml", + "service.yaml", + ], + containers = [":image"], +) +``` + +And check that has done the right thing: +``` +$ plz build //hello_service/k8s +Build finished; total time 90ms, incrementality 90.9%. Outputs: +//hello_service/k8s:k8s: + plz-out/gen/hello_service/k8s/templated_deployment.yaml + plz-out/gen/hello_service/k8s/templated_service.yaml + + +$ cat plz-out/gen/hello_service/k8s/templated_deployment.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: hello + labels: + app: hello +spec: + replicas: 3 + selector: + matchLabels: + app: hello + template: + metadata: + labels: + app: hello + spec: + containers: + - name: main + image: please-example/image:0d45575ad71adea9861b079e5d56ff0bdc179a1868d06d6b3d102721824c1538 + ports: + # This must match the port we start the server on in hello-service/main.py + - containerPort: 8000 +``` + +As you can see, this image matches the image we built earlier! These rules also provide a useful script for pushing +the manifests to kubernetes: + +``` +$ plz build //hello_service/k8s:k8s_push +Build finished; total time 140ms, incrementality 100.0%. 
Outputs: +//hello_service/k8s:k8s_push: + plz-out/bin/hello_service/k8s/k8s_push.sh + +$ cat plz-out/bin/hello_service/k8s/k8s_push.sh +#!/bin/sh +kubectl apply -f plz-out/gen/hello_service/k8s/templated_deployment.yaml && \ +kubectl apply -f plz-out/gen/hello_service/k8s/templated_service.yaml +``` + +## Local testing with minikube +Duration: 5 + +Let's tie this all together by deploying our service to minikube! + +### Setting up minikube +We can get Please to download minikube for us. Let's create `third_party/binary/BUILD` to do so: + +``` +remote_file ( + name = "minikube", + url = f"https://storage.googleapis.com/minikube/releases/latest/minikube-{CONFIG.OS}-{CONFIG.ARCH}", + binary = True, +) +``` + +And then we can start the cluster like so: +```bash +plz run //third_party/binary:minikube -- start +``` + +### Deploying our service + +First we need to push our images to minikube's docker. To do this we need to point `docker` at minikube: + +```bash +eval $(plz run //third_party/binary:minikube -- docker-env) +``` + +Then we can run our deployment scripts: + +```bash +plz run //hello_service/k8s:image_load && plz run //hello_service/k8s:k8s_push +``` + +And check they're working as we expected: + +``` +$ kubectl port-forward service/hello-svc 8000:8000 & curl localhost:8000 + +[1] 25986 +Hello world! + +$ pkill kubectl +[1]+ Terminated kubectl kubectl port-forward service/hello-svc 8000:8000 +``` + +## Please deploy +Duration: 5 + +Here we have learnt about the provided targets we need to run to get our changes deployed to minikube, however it's a +bit of a ritual. Let's look at consolidating this into a single command. Luckily the generated targets are labeled so +this is as simple as: + +```bash +plz run sequential --include docker-build --include k8s-push //hello_service/... 
+``` + +We can then set up an alias for this in `.plzconfig`: + +``` +[alias "deploy"] +cmd = run sequential --include docker-build --include k8s-push +; Enable tab completion for build labels +positionallabels = true +``` + +This is used like: + +```bash +plz deploy //hello_service/... +``` + +## Docker build and build systems +Duration: 7 + +To finish this off, it's worth talking about the challenges with building docker images from Docker files in a +file based build system. + +Integrating a build system with `docker build` is notoriously difficult. Build systems have trouble building your image +as `docker build` sends the image to a daemon running in the background. There's no easy way to get a file based artifact +out of Docker without this extra infrastructure. The built in rules produce a number of scripts to help build, load, +push and save images: + +``` +docker_image( + name = "image", + srcs = [":example"], + base_image = ":base", + run_args = "-p 8000:8000", + visibility = ["//k8s/example:all"], +) +``` + +This single target produces the following sub-targets: + +- `:image_fqn` target contains the fully qualified name of the generated image. Each image gets tagged with the hash +of its inputs so this can be relied upon to uniquely identify this image. +- `:image` & `:image_load` are the same script. This script loads the image into the local docker daemon. It will +also make sure the base image is build and loaded first. +- `:image_push` will load and push the image to the docker registry as configured by your local machines docker +environment. +- `:image_save` will load and then save the image to a `.tar` in `plz-out/gen` +- `:image_run` will run the image in the local docker env + +There are two ways we anticipate these targets to be used as part of a CI/CD pipeline: + +- The build server can be given access to the docker registry, and the images can be loaded directly with `:image_push`. 
+- The build server can save the images out to an offline image tarball with `:image_save`. These can be exported as +artifacts from the build server. Another stage of the CI/CD pipeline can then push these to the docker registry via +`docker load`. diff --git a/docs/codelabs/plz_query.md b/docs/codelabs/plz_query.md index 96a21e1be..35f4fc198 100644 --- a/docs/codelabs/plz_query.md +++ b/docs/codelabs/plz_query.md @@ -1,310 +1,310 @@ -summary: Tips and tricks - plz query -description: Tips and tricks to help you become productive with Please - using plz query to query the build graph -id: plz_query -categories: intermediate -tags: medium -status: Published -authors: Jon Poole -Feedback Link: https://github.com/thought-machine/please - -# Tips and tricks - plz query - -## Overview - -Duration: 2 - -### Prerequisites - -- You must have Please installed: [Install please](https://please.build/quickstart.html) -- You should have a basic understanding of using Please to build and test code - -### What you'll learn - -This codelab isn't exhaustive however it should give you an idea of the sort of -things the Please CLI is capable of: - -- Finding the dependencies of a target -- Including and excluding targets -- Printing information about targets as well as internal targets - -## Setting up - -Duration: 2 - -For this codelab we will be using the Please codelabs repo: - -```bash -git clone https://github.com/thought-machine/please-codelabs -``` - -The output should look something like this: - -```bash -Cloning into 'please-codelabs'... -remote: Enumerating objects: 228, done. -remote: Total 228 (delta 0), reused 0 (delta 0), pack-reused 228 -Receiving objects: 100% (228/228), 38.23 KiB | 543.00 KiB/s, done. -Resolving deltas: 100% (79/79), done. 
-``` - -We'll be using the getting started with go codelab for these examples: - -```bash -cd please-codelabs/getting_started_go -``` - -## Finding dependencies of a target - -Duration: 4 - -Please has a strict build graph representing each build target and their -dependencies on each other. Among many things, this graph can be interrogated -to determine the dependencies of a target: - -```bash -$ plz query deps //src/greetings:greetings_test -//src/greetings:greetings_test - //src/greetings:greetings - ///go//tools:please_go - //plugins:go - //_please:arcat - //third_party/go:toolchain - //third_party/go:testify - ///third_party/go/github.com_stretchr_testify//:installs - ///third_party/go/github.com_stretchr_testify//assert:assert - ///third_party/go/github.com_davecgh_go-spew//spew:spew - //third_party/go:github.com_davecgh_go-spew - ///third_party/go/github.com_pmezard_go-difflib//difflib:difflib - //third_party/go:github.com_pmezard_go-difflib - ///third_party/go/gopkg.in_yaml.v3//:yaml.v3 - //third_party/go:gopkg.in_yaml.v3 - ///third_party/go/github.com_stretchr_testify//require:require -``` - -This can be especially useful when trying to improve build performance. -Unnecessary dependencies between targets can cause certain rules to be rebuilt -when they don't need to be. - -### Subrepo rules - -Woah, what are these `///third_party/go/foo//:bar` targets? Targets that begin -with a `///` are subrepo targets. In this case, the third-party dependency -*testify* has been defined using a `go_repo()` rule, which downloads the go -module into plz-out, generates Please BUILD files for each of its packages, and -then builds it like any other Please project. So -`///third_party/go/github.com_stretchr_testify//assert:assert` is saying "look -in the subrepo called *third_party/go/github.com_stretchr_testify*, and retrieve -for me the build target `//assert:assert`. 
- -You can `plz query print` these targets just like you would any other target: - -```bash -plz query print ///third_party/go/github.com_stretchr_testify//assert:assert -``` - -This will show you the underlying build rule for that target. Or, if you prefer, -you could have a look in the plz-out directory at the generated build rule: - -```bash -$ cat plz-out/subrepos/third_party/go/github.com_stretchr_testify/assert/BUILD -subinclude("///go//build_defs:go") - -go_library( - name = "assert", - srcs = [ - "assertion_compare.go", - "assertion_compare_can_convert.go", - "assertion_format.go", - "assertion_forward.go", - "assertion_order.go", - "assertions.go", - "doc.go", - "errors.go", - "forward_assertions.go", - "http_assertions.go", - ], - visibility = ["PUBLIC"], - deps = [ - "///third_party/go/github.com_davecgh_go-spew//spew", - "///third_party/go/github.com_pmezard_go-difflib//difflib", - "///third_party/go/gopkg.in_yaml.v3//:yaml.v3", - ], -) -``` - -### Internal rules - -If you pass the `--hidden` flag to a `plz query` command, you'll come across -*internal* targets as well. These can be identified by the leading `_` in their -name. E.g. - -```bash -$ plz query deps //src/greetings:greetings --hidden -//src/greetings:greetings - //src/greetings:_greetings#import_config - //src/greetings:_greetings#pkg_info - ///go//tools:please_go - //plugins:go - //_please:arcat - //plugins:_go#download - //src/greetings:_greetings#srcs - //third_party/go:toolchain - //third_party/go:_toolchain#download -``` - -As always, we can inspect these with `plz query print`, e.g. 
- -```bash -$ plz query print //src/greetings:_greetings#srcs -# //src/greetings:_greetings#srcs: -filegroup( - name = '_greetings#srcs', - srcs = ['greetings.go'], - labels = [ - 'link:plz-out/go/src/${PKG}', - 'go_src', - 'go', - ], - visibility = ['//src/...'], - build_timeout = 600, - requires = ['go'], -) -``` - -This particular internal rule is a filegroup that was generated by -`go_library()` and is used to expose the Go source files that make up that -library. You shouldn't depend on these types of rules directly as they may -change between minor releases of Please. - -## Reverse dependencies - -Duration: 2 - -If you're changing a build rule that you know has a wide-reaching effect, it -might be good to run all the tests that will be affected by that change. Let's -find the reverse dependencies of our subrepo rules: - -```bash -$ plz query revdeps ///third_party/go/github.com_stretchr_testify//require:require -///third_party/go/github.com_stretchr_testify//:installs -``` - -Well that doesn't look quite right... We should see -`//src/greetings:greetings_test` too. - -Turns out finding reverse dependencies is quite a slow operation. Please limits -this to just one level so you don't accidentally lock up your terminal trying to -walk the whole build graph. You can set the level with `--level=2` or if you -want to get all reverse dependencies, you can set it to `-1`: - -```bash -$ plz query revdeps ///third_party/go/github.com_stretchr_testify//require:require --level -1 -//src/greetings:greetings_test -//third_party/go:testify -///third_party/go/github.com_stretchr_testify//:installs -``` - -Be careful, this can be slow on larger build graphs. You can use -`--include=//src/foo/...` to limit the search to a slice of your repository. -More on this later in this codelab! - -## Composing plz commands - -Duration: 2 - -So we've managed to determine which targets will be affected by our change. How -do we run these tests? 
Please can be instructed to listen for targets on -standard input: - -```bash -$ plz query revdeps ///third_party/go/github.com_stretchr_testify//require:require --level -1 | plz test - -//src/greetings:greetings_test 1 test run in 8ms; 1 passed -1 test target and 1 test run; 1 passed. -Total time: 6.62s real, 10ms compute. -``` - -The `-` at the end of `plz test -` indicates to Please that we will be -supplying the targets to build over standard input. - -## Including and excluding targets - -Duration: 2 - -Almost all Please commands can take in the `--include` and `--exclude` -arguments. These can be used to specifically exclude targets: - -```bash -$ plz query revdeps --exclude //src/greetings:greetings_test --level=-1 ///third_party/go/github.com_stretchr_testify//require:require | plz test - -0 test targets and 0 tests run; 0 passed. -Total time: 40ms real, 0s compute. -``` - -As you can see, we excluded the test from earlier so `plz test` didn't run it. -We can also exclude this on the test command: - -```bash -$ plz query revdeps --level=-1 ///third_party/go/github.com_stretchr_testify//require:require | plz test --exclude //src/greetings:greetings_test - -0 test targets and 0 tests run; 0 passed. -Total time: 40ms real, 0s compute. -``` - -### Including based on label - -Targets can be labeled in Please. Most of the built-in rules apply some basic -labels, e.g. the Go rules apply the `go` label to their targets. These can be -very useful to run all tests for a given language: - -```bash -plz build --include go --exclude //third_party/go/... -``` - -This will build all Go targets but will only build targets under -`//third_party/go/...` if they're a dependency of a target that needs to be built. - -You may also add custom labels to your targets. 
Update `srcs/greetings/BUILD` as such: - -### `src/greetings/BUILD` -```python -go_library( - name = "greetings", - srcs = ["greetings.go"], - visibility = ["//src/..."], - labels = ["my_label"], # Add a label to the library rule -) - -go_test( - name = "greetings_test", - srcs = ["greetings_test.go"], - deps = [ - ":greetings", - "//third_party/go:assert", - ], - external = True, -) -``` - -```bash -$ plz query alltargets --include=my_label -//src/greetings:greetings - -$ plz build --include=my_label -Build finished; total time 300ms, incrementality 100.0%. Outputs: -//src/greetings:greetings: - plz-out/gen/src/greetings/greetings.a -``` - -This can be especially useful for separating out slow running tests: - -```bash -plz test --exclude my_label -``` - -This will run all tests except those labeled with `my_label`. - -## What's next? - -Duration: 1 - -Hopefully this has given you a taster for what is possible with `plz query`, -however there's so much more. See the [cli](/commands.html#query) for an idea of -what's possible! 
+summary: Tips and tricks - plz query +description: Tips and tricks to help you become productive with Please - using plz query to query the build graph +id: plz_query +categories: intermediate +tags: medium +status: Published +authors: Jon Poole +Feedback Link: https://github.com/thought-machine/please + +# Tips and tricks - plz query + +## Overview + +Duration: 2 + +### Prerequisites + +- You must have Please installed: [Install please](https://please.build/quickstart.html) +- You should have a basic understanding of using Please to build and test code + +### What you'll learn + +This codelab isn't exhaustive however it should give you an idea of the sort of +things the Please CLI is capable of: + +- Finding the dependencies of a target +- Including and excluding targets +- Printing information about targets as well as internal targets + +## Setting up + +Duration: 2 + +For this codelab we will be using the Please codelabs repo: + +```bash +git clone https://github.com/thought-machine/please-codelabs +``` + +The output should look something like this: + +```bash +Cloning into 'please-codelabs'... +remote: Enumerating objects: 228, done. +remote: Total 228 (delta 0), reused 0 (delta 0), pack-reused 228 +Receiving objects: 100% (228/228), 38.23 KiB | 543.00 KiB/s, done. +Resolving deltas: 100% (79/79), done. +``` + +We'll be using the getting started with go codelab for these examples: + +```bash +cd please-codelabs/getting_started_go +``` + +## Finding dependencies of a target + +Duration: 4 + +Please has a strict build graph representing each build target and their +dependencies on each other. 
Among many things, this graph can be interrogated +to determine the dependencies of a target: + +```bash +$ plz query deps //src/greetings:greetings_test +//src/greetings:greetings_test + //src/greetings:greetings + ///go//tools:please_go + //plugins:go + //_please:arcat + //third_party/go:toolchain + //third_party/go:testify + ///third_party/go/github.com_stretchr_testify//:installs + ///third_party/go/github.com_stretchr_testify//assert:assert + ///third_party/go/github.com_davecgh_go-spew//spew:spew + //third_party/go:github.com_davecgh_go-spew + ///third_party/go/github.com_pmezard_go-difflib//difflib:difflib + //third_party/go:github.com_pmezard_go-difflib + ///third_party/go/gopkg.in_yaml.v3//:yaml.v3 + //third_party/go:gopkg.in_yaml.v3 + ///third_party/go/github.com_stretchr_testify//require:require +``` + +This can be especially useful when trying to improve build performance. +Unnecessary dependencies between targets can cause certain rules to be rebuilt +when they don't need to be. + +### Subrepo rules + +Woah, what are these `///third_party/go/foo//:bar` targets? Targets that begin +with a `///` are subrepo targets. In this case, the third-party dependency +*testify* has been defined using a `go_repo()` rule, which downloads the go +module into plz-out, generates Please BUILD files for each of its packages, and +then builds it like any other Please project. So +`///third_party/go/github.com_stretchr_testify//assert:assert` is saying "look +in the subrepo called *third_party/go/github.com_stretchr_testify*, and retrieve +for me the build target `//assert:assert`. + +You can `plz query print` these targets just like you would any other target: + +```bash +plz query print ///third_party/go/github.com_stretchr_testify//assert:assert +``` + +This will show you the underlying build rule for that target. 
Or, if you prefer, +you could have a look in the plz-out directory at the generated build rule: + +```bash +$ cat plz-out/subrepos/third_party/go/github.com_stretchr_testify/assert/BUILD +subinclude("///go//build_defs:go") + +go_library( + name = "assert", + srcs = [ + "assertion_compare.go", + "assertion_compare_can_convert.go", + "assertion_format.go", + "assertion_forward.go", + "assertion_order.go", + "assertions.go", + "doc.go", + "errors.go", + "forward_assertions.go", + "http_assertions.go", + ], + visibility = ["PUBLIC"], + deps = [ + "///third_party/go/github.com_davecgh_go-spew//spew", + "///third_party/go/github.com_pmezard_go-difflib//difflib", + "///third_party/go/gopkg.in_yaml.v3//:yaml.v3", + ], +) +``` + +### Internal rules + +If you pass the `--hidden` flag to a `plz query` command, you'll come across +*internal* targets as well. These can be identified by the leading `_` in their +name. E.g. + +```bash +$ plz query deps //src/greetings:greetings --hidden +//src/greetings:greetings + //src/greetings:_greetings#import_config + //src/greetings:_greetings#pkg_info + ///go//tools:please_go + //plugins:go + //_please:arcat + //plugins:_go#download + //src/greetings:_greetings#srcs + //third_party/go:toolchain + //third_party/go:_toolchain#download +``` + +As always, we can inspect these with `plz query print`, e.g. + +```bash +$ plz query print //src/greetings:_greetings#srcs +# //src/greetings:_greetings#srcs: +filegroup( + name = '_greetings#srcs', + srcs = ['greetings.go'], + labels = [ + 'link:plz-out/go/src/${PKG}', + 'go_src', + 'go', + ], + visibility = ['//src/...'], + build_timeout = 600, + requires = ['go'], +) +``` + +This particular internal rule is a filegroup that was generated by +`go_library()` and is used to expose the Go source files that make up that +library. You shouldn't depend on these types of rules directly as they may +change between minor releases of Please. 
+ +## Reverse dependencies + +Duration: 2 + +If you're changing a build rule that you know has a wide-reaching effect, it +might be good to run all the tests that will be affected by that change. Let's +find the reverse dependencies of our subrepo rules: + +```bash +$ plz query revdeps ///third_party/go/github.com_stretchr_testify//require:require +///third_party/go/github.com_stretchr_testify//:installs +``` + +Well that doesn't look quite right... We should see +`//src/greetings:greetings_test` too. + +Turns out finding reverse dependencies is quite a slow operation. Please limits +this to just one level so you don't accidentally lock up your terminal trying to +walk the whole build graph. You can set the level with `--level=2` or if you +want to get all reverse dependencies, you can set it to `-1`: + +```bash +$ plz query revdeps ///third_party/go/github.com_stretchr_testify//require:require --level -1 +//src/greetings:greetings_test +//third_party/go:testify +///third_party/go/github.com_stretchr_testify//:installs +``` + +Be careful, this can be slow on larger build graphs. You can use +`--include=//src/foo/...` to limit the search to a slice of your repository. +More on this later in this codelab! + +## Composing plz commands + +Duration: 2 + +So we've managed to determine which targets will be affected by our change. How +do we run these tests? Please can be instructed to listen for targets on +standard input: + +```bash +$ plz query revdeps ///third_party/go/github.com_stretchr_testify//require:require --level -1 | plz test - +//src/greetings:greetings_test 1 test run in 8ms; 1 passed +1 test target and 1 test run; 1 passed. +Total time: 6.62s real, 10ms compute. +``` + +The `-` at the end of `plz test -` indicates to Please that we will be +supplying the targets to build over standard input. + +## Including and excluding targets + +Duration: 2 + +Almost all Please commands can take in the `--include` and `--exclude` +arguments. 
These can be used to specifically exclude targets: + +```bash +$ plz query revdeps --exclude //src/greetings:greetings_test --level=-1 ///third_party/go/github.com_stretchr_testify//require:require | plz test - +0 test targets and 0 tests run; 0 passed. +Total time: 40ms real, 0s compute. +``` + +As you can see, we excluded the test from earlier so `plz test` didn't run it. +We can also exclude this on the test command: + +```bash +$ plz query revdeps --level=-1 ///third_party/go/github.com_stretchr_testify//require:require | plz test --exclude //src/greetings:greetings_test - +0 test targets and 0 tests run; 0 passed. +Total time: 40ms real, 0s compute. +``` + +### Including based on label + +Targets can be labeled in Please. Most of the built-in rules apply some basic +labels, e.g. the Go rules apply the `go` label to their targets. These can be +very useful to run all tests for a given language: + +```bash +plz build --include go --exclude //third_party/go/... +``` + +This will build all Go targets but will only build targets under +`//third_party/go/...` if they're a dependency of a target that needs to be built. + +You may also add custom labels to your targets. Update `srcs/greetings/BUILD` as such: + +### `src/greetings/BUILD` +```python +go_library( + name = "greetings", + srcs = ["greetings.go"], + visibility = ["//src/..."], + labels = ["my_label"], # Add a label to the library rule +) + +go_test( + name = "greetings_test", + srcs = ["greetings_test.go"], + deps = [ + ":greetings", + "//third_party/go:assert", + ], + external = True, +) +``` + +```bash +$ plz query alltargets --include=my_label +//src/greetings:greetings + +$ plz build --include=my_label +Build finished; total time 300ms, incrementality 100.0%. 
Outputs: +//src/greetings:greetings: + plz-out/gen/src/greetings/greetings.a +``` + +This can be especially useful for separating out slow running tests: + +```bash +plz test --exclude my_label +``` + +This will run all tests except those labeled with `my_label`. + +## What's next? + +Duration: 1 + +Hopefully this has given you a taster for what is possible with `plz query`, +however there's so much more. See the [cli](/commands.html#query) for an idea of +what's possible! diff --git a/docs/codelabs/python_intro.md b/docs/codelabs/python_intro.md index 2b06cd929..b438e33c2 100644 --- a/docs/codelabs/python_intro.md +++ b/docs/codelabs/python_intro.md @@ -1,355 +1,355 @@ -summary: Getting started with Python -description: Building and testing with Python and Please, as well as managing third-party dependencies via pip -id: python_intro -categories: beginner -tags: medium -status: Published -authors: Jon Poole -Feedback Link: https://github.com/thought-machine/please - -# Getting started with Python -## Overview -Duration: 4 - -### Prerequisites -- You must have Please installed: [Install please](https://please.build/quickstart.html) -- Python must be installed: [Install Python](https://www.python.org/downloads/) - -### What you'll learn -- Configuring Please for Python using the Python plugin -- Creating an executable Python binary -- Authoring Python modules in your project -- Testing your code -- Including third-party libraries - -### What if I get stuck? - -The final result of running through this codelab can be found -[here](https://github.com/thought-machine/please-codelabs/tree/main/getting_started_python) for reference. If you really -get stuck you can find us on [gitter](https://gitter.im/please-build/Lobby)! 
- -## Initialising your project -Duration: 2 - -Let's create a new project: -```bash -mkdir getting_started_python && cd getting_started_python -plz init --no_prompt -plz init plugin python -``` - -### A note about your Please PATH -Please doesn't use your host system's `PATH` variable. By default, Please uses `/usr/local/bin:/usr/bin:/bin`. If Python -isn't in this path, you will need to add the following to `.plzconfig`: -``` -[build] -path = $YOUR_PYTHON_INSTALL_HERE:/usr/local/bin:/usr/bin:/bin -``` - -### So what just happened? -You will see this has created a number of files in your working folder: -``` -$ tree -a -. -├── pleasew -├── plugins -│   └── BUILD -├── .plzconfig -└── plz-out - └── log - └── build.log - -``` - -The `pleasew` script is a wrapper script that will automatically install Please if it's not already! This -means Please projects are portable and can always be built via -`git clone https://... example_module && cd example_module && ./pleasew build`. - -The `plugins/BUILD` is a file generated by `plz init plugin python` which defines a build target for the python plugin. - -The file `.plzconfig` contains the project configuration for Please. - -### `.plzconfig` -``` -[parse] -preloadsubincludes = ///python//build_defs:python - -[Plugin "python"] -Target = //plugins:python -``` - -Read the [config](/config.html) documentation and [python plugin config](/plugins.html#python.config) for more information on configuration. - -Finally, the `plz-out` directory contains artifacts built by plz. - -## Hello, world! -Duration: 3 - -Now we have a Please project, it's time to start adding some code to it! Let's create a "hello world" program: - -### `src/main.py` -```python -print('Hello, world!') -``` - -We now need to tell Please about our Python code. Please projects define metadata about the targets that are available to be -built in `BUILD` files. 
Let's create a `BUILD` file to build this program: - -### `src/BUILD` -```python -python_binary( - name = "main", - main = "main.py", -) -``` - -That's it! You can now run this with: -``` -$ plz run //src:main -Hello, world! -``` - -There's a lot going on here; first off, `python_binary()` is one of the [python plugin functions](/plugins.html#python). -This build function creates a "build target" in the `src` package. A package, in the Please sense, is any directory that -contains a `BUILD` file. - -Each build target can be identified by a build label in the format `//path/to/package:label`, i.e. `//src:main`. -There are a number of things you can do with a build target such e.g. `plz build //src:main`, however, as you've seen, -if the target is a binary, you may run it with `plz run`. - -## Adding modules -Duration: 4 - -Let's add a `src/greetings` package to our Python project: - -### `src/greetings/greetings.py` -```python -import random - -def greeting(): - return random.choice(["Hello", "Bonjour", "Marhabaan"]) -``` - -We then need to tell Please how to compile this library: - -### `src/greetings/BUILD` -```python -python_library( - name = "greetings", - srcs = ["greetings.py"], - visibility = ["//src/..."], -) -``` -NB: Unlike many popular build systems, Please doesn't just have one metadata file in the root of the project. Please will -typically have one `BUILD` file per [Python package](https://docs.python.org/3/tutorial/modules.html#packages). - -We can then build it like so: - -``` -$ plz build //src/greetings -Build finished; total time 290ms, incrementality 50.0%. Outputs: -//src/greetings:greetings: - plz-out/gen/src/greetings/greetings.py -``` - -Here we can see that the output of a `python_library` rule is a `.py` file which is stored in -`plz-out/gen/src/greetings/greetings.py`. - -We have also provided a `visibility` list to this rule. This is used to control where this `python_library()` rule can be -used within our project. 
In this case, any rule under `src`, denoted by the `...` syntax. - -NB: This syntax can also be used on the command line, e.g. `plz build //src/...`. - -### A note about `python_binary()` -If you're used to Python, one thing that might trip you up is how we package Python. The `python_binary()` rule outputs -something called a `pex`. This is very similar to the concept of a `.jar` file from the java world. All the Python files -relating to that build target are zipped up into a self-executable `.pex` file. This makes deploying and distributing -Python simple as there's only one file to distribute. - -Check it out: -``` -$ plz build //src:main -Build finished; total time 50ms, incrementality 100.0%. Outputs: -//src:main: - plz-out/bin/src/main.pex - -$ plz-out/bin/src/main.pex -Bonjour, world! -``` - -## Using our new module -Duration: 2 - -To maintain a principled model for incremental and hermetic builds, Please requires that rules are explicit about their -inputs and outputs. To use this new package in our "hello world" program, we have to add it as a dependency: - -### `src/BUILD` -```python -python_binary( - name = "main", - main = "main.py", - # NB: if the package and rule name are the same, you may omit the name i.e. this could be just //src/greetings - deps = ["//src/greetings:greetings"], -) -``` - -You can see we use a build label to refer to another rule here. Please will make sure that this rule is built before -making its outputs available to our rule here. - -Then update src/main.py: -### `src/main.py` -```python -from src.greetings import greetings - -print(greetings.greeting() + ", world!") -``` - -Give it a whirl: - -``` -$ plz run //src:main -Bonjour, world! 
-``` - -## Testing our code -Duration: 5 - -Let's create a very simple test for our library: -### `src/greetings/greetings_test.py` -```python -import unittest -from src.greetings import greetings - -class GreetingTest(unittest.TestCase): - - def test_greeting(self): - self.assertTrue(greetings.greeting()) - -``` - -We then need to tell Please about our tests: -### `src/greetings/BUILD` -```python -python_library( - name = "greetings", - srcs = ["greetings.py"], - visibility = ["//src/..."], -) - -python_test( - name = "greetings_test", - srcs = ["greetings_test.py"], - # Here we have used the shorthand `:greetings` label format. This format can be used to refer to a rule in the same - # package and is shorthand for `//src/greetings:greetings`. - deps = [":greetings"], -) -``` - -We've used `python_test()` to define our test target. This is a special build rule that is considered a test. These -rules can be executed as such: -``` -$ plz test //src/... -//src/greetings:greetings_test 1 test run in 3ms; 1 passed -1 test target and 1 test run in 3ms; 1 passed. Total time 90ms. -``` - -Please will run all the tests it finds under `//src/...`, and aggregate the results up. This works even across -languages allowing you to test your whole project with a single command. - -## Third-party dependencies -Duration: 7 - -### Using `pip_library()` - -Eventually, most projects need to depend on third-party code. Let's include NumPy into our package. Conventionally, -third-party dependencies live under `//third_party/...` (although they don't have to), so let's create that package: - -### `third_party/python/BUILD` -```python -package(default_visibility = ["PUBLIC"]) - -pip_library( - name = "numpy", - version = "1.23.4", - zip_safe = False, # This is because NumPy has shared object files which can't be linked to them when zipped up -) -``` - -This will download NumPy for us to use in our project. 
We use the `package()` built-in function to set the default -visibility for this package. This can be very useful for third-party rules to avoid having to specify -`visibility = ["PUBLIC"]` on every `pip_library()` invocation. - -NB: The visibility "PUBLIC" is a special case. Typically, items in the visibility list are labels. "PUBLIC" is equivalent -to `//...`. - -### Setting up our module path -Importing Python modules is based on the import path. That means by default, we'd import NumPy as -`import third_party.python.numpy`. To fix this, we need to tell Please where our third-party module is. Add the -following to your `.plzconfig`: - -### `.plzconfig` -``` -[plugin "python"] -ModuleDir = third_party.python -``` - -NB: if you encounter an error eg. `no such option: --system` you are likely using an operating system where you need to disable vendor flags. This can be done by adding this config. - -### `.plzconfig` -``` -[plugin "python"] -DisableVendorFlags = true -``` - -### Updating our tests - -We can now use this library in our code: - -### `src/greetings/greetings.py` -```python -from numpy import random - -def greeting(): - return random.choice(["Hello", "Bonjour", "Marhabaan"]) -``` - -And add NumPy as a dependency: -### `src/greetings/BUILD` -```python -python_library( - name = "greetings", - srcs = ["greetings.py"], - visibility = ["//src/..."], - deps = ["//third_party/python:numpy"], -) - -python_test( - name = "greetings_test", - srcs = ["greetings_test.py"], - deps = [":greetings"], -) -``` - -``` -$ plz run //src:main -Marhabaan, world! -``` - -## What next? -Duration: 1 - -Hopefully you now have an idea as to how to build Python with Please. Please is capable of so much more though! - -- [Please basics](/basics.html) - A more general introduction to Please. It covers a lot of what we have in this -tutorial in more detail. -- [Plugin rules](/plugin.html#python) - See the rest of the Python rules in the python plugin. 
-- [Config](/config.html#python) - See the available config options for Please, especially those relating to Python. -- [Command line interface](/commands.html) - Please has a powerful command line interface. Interrogate the build graph, -determine files changes since master, watch rules and build them automatically as things change and much more! Use -`plz help`, and explore this rich set of commands! - -Otherwise, why not try one of the other codelabs! -, watch rules and build them automatically as things change and much more! Use -`plz help`, and explore this rich set of commands! - -Otherwise, why not try one of the other codelabs! +summary: Getting started with Python +description: Building and testing with Python and Please, as well as managing third-party dependencies via pip +id: python_intro +categories: beginner +tags: medium +status: Published +authors: Jon Poole +Feedback Link: https://github.com/thought-machine/please + +# Getting started with Python +## Overview +Duration: 4 + +### Prerequisites +- You must have Please installed: [Install please](https://please.build/quickstart.html) +- Python must be installed: [Install Python](https://www.python.org/downloads/) + +### What you'll learn +- Configuring Please for Python using the Python plugin +- Creating an executable Python binary +- Authoring Python modules in your project +- Testing your code +- Including third-party libraries + +### What if I get stuck? + +The final result of running through this codelab can be found +[here](https://github.com/thought-machine/please-codelabs/tree/main/getting_started_python) for reference. If you really +get stuck you can find us on [gitter](https://gitter.im/please-build/Lobby)! + +## Initialising your project +Duration: 2 + +Let's create a new project: +```bash +mkdir getting_started_python && cd getting_started_python +plz init --no_prompt +plz init plugin python +``` + +### A note about your Please PATH +Please doesn't use your host system's `PATH` variable. 
By default, Please uses `/usr/local/bin:/usr/bin:/bin`. If Python +isn't in this path, you will need to add the following to `.plzconfig`: +``` +[build] +path = $YOUR_PYTHON_INSTALL_HERE:/usr/local/bin:/usr/bin:/bin +``` + +### So what just happened? +You will see this has created a number of files in your working folder: +``` +$ tree -a +. +├── pleasew +├── plugins +│   └── BUILD +├── .plzconfig +└── plz-out + └── log + └── build.log + +``` + +The `pleasew` script is a wrapper script that will automatically install Please if it's not already! This +means Please projects are portable and can always be built via +`git clone https://... example_module && cd example_module && ./pleasew build`. + +The `plugins/BUILD` is a file generated by `plz init plugin python` which defines a build target for the python plugin. + +The file `.plzconfig` contains the project configuration for Please. + +### `.plzconfig` +``` +[parse] +preloadsubincludes = ///python//build_defs:python + +[Plugin "python"] +Target = //plugins:python +``` + +Read the [config](/config.html) documentation and [python plugin config](/plugins.html#python.config) for more information on configuration. + +Finally, the `plz-out` directory contains artifacts built by plz. + +## Hello, world! +Duration: 3 + +Now we have a Please project, it's time to start adding some code to it! Let's create a "hello world" program: + +### `src/main.py` +```python +print('Hello, world!') +``` + +We now need to tell Please about our Python code. Please projects define metadata about the targets that are available to be +built in `BUILD` files. Let's create a `BUILD` file to build this program: + +### `src/BUILD` +```python +python_binary( + name = "main", + main = "main.py", +) +``` + +That's it! You can now run this with: +``` +$ plz run //src:main +Hello, world! +``` + +There's a lot going on here; first off, `python_binary()` is one of the [python plugin functions](/plugins.html#python). 
+This build function creates a "build target" in the `src` package. A package, in the Please sense, is any directory that +contains a `BUILD` file. + +Each build target can be identified by a build label in the format `//path/to/package:label`, i.e. `//src:main`. +There are a number of things you can do with a build target such e.g. `plz build //src:main`, however, as you've seen, +if the target is a binary, you may run it with `plz run`. + +## Adding modules +Duration: 4 + +Let's add a `src/greetings` package to our Python project: + +### `src/greetings/greetings.py` +```python +import random + +def greeting(): + return random.choice(["Hello", "Bonjour", "Marhabaan"]) +``` + +We then need to tell Please how to compile this library: + +### `src/greetings/BUILD` +```python +python_library( + name = "greetings", + srcs = ["greetings.py"], + visibility = ["//src/..."], +) +``` +NB: Unlike many popular build systems, Please doesn't just have one metadata file in the root of the project. Please will +typically have one `BUILD` file per [Python package](https://docs.python.org/3/tutorial/modules.html#packages). + +We can then build it like so: + +``` +$ plz build //src/greetings +Build finished; total time 290ms, incrementality 50.0%. Outputs: +//src/greetings:greetings: + plz-out/gen/src/greetings/greetings.py +``` + +Here we can see that the output of a `python_library` rule is a `.py` file which is stored in +`plz-out/gen/src/greetings/greetings.py`. + +We have also provided a `visibility` list to this rule. This is used to control where this `python_library()` rule can be +used within our project. In this case, any rule under `src`, denoted by the `...` syntax. + +NB: This syntax can also be used on the command line, e.g. `plz build //src/...`. + +### A note about `python_binary()` +If you're used to Python, one thing that might trip you up is how we package Python. The `python_binary()` rule outputs +something called a `pex`. 
This is very similar to the concept of a `.jar` file from the java world. All the Python files +relating to that build target are zipped up into a self-executable `.pex` file. This makes deploying and distributing +Python simple as there's only one file to distribute. + +Check it out: +``` +$ plz build //src:main +Build finished; total time 50ms, incrementality 100.0%. Outputs: +//src:main: + plz-out/bin/src/main.pex + +$ plz-out/bin/src/main.pex +Bonjour, world! +``` + +## Using our new module +Duration: 2 + +To maintain a principled model for incremental and hermetic builds, Please requires that rules are explicit about their +inputs and outputs. To use this new package in our "hello world" program, we have to add it as a dependency: + +### `src/BUILD` +```python +python_binary( + name = "main", + main = "main.py", + # NB: if the package and rule name are the same, you may omit the name i.e. this could be just //src/greetings + deps = ["//src/greetings:greetings"], +) +``` + +You can see we use a build label to refer to another rule here. Please will make sure that this rule is built before +making its outputs available to our rule here. + +Then update src/main.py: +### `src/main.py` +```python +from src.greetings import greetings + +print(greetings.greeting() + ", world!") +``` + +Give it a whirl: + +``` +$ plz run //src:main +Bonjour, world! 
+``` + +## Testing our code +Duration: 5 + +Let's create a very simple test for our library: +### `src/greetings/greetings_test.py` +```python +import unittest +from src.greetings import greetings + +class GreetingTest(unittest.TestCase): + + def test_greeting(self): + self.assertTrue(greetings.greeting()) + +``` + +We then need to tell Please about our tests: +### `src/greetings/BUILD` +```python +python_library( + name = "greetings", + srcs = ["greetings.py"], + visibility = ["//src/..."], +) + +python_test( + name = "greetings_test", + srcs = ["greetings_test.py"], + # Here we have used the shorthand `:greetings` label format. This format can be used to refer to a rule in the same + # package and is shorthand for `//src/greetings:greetings`. + deps = [":greetings"], +) +``` + +We've used `python_test()` to define our test target. This is a special build rule that is considered a test. These +rules can be executed as such: +``` +$ plz test //src/... +//src/greetings:greetings_test 1 test run in 3ms; 1 passed +1 test target and 1 test run in 3ms; 1 passed. Total time 90ms. +``` + +Please will run all the tests it finds under `//src/...`, and aggregate the results up. This works even across +languages allowing you to test your whole project with a single command. + +## Third-party dependencies +Duration: 7 + +### Using `pip_library()` + +Eventually, most projects need to depend on third-party code. Let's include NumPy into our package. Conventionally, +third-party dependencies live under `//third_party/...` (although they don't have to), so let's create that package: + +### `third_party/python/BUILD` +```python +package(default_visibility = ["PUBLIC"]) + +pip_library( + name = "numpy", + version = "1.23.4", + zip_safe = False, # This is because NumPy has shared object files which can't be linked to them when zipped up +) +``` + +This will download NumPy for us to use in our project. 
We use the `package()` built-in function to set the default +visibility for this package. This can be very useful for third-party rules to avoid having to specify +`visibility = ["PUBLIC"]` on every `pip_library()` invocation. + +NB: The visibility "PUBLIC" is a special case. Typically, items in the visibility list are labels. "PUBLIC" is equivalent +to `//...`. + +### Setting up our module path +Importing Python modules is based on the import path. That means by default, we'd import NumPy as +`import third_party.python.numpy`. To fix this, we need to tell Please where our third-party module is. Add the +following to your `.plzconfig`: + +### `.plzconfig` +``` +[plugin "python"] +ModuleDir = third_party.python +``` + +NB: if you encounter an error eg. `no such option: --system` you are likely using an operating system where you need to disable vendor flags. This can be done by adding this config. + +### `.plzconfig` +``` +[plugin "python"] +DisableVendorFlags = true +``` + +### Updating our tests + +We can now use this library in our code: + +### `src/greetings/greetings.py` +```python +from numpy import random + +def greeting(): + return random.choice(["Hello", "Bonjour", "Marhabaan"]) +``` + +And add NumPy as a dependency: +### `src/greetings/BUILD` +```python +python_library( + name = "greetings", + srcs = ["greetings.py"], + visibility = ["//src/..."], + deps = ["//third_party/python:numpy"], +) + +python_test( + name = "greetings_test", + srcs = ["greetings_test.py"], + deps = [":greetings"], +) +``` + +``` +$ plz run //src:main +Marhabaan, world! +``` + +## What next? +Duration: 1 + +Hopefully you now have an idea as to how to build Python with Please. Please is capable of so much more though! + +- [Please basics](/basics.html) - A more general introduction to Please. It covers a lot of what we have in this +tutorial in more detail. +- [Plugin rules](/plugin.html#python) - See the rest of the Python rules in the python plugin. 
+- [Config](/config.html#python) - See the available config options for Please, especially those relating to Python. +- [Command line interface](/commands.html) - Please has a powerful command line interface. Interrogate the build graph, +determine files changes since master, watch rules and build them automatically as things change and much more! Use +`plz help`, and explore this rich set of commands! + +Otherwise, why not try one of the other codelabs! +, watch rules and build them automatically as things change and much more! Use +`plz help`, and explore this rich set of commands! + +Otherwise, why not try one of the other codelabs! diff --git a/docs/codelabs/using_plugins.md b/docs/codelabs/using_plugins.md index 94df5360d..88af3fbe6 100644 --- a/docs/codelabs/using_plugins.md +++ b/docs/codelabs/using_plugins.md @@ -1,159 +1,159 @@ -summary: Using plugins -description: How to use Please's language plugins -id: using_plugins -categories: beginner -tags: medium -status: Published -authors: Sam Westmoreland -Feedback Link: https://github.com/thought-machine/please - -# Using Plugins - -## Overview - -Duration: 1 - -### Prerequisites - -- You must have Please installed: [Install please](https://please.build/quickstart.html) -- You should have a basic understanding of how to use Please to build and test code - -### What you'll learn - -Language plugins were introduced with the release of Please v17. Each plugin -contains build definitions specific to a particular language. In this codelab -we'll cover - -- Where to find plugins -- How to can install them in your project -- How to configure them to work for your repo - -## Initialising your Please repo - -Duration: 1 - -For this codelab we'll start with a clean repo. The first thing you'll need to -do is initialise it as a Please repo. We can do this with `plz init`. 
Now if we -check the directory we should have a config file, as well as the Please wrapper -script `pleasew`: - -```bash -plz init -tree -a -``` - -The output should look like this: -```bash -. -├── pleasew -└── .plzconfig - -1 directory, 2 files -``` - -## Where to find plugins - -Duration: 1 - -For a comprehensive list of available plugins, visit -[https://github.com/please-build/please-rules](https://github.com/please-build/please-rules). -There you'll find plugins for language build rules, plugins for various -technologies, plugins for generating protos, and tools to help you maintain -your Please project. - -### Can't find a plugin for your language? - -The plugin ecosystem was designed with extensibility in mind, so if there is a -language that you'd like to build with Please and no plugin, consider writing -one! The existing plugins should serve as helpful templates, and if you get -stuck, feel free to reach out to the Please team on Github or the Please -community on [Gitter](https://gitter.im/please-build/Lobby). There will also be -a codelab coming soon that will cover the basics of writing a new plugin. - -## How to install a plugin - -Duration: 4 - -The easy way to install a plugin in your project is to use `plz init`. We'll -use the Go plugin in this example: - -```bash -plz init plugin go -tree -a -``` - -The output should look like this: -```bash -. -├── pleasew -├── plugins -│ └── BUILD -├── .plzconfig -└── plz-out - └── log - └── build.log - -4 directories, 4 files -``` - -### `.plzconfig` - -```ini -[parse] -preloadsubincludes = ///go//build_defs:go - -[Plugin "go"] -Target = //plugins:go -``` - -In the plzconfig, we can see that two things have been added for us. The first -is a preloaded subinclude. This will ensure that whichever package we're in in -our project, the rules defined in the plugin we just installed will be available. -This is completely optional. 
If the intention is to only use the rules in a few -places, it might make more sense to have an explicit subinclude in those -packages to avoid the plugin being a dependency of the entire repo. - -The second thing is a section for configuration of our new plugin. `Target` is -a required field in this section. This tells Please where to look for the build -target that defines the plugin. There may be other required fields depending on -the particular plugin we've installed. More information about the various config -options is available from the plugin repository itself (e.g. [https://github.com/ -please-build/go-rules](https://github.com/please-build/go-rules)), or via `plz -help [language]`. - -### `plugins/BUILD` - -```python -plugin_repo( - name = "go", - revision = "v1.17.2", - plugin = "go-rules", - owner = "please-build", -) -``` - -A new file has also been created for us called `plugins/BUILD`. This file should -contain a `plugin_repo()` target which will download our desired plugin for us. -The plugin is actually defined as a *subrepo* under the hood, which is why when -we want to depend on the build definitions in the plugin, we reference them with -a `///` (like in the preloaded subinclude in the `.plzconfig` file). The `//` is -then used to reference build targets within that subrepo, `//build_defs:go` for -example. - -Note the revision field will be set to the most recent available version of the -plugin, but can be set to any version tag or commit hash that you require. - -The use of `plz init plugin` is entirely optional. You might prefer -to manually add the `plugin_repo()` target somewhere else if putting it in -`plugins/` doesn't fit your needs. The only requirements for using a plugin are -that there is a `plugin_repo()` target *somewhere*, and that it is referenced -in the `Target` field of the plugin's config section in the `.plzconfig` file. - -## What's next? - -Duration: 0 - -You should now be set up with a language plugin. 
You now have access to all of
-the build definitions provided by your chosen plugin.
-
-Go ahead and install any other plugins that you Please, and get building!
+summary: Using plugins
+description: How to use Please's language plugins
+id: using_plugins
+categories: beginner
+tags: medium
+status: Published
+authors: Sam Westmoreland
+Feedback Link: https://github.com/thought-machine/please
+
+# Using Plugins
+
+## Overview
+
+Duration: 1
+
+### Prerequisites
+
+- You must have Please installed: [Install please](https://please.build/quickstart.html)
+- You should have a basic understanding of how to use Please to build and test code
+
+### What you'll learn
+
+Language plugins were introduced with the release of Please v17. Each plugin
+contains build definitions specific to a particular language. In this codelab
+we'll cover
+
+- Where to find plugins
+- How to install them in your project
+- How to configure them to work for your repo
+
+## Initialising your Please repo
+
+Duration: 1
+
+For this codelab we'll start with a clean repo. The first thing you'll need to
+do is initialise it as a Please repo. We can do this with `plz init`. Now if we
+check the directory we should have a config file, as well as the Please wrapper
+script `pleasew`:
+
+```bash
+plz init
+tree -a
+```
+
+The output should look like this:
+```bash
+.
+├── pleasew
+└── .plzconfig
+
+1 directory, 2 files
+```
+
+## Where to find plugins
+
+Duration: 1
+
+For a comprehensive list of available plugins, visit
+[https://github.com/please-build/please-rules](https://github.com/please-build/please-rules).
+There you'll find plugins for language build rules, plugins for various
+technologies, plugins for generating protos, and tools to help you maintain
+your Please project.
+
+### Can't find a plugin for your language?
+
+The plugin ecosystem was designed with extensibility in mind, so if there is a
+language that you'd like to build with Please and no plugin, consider writing
+one! 
The existing plugins should serve as helpful templates, and if you get +stuck, feel free to reach out to the Please team on Github or the Please +community on [Gitter](https://gitter.im/please-build/Lobby). There will also be +a codelab coming soon that will cover the basics of writing a new plugin. + +## How to install a plugin + +Duration: 4 + +The easy way to install a plugin in your project is to use `plz init`. We'll +use the Go plugin in this example: + +```bash +plz init plugin go +tree -a +``` + +The output should look like this: +```bash +. +├── pleasew +├── plugins +│ └── BUILD +├── .plzconfig +└── plz-out + └── log + └── build.log + +4 directories, 4 files +``` + +### `.plzconfig` + +```ini +[parse] +preloadsubincludes = ///go//build_defs:go + +[Plugin "go"] +Target = //plugins:go +``` + +In the plzconfig, we can see that two things have been added for us. The first +is a preloaded subinclude. This will ensure that whichever package we're in in +our project, the rules defined in the plugin we just installed will be available. +This is completely optional. If the intention is to only use the rules in a few +places, it might make more sense to have an explicit subinclude in those +packages to avoid the plugin being a dependency of the entire repo. + +The second thing is a section for configuration of our new plugin. `Target` is +a required field in this section. This tells Please where to look for the build +target that defines the plugin. There may be other required fields depending on +the particular plugin we've installed. More information about the various config +options is available from the plugin repository itself (e.g. [https://github.com/ +please-build/go-rules](https://github.com/please-build/go-rules)), or via `plz +help [language]`. + +### `plugins/BUILD` + +```python +plugin_repo( + name = "go", + revision = "v1.17.2", + plugin = "go-rules", + owner = "please-build", +) +``` + +A new file has also been created for us called `plugins/BUILD`. 
This file should +contain a `plugin_repo()` target which will download our desired plugin for us. +The plugin is actually defined as a *subrepo* under the hood, which is why when +we want to depend on the build definitions in the plugin, we reference them with +a `///` (like in the preloaded subinclude in the `.plzconfig` file). The `//` is +then used to reference build targets within that subrepo, `//build_defs:go` for +example. + +Note the revision field will be set to the most recent available version of the +plugin, but can be set to any version tag or commit hash that you require. + +The use of `plz init plugin` is entirely optional. You might prefer +to manually add the `plugin_repo()` target somewhere else if putting it in +`plugins/` doesn't fit your needs. The only requirements for using a plugin are +that there is a `plugin_repo()` target *somewhere*, and that it is referenced +in the `Target` field of the plugin's config section in the `.plzconfig` file. + +## What's next? + +Duration: 0 + +You should now be set up with a language plugin. You now have access to all of +the build definitions provided by your chosen plugin. + +Go ahead and install any other plugins that you Please, and get building! diff --git a/docs/commands.html b/docs/commands.html index 92f46dd0a..b6b2fcfa7 100644 --- a/docs/commands.html +++ b/docs/commands.html @@ -1,1130 +1,1130 @@ -

    Please commands

    - -

    - Please has a rich command line interface that can be used to build and test - you code; interrogate the build graph; and much more! -

    - -
    -

    Tab completion

    - -

    - To get the most our of the Please command line interface, it is highly - recommended that you enable tab-completion. Please has a sophisticated - mechanism that is aware of your build graph, all the commands and flags, and - any aliases you may have - configured. To enable Please completions, add this line to your - .bashrc or .zshrc: -

    - -
    -    
    -    
    -    source <(plz --completion_script)
    -    
    -  
    -
    - -
    -

    Common flags

    - -

    These flags are common to all (or nearly all) operations.

    - -
    -

    - Options controlling what to build & how to build it: -

    - -
      -
    • -
      -

      - -c, --config -

      - -

      - The build config to use. The effect this has depends on the - language; typically it allows swapping between a debug or an - optimised build.
      - The default is - opt to build optimised code; - dbg is accepted for C++ and Go to build - code with debugging symbols.
      - This has no effect on Python or Java rules. -

      -
      -
    • -
    • -
      -

      - -r, --repo_root -

      - -

      - Sets the location of the repo root to use. Normally plz assumes it - is within the repo somewhere and locates the root itself, this - forces it to a specific location. -

      -
      -
    • -
    • -
      -

      - -n, --num_threads -

      - -

      - Sets the number of parallel workers to use while building. The - default is the number of logical CPUs of the current machine plus - two. -

      -
      -
    • -
    • -
      -

      - -i, --include -

      - -

      - Labels of targets to include when selecting multiple targets with - :all or /.... - These apply to labels which can be set on individual targets; a - number of them are predefined, most notably for each language (go, python, java, - cc, etc).
      - Only targets with this label will be built. -

      -
      -
    • -
    • -
      -

      - -e, --exclude -

      - -

      - The inverse of - --include; labels of targets to exclude - when selecting multiple targets with - :all or - /....
      - Takes priority over - --include.
      - You can also pass build expressions to - --exclude - to exclude targets as well as by label. -

      -
      -
    • -
    • -
      -

      - -a, --arch -

      - -

      - Architecture to compile for. By default Please will build for the - host architecture, but has some support for targeting others. See - the cross-compiling docs - for more information. -

      -
      -
    • -
    • -
      -

      - -o, --override -

      - -

      - Allows overriding individual config settings on a temporary basis; - for example - -o python.testrunner:pytest. See the - config reference - for more information on what can be overridden. -

      -
      -
    • -
    • -
      -

      - --profile -

      - -

      - Defines a profile of config file to load from the repo. For example, - --profile ci - will load - .plzconfig.ci. This can be useful to - canonicalise certain settings for non-common or scripted - configurations. -

      -
      -
    • -
    -
    - -
    -

    - Options controlling output & logging: -

    - -
      -
    • -
      -

      - -v, --verbosity -

      - -

      - Sets the amount of output logged from plz; a number between 0 and - 4.
      - Each number shows all messages at the given level and above: -

      - -
        -
      1. 0. Error
      2. -
      3. 1. Warning
      4. -
      5. 2. Notice
      6. -
      7. 3. Info
      8. -
      9. 4. Debug
      10. -
      - -

      - The default is 1, for warnings and errors only. If level 4 is - requested then it will suppress interactive output. -

      -
      -
    • -
    • -
      -

      - --log_file -

      - -

      Writes all logs out into the given file.

      -
      -
    • -
    • -
      -

      - --log_file_level -

      - -

      - Level of logging to write to the file. Defaults to 2 (notice, - warning and error). -

      -
      -
    • -
    • -
      -

      - --interactive_output -

      - -

      - Forces plz to show interactive output on stderr. By default it - autodetects based on whether stderr appears to be an interactive - terminal or not, but this flag can be used to force it on in cases - where it might get it wrong. -

      -
      -
    • -
    • -
      -

      - -p, --plain_output -

      - -

      - Forces plz not to show interactive output on stderr. Can be useful - in cases where it might obscure other messages or where the output - isn't capable of interpreting the escape codes correctly. -

      -
      -
    • -
    • -
      -

      - --colour -

      - -

      - Forces coloured output from logging & shell output. Again, this - is autodetected by default, but this can be used in cases where it - would normally detect false but it will later be consumed by - something that understands the codes (e.g. CI systems like Teamcity - or Jenkins). -

      -
      -
    • -
    • -
      -

      - --nocolour -

      - -

      - Inverse of above, forces colourless output from logging & the - shell. -

      -
      -
    • -
    • -
      -

      - --trace_file -

      - -

      - File to write Chrome tracing output into.
      - This is a JSON format that contains the actions taken by plz during - the build and their timings. You can load the file up in - about:tracing - and use that to see which parts of your build were slow. -

      -
      -
    • -
    • -
      -

      - --version -

      - -

      Prints the version of the tool and exits immediately.

      -
      -
    • -
    • -
      -

      - --show_all_output -

      - -

      - Prints all output of each building process as they run. Implies - --plain_output. -

      -
      -
    • -
    • -
      -

      - --completion_script -

      - -

      - Prints the bash / zsh completion script to stdout. This can be used - in a - .bashrc or - .zshrc, e.g. - source <(plz --completion_script). -

      -
      -
    • -
    -
    - -
    -

    - Options that enable / disable certain features: -

    - -
      -
    • -
      -

      - --noupdate -

      - -

      Disables Please attempting to auto-update itself.

      -
      -
    • -
    • -
      -

      - --nohash_verification -

      - -

      - Turns hash verification errors into non-fatal warnings.
      - Obviously this is only for local development & testing, not for - 'production' use. -

      -
      -
    • -
    • -
      -

      - --nolock -

      - -

      - Don't attempt to lock the repo exclusively while building.
      - Use with care - if two instances of plz start building the same - targets simultaneously they will likely fail with very strange - errors. -

      -
      -
    • -
    • -
      -

      - --keep_workdirs -

      - -

      - Don't clean directories in plz-out/tmp after successfully building - targets.
      - They're always left in cases where targets fail. -

      -
      -
    • -
    -
    -
    - -
    -

    - plz build -

    - -

    - This is the most common and obvious command; it builds one or more targets - and all their dependencies. A plain - plz build attempts to build everything, but more - usually you can tell it to build a particular target or targets by passing - them on the command line afterwards. For example: -

    - -
      -
    • - plz build //src/core:core builds just the one - target. -
    • -
    • - plz build //src/core:all builds every target - in. -
    • -
    • - src/core/BUILD. -
    • -
    • - plz build //src/... builds every target in - src and all subdirectories. -
    • -
    -
    - -
    -

    plz test

    - -

    - This is also a very commonly used command, it builds one or more targets and - then runs their tests. Which tests to run are specified by positional - arguments as described for - plz build. -

    - -

    - After successful completion a combined test output file will be written to - plz-out/log/test_results.xml - in something approximating xUnit XML format. -

    - -

    It takes a few special flags:

    -
      -
    • -
      -

      - --num_runs -

      - -

      - Determines how many times to run each test. The default is 1, but can - be more for tests marked as flaky. -

      -
      -
    • -
    • -
      -

      - --failing_tests_ok -

      - -

      - The return value is 0 regardless of whether any tests fail or not. It - will only be nonzero if they fail to build completely.
      - This is not commonly used, it's mostly useful for CI automation which - will parse the results file to determine ultimate success / failure. -

      -
      -
    • -
    • -
      -

      - --test_results_file -

      - -

      Specifies the location to write the combined test results to.

      -
      -
    • -
    • -
      -

      - -d, --debug -

      - -

      - Turns on interactive debug mode for this test. You can only specify - one test with this flag, because it attaches an interactive debugger - to catch failures.
      - It only works for some test types, currently python (with pytest as - the test runner), C and C++.
      - It implies - -c dbg unless that flag is explicitly - passed. -

      -
      -
    • -
    • -
      -

      - --rerun -

      - -

      - Forces the rerun of a test, even if the hash has not changed. -

      -
      -
    • -
    -
    - -
    -

    - plz cover -

    - -

    - Very similar to - plz test, but also instruments tests for coverage - and collects results. Tests normally run significantly slower in this mode - (the exact amount depends on the language). -

    - -

    Coverage isn't available for C++ tests at present.

    - -

    - All the same flags from - plz test apply here as well. In addition there are - several more: -

    - -
      -
    • -
      -

      - --nocoverage_report -

      - -

      Suppresses the coverage report output to the shell.

      -
      -
    • -
    • -
      -

      - --line_coverage_report -

      - -

      Produces a line-by-line coverage display for all source files.

      -
      -
    • -
    • -
      -

      - --include_all_files -

      - -

      - Includes any transitively dependent source files in the coverage - report (the default is just files from relevant packages). -

      -
      -
    • -
    • -
      -

      - --include_file -

      - -

      - Files to include in the coverage report (the flag can be passed more - than once for multiple). -

      -
      -
    • -
    • -
      -

      - --coverage_results_file -

      - -

      - Similar to - --test_results_file, determines where to - write the aggregated coverage results to. -

      -
      -
    • -
    • -
      -

      - -d, --debug -

      - -

      - Turns on interactive debug mode for this test. You can only specify - one test with this flag, because it attaches an interactive debugger - to catch failures.
      - It only works for some test types, currently python (with pytest as - the test runner), C and C++.
      - It implies - -c dbg unless that flag is explicitly - passed. -

      -
      -
    • -
    -
    - -
    -

    plz run

    - -

    - This is essentially shorthand for calling - plz build and then running the result of whatever - target was built. It's often handy for iterating on a single target such - that one command builds and reruns it. -

    - -

    - Because of the way the target is run after, you have to provide exactly one - target to this command. The target must be marked as - binary in its rule definition (this is implicit - for the various builtin _binary rules such as - go_binary etc). -

    - -

    - If you want to pass flags to the target rather than plz itself, you must - pass them last on the command line, after a - --. This tells Please not to attempt to parse them - as its own flags. -

    - -

    - There are two optional subcommands - sequential and - parallel which allow running multiple targets in - one go. As the names suggest, they run targets either one after the other or - all in parallel.
    - In either case, the semantics are a little different to running a single - target; arguments must be passed one by one via the - -a flag, and while stdout / stderr are connected - to the current terminal, stdin is not connected (because it'd not be clear - which process would consume it). -

    -
    - -
    -

    plz exec

    - -

    - This command executes the target in a hermetic build environment, as opposed - to the plz run command. This allows for uses cases, - such as: debugging/profiling programs that may require a predictable environment, - or running E2E tests reliant on external state which doesn't fit with Please's - caching approach. -

    - -

    - The --share_network and --share_mount flags are available (Linux only) for greater control over the sandboxed environment - where the target is run. The --share_network flag is useful - in situations where the host system might want to connect to a server that the command - started. -

    - -

    - The --output_path and --out flags allow for artifacts, produced by the command executed in the sandboxed environment, - to be copied onto the host system where plz exec is being - run from. -

    - -

    - Non-binary targets are also supported, but a custom command (see above) is required since - there isn't a binary produced that can be executed by default. These targets' results can - be accessed via the $OUTS environment variable. -

    - -

    - Only a single command is supported per execution with plz exec. - Multiple can be run with plz exec sequential or plz exec parallel, - which are analogous to their plz run equivalents. -

    -
    - -
    -

    - plz watch -

    - -

    - Watches a set of targets for changes. Whenever any one of their source files - (or that of any dependency) is changed, the targets will be rebuilt. If any - of them are tests, then they will be run as well. -

    - -

    - Optionally you can pass the - --run flag if you'd like the targets to be run - (using plz run) instead of just built / tested. -

    -
    - -
    -

    - plz query -

    - -

    - This allows you to introspect various aspects of the build graph. There are - a number of subcommands identifying what you want to query for: -

    - -
      -
    • - alltargets: Lists all targets in the - graph. -
    • -
    • - filter: Filter targets based on --include and --exclude. - This is commonly used with other commands. For example, to run e2e tests separately from other tests: - plz query changes --since master > plz-out/changes, then - cat plz-out/changes | plz query filter --include e2e - | plz test -. - -
    • -
    • - changes: Queries changed targets versus a - revision or from a set of files. -
    • -
    • - completions: Prints possible completions for - a string. -
    • -
    • - deps: Queries the dependencies of a - target. -
    • -
    • - graph: Prints a JSON representation of the - build graph. -
    • -
    • - input: Prints all transitive inputs of a - target. -
    • -
    • - output: Prints all outputs of a target. -
    • -
    • - print: Prints a representation of a single - target. -
    • -
    • - reverseDeps: Queries all the reverse - dependencies of a target. -
    • -
    • - somepath: Queries for a path between two - targets. -
    • -
    • - rules: Prints out a machine-parseable - description of all currently known build rules. -
    • -
    • - - whatinputs: Prints out target(s) with provided file(s) as inputs - -
    • -
    • - - whatoutputs: Prints out target(s) responsible for outputting provided file(s) - -
    • -
    - -

    - Note that this is not the same as the query language accepted by Bazel and - Buck, if you're familiar with those; generally this is lighter weight but - less flexible and powerful. We haven't ruled out adding that in the future - but have no concrete plans to do so at present. -

    -
    - -
    -

    - plz clean -

    - -

    Cleans up output build artifacts and caches.

    - -

    - This is not normally necessary since generally incrementality detection will - ensure that targets are rebuilt if needed. It's possible though for - particularly determined rules to do something they shouldn't in which case - this might be needed, or (inconceivable though it is) a bug might exist that - led to incorrect artifacts being cached. -

    - -

    - If given no arguments this cleans the entire plz-out directory and the - directory cache, if configured. It returns immediately with the actual - removal proceeding in the background; you can invoke other plz commands - freely while that continues.
    - You can pass the - --nobackground flag if you'd prefer to wait - though. -

    - -

    - If it's given targets to clean, it will need to perform a parse to work out - what to clean, and will not return until those targets have been cleaned. -

    -
    - -
    -

    plz hash

    - -

    - This command calculates the hash of outputs for one or more targets. These - can then be passed in the - hash or - hashes attributes of those targets to verify their - output is as expected - this is useful for fetching third-party dependencies - to ensure they are not changing between builds. -

    - -

    - The relevant targets will be built in order to calculate the hash, but if - they fail because it doesn't match the one recorded in the BUILD file plz - will still exit successfully (although the output files will still not be - created). -

    - -

    - One can of course achieve the same effect via running - plz build and reading the actual hash when it - fails, but this way is generally considered nicer. -

    - -

    - The --update flag will cause Please to rewrite the - BUILD file with any changed hashes that it can find. -

    -
    - -
    -

    plz fmt

    - -

    a.k.a. plz format

    - -

    - Auto-formats existing BUILD files. You can either provide a list of files to - reformat or, if none are given, it will discover all BUILD files in the - repository. -

    - -

    - The -w flag rewrites existing files in-place; if - not passed the formatted version will be printed to stdout. -

    - -

    - The implementation is currently based on a lightly modified version of - buildifier - which supports nearly a superset of the same dialect, but lacks one or two - features such as type annotations.
    - These are relatively rarely used in BUILD files though. -

    -
    - -
    -

    plz init

    - -

    - Creates an initial (and pretty empty) - .plzconfig file in the current directory (or, if - the --dir flag is passed, somewhere else). -

    - -

    You'll be warned before overwriting an existing file.

    - -

    - It will also create a wrapper script, - pleasew which runs plz if found on the local - machine, and otherwise attempts to download a copy. This can be handy for - users who don't have it installed already. -

    - -

    - There is a - --bazel_compat flag which initialises the config - file for Bazel compatibility mode. This changes behaviour in various ways to - make it easier to begin building an existing Bazel project - although more - complex projects will still likely find things that don't translate easily. -

    -
    - -
    -

    plz generate

    - -

    - This command can be used to build generated sources and link them back into - the source tree. This can be useful for tooling that expects generated sources - to be there like linters and IDEs. -

    - -

    - To build all generated sources, simply run plz generate. -

    - -

    - Please can also update a gitignore file, ignoring all the generated files automatically: - plz generate --update_gitignore .gitignore -

    - -

    To automatically link generated sources and update .gitignore files during normal builds, see the - LinkGeneratedSources, and - UpdateGitignore config values. -

    -
    - - -
    -

    - plz update -

    - -

    - Updates plz to the appropriate version. This is quite tightly governed by - the - .plzconfig file: -

    - -
      -
    • - If selfupdate is true, then it's not normally - necessary to run this since any invocation of plz will update before - running. It will still behave as normal though if invoked - explicitly. -
    • -
    • - If the version property is set then it will - attempt to download exactly that version, and fail if it can't for some - reason. - -
    • -
    • - Otherwise it will try to find the latest available version and update - to that. -
    • -
    • - The downloadlocation property determines - where it tries to download from; by default it's the central plz site, - but you could set this to a server of your own if you'd rather be more - independent. -
    • -
    -
    - -
    -

    plz export

    - -

    - Exports a subset of a please project based on a list of targets -

    - -

    - Example: plz export //cmd:main --output plz-out/export -

    - -

    There are a few flags controlling it:

    - -
      -
    • -
      -

      - -o, --output -

      - -

      - The directory to export into -

      -
      -
    • -
    • -
      -

      - --notrim -

      -

      Disables trimming unnecessary targets from exported packages. Normally targets in exported packages that - aren't dependencies of the originally exported targets are removed.

      -

      - This trimming syntax based, so doesn't always work depending on how the build definition is authored. Passing - this flag will disable this feature, avoiding cases where these rules will be erroneously trimmed. -

      -

      - To make sure a rule works without this flag, the rule must follow the naming convention, whereby children of - :name follow the format :_name#{some-tag}. This is the - format tag(name, tag) would produce. -

      -
      -
    • -
    -
    - -
    -

    plz gc

    - -

    - Runs a basic "garbage collection" step, which attempts to identify targets - that aren't in use. This is still fairly experimental since the definition - of "not used" isn't always very clear (for example, ideally simply having a - test on a library that isn't otherwise used would not be enough to keep both - of those). Because of this it suggests a set of targets that it's pretty - sure aren't used at all, and a secondary set that it's less sure on. -

    - -

    - Right now the name is a bit misleading since it finds but doesn't collect - the garbage; ideally it'd be able to rewrite the BUILD files itself. - Deleting sources is a little trickier since you'd often want to couple that - with a VC operation (i.e.git rm) and by design plz - is unaware of the VCS in use. -

    - -

    There are a few flags controlling it:

    - -
      -
    • -
      -

      - -c, --conservative -

      - -

      - Uses a more conservative algorithm (specifically any tests will keep - their targets). -

      -
      -
    • -
    • -
      -

      - -t, --targets_only -

      - -

      - Only prints the targets to be removed (not sources). Useful to pipe - them into another program. -

      -
      -
    • -
    • -
      -

      - -t, --srcs_only -

      - -

      - Only prints the sources to be removed (not targets). Useful to pipe - them into another program. -

      -
      -
    • -
    -
    - -
    -

    plz help

    - -

    - Displays help about a particular facet of Please. It knows about built-in - build rules, config settings and a few other things. Mostly this is useful - as an instant reference; you can run - plz help topics to get a list of all the topics - that it knows about. -

    -
    - -
    -

    plz op

    - -

    Re-runs whatever the previous command was.

    -
    +

    Please commands

    + +

+ Please has a rich command line interface that can be used to build and test + your code; interrogate the build graph; and much more! +

    + +
    +

    Tab completion

    + +

+ To get the most out of the Please command line interface, it is highly + recommended that you enable tab-completion. Please has a sophisticated + mechanism that is aware of your build graph, all the commands and flags, and + any aliases you may have + configured. To enable Please completions, add this line to your + .bashrc or .zshrc: +

    + +
    +    
    +    
    +    source <(plz --completion_script)
    +    
    +  
    +
    + +
    +

    Common flags

    + +

    These flags are common to all (or nearly all) operations.

    + +
    +

    + Options controlling what to build & how to build it: +

    + +
      +
    • +
      +

      + -c, --config +

      + +

      + The build config to use. The effect this has depends on the + language; typically it allows swapping between a debug or an + optimised build.
      + The default is + opt to build optimised code; + dbg is accepted for C++ and Go to build + code with debugging symbols.
      + This has no effect on Python or Java rules. +

      +
      +
    • +
    • +
      +

      + -r, --repo_root +

      + +

      + Sets the location of the repo root to use. Normally plz assumes it + is within the repo somewhere and locates the root itself, this + forces it to a specific location. +

      +
      +
    • +
    • +
      +

      + -n, --num_threads +

      + +

      + Sets the number of parallel workers to use while building. The + default is the number of logical CPUs of the current machine plus + two. +

      +
      +
    • +
    • +
      +

      + -i, --include +

      + +

      + Labels of targets to include when selecting multiple targets with + :all or /.... + These apply to labels which can be set on individual targets; a + number of them are predefined, most notably for each language (go, python, java, + cc, etc).
      + Only targets with this label will be built. +

      +
      +
    • +
    • +
      +

      + -e, --exclude +

      + +

      + The inverse of + --include; labels of targets to exclude + when selecting multiple targets with + :all or + /....
      + Takes priority over + --include.
      + You can also pass build expressions to + --exclude + to exclude targets as well as by label. +

      +
      +
    • +
    • +
      +

      + -a, --arch +

      + +

      + Architecture to compile for. By default Please will build for the + host architecture, but has some support for targeting others. See + the cross-compiling docs + for more information. +

      +
      +
    • +
    • +
      +

      + -o, --override +

      + +

      + Allows overriding individual config settings on a temporary basis; + for example + -o python.testrunner:pytest. See the + config reference + for more information on what can be overridden. +

      +
      +
    • +
    • +
      +

      + --profile +

      + +

      + Defines a profile of config file to load from the repo. For example, + --profile ci + will load + .plzconfig.ci. This can be useful to + canonicalise certain settings for non-common or scripted + configurations. +

      +
      +
    • +
    +
    + +
    +

    + Options controlling output & logging: +

    + +
      +
    • +
      +

      + -v, --verbosity +

      + +

      + Sets the amount of output logged from plz; a number between 0 and + 4.
      + Each number shows all messages at the given level and above: +

      + +
        +
      1. 0. Error
      2. +
      3. 1. Warning
      4. +
      5. 2. Notice
      6. +
      7. 3. Info
      8. +
      9. 4. Debug
      10. +
      + +

      + The default is 1, for warnings and errors only. If level 4 is + requested then it will suppress interactive output. +

      +
      +
    • +
    • +
      +

      + --log_file +

      + +

      Writes all logs out into the given file.

      +
      +
    • +
    • +
      +

      + --log_file_level +

      + +

      + Level of logging to write to the file. Defaults to 2 (notice, + warning and error). +

      +
      +
    • +
    • +
      +

      + --interactive_output +

      + +

      + Forces plz to show interactive output on stderr. By default it + autodetects based on whether stderr appears to be an interactive + terminal or not, but this flag can be used to force it on in cases + where it might get it wrong. +

      +
      +
    • +
    • +
      +

      + -p, --plain_output +

      + +

      + Forces plz not to show interactive output on stderr. Can be useful + in cases where it might obscure other messages or where the output + isn't capable of interpreting the escape codes correctly. +

      +
      +
    • +
    • +
      +

      + --colour +

      + +

      + Forces coloured output from logging & shell output. Again, this + is autodetected by default, but this can be used in cases where it + would normally detect false but it will later be consumed by + something that understands the codes (e.g. CI systems like Teamcity + or Jenkins). +

      +
      +
    • +
    • +
      +

      + --nocolour +

      + +

      + Inverse of above, forces colourless output from logging & the + shell. +

      +
      +
    • +
    • +
      +

      + --trace_file +

      + +

      + File to write Chrome tracing output into.
      + This is a JSON format that contains the actions taken by plz during + the build and their timings. You can load the file up in + about:tracing + and use that to see which parts of your build were slow. +

      +
      +
    • +
    • +
      +

      + --version +

      + +

      Prints the version of the tool and exits immediately.

      +
      +
    • +
    • +
      +

      + --show_all_output +

      + +

      + Prints all output of each building process as they run. Implies + --plain_output. +

      +
      +
    • +
    • +
      +

      + --completion_script +

      + +

      + Prints the bash / zsh completion script to stdout. This can be used + in a + .bashrc or + .zshrc, e.g. + source <(plz --completion_script). +

      +
      +
    • +
    +
    + +
    +

    + Options that enable / disable certain features: +

    + +
      +
    • +
      +

      + --noupdate +

      + +

      Disables Please attempting to auto-update itself.

      +
      +
    • +
    • +
      +

      + --nohash_verification +

      + +

      + Turns hash verification errors into non-fatal warnings.
      + Obviously this is only for local development & testing, not for + 'production' use. +

      +
      +
    • +
    • +
      +

      + --nolock +

      + +

      + Don't attempt to lock the repo exclusively while building.
      + Use with care - if two instances of plz start building the same + targets simultaneously they will likely fail with very strange + errors. +

      +
      +
    • +
    • +
      +

      + --keep_workdirs +

      + +

      + Don't clean directories in plz-out/tmp after successfully building + targets.
      + They're always left in cases where targets fail. +

      +
      +
    • +
    +
    +
    + +
    +

    + plz build +

    + +

    + This is the most common and obvious command; it builds one or more targets + and all their dependencies. A plain + plz build attempts to build everything, but more + usually you can tell it to build a particular target or targets by passing + them on the command line afterwards. For example: +

    + +
      +
    • + plz build //src/core:core builds just the one + target. +
    • +
    • + plz build //src/core:all builds every target + in src/core/BUILD. +
    • +
    • +
    • + plz build //src/... builds every target in + src and all subdirectories. +
    • +
    +
    + +
    +

    plz test

    + +

    + This is also a very commonly used command, it builds one or more targets and + then runs their tests. Which tests to run are specified by positional + arguments as described for + plz build. +

    + +

    + After successful completion a combined test output file will be written to + plz-out/log/test_results.xml + in something approximating xUnit XML format. +

    + +

    It takes a few special flags:

    +
      +
    • +
      +

      + --num_runs +

      + +

      + Determines how many times to run each test. The default is 1, but can + be more for tests marked as flaky. +

      +
      +
    • +
    • +
      +

      + --failing_tests_ok +

      + +

      + The return value is 0 regardless of whether any tests fail or not. It + will only be nonzero if they fail to build completely.
      + This is not commonly used, it's mostly useful for CI automation which + will parse the results file to determine ultimate success / failure. +

      +
      +
    • +
    • +
      +

      + --test_results_file +

      + +

      Specifies the location to write the combined test results to.

      +
      +
    • +
    • +
      +

      + -d, --debug +

      + +

      + Turns on interactive debug mode for this test. You can only specify + one test with this flag, because it attaches an interactive debugger + to catch failures.
      + It only works for some test types, currently python (with pytest as + the test runner), C and C++.
      + It implies + -c dbg unless that flag is explicitly + passed. +

      +
      +
    • +
    • +
      +

      + --rerun +

      + +

      + Forces the rerun of a test, even if the hash has not changed. +

      +
      +
    • +
    +
    + +
    +

    + plz cover +

    + +

    + Very similar to + plz test, but also instruments tests for coverage + and collects results. Tests normally run significantly slower in this mode + (the exact amount depends on the language). +

    + +

    Coverage isn't available for C++ tests at present.

    + +

    + All the same flags from + plz test apply here as well. In addition there are + several more: +

    + +
      +
    • +
      +

      + --nocoverage_report +

      + +

      Suppresses the coverage report output to the shell.

      +
      +
    • +
    • +
      +

      + --line_coverage_report +

      + +

      Produces a line-by-line coverage display for all source files.

      +
      +
    • +
    • +
      +

      + --include_all_files +

      + +

      + Includes any transitively dependent source files in the coverage + report (the default is just files from relevant packages). +

      +
      +
    • +
    • +
      +

      + --include_file +

      + +

      + Files to include in the coverage report (the flag can be passed more + than once for multiple). +

      +
      +
    • +
    • +
      +

      + --coverage_results_file +

      + +

      + Similar to + --test_results_file, determines where to + write the aggregated coverage results to. +

      +
      +
    • +
    • +
      +

      + -d, --debug +

      + +

      + Turns on interactive debug mode for this test. You can only specify + one test with this flag, because it attaches an interactive debugger + to catch failures.
      + It only works for some test types, currently python (with pytest as + the test runner), C and C++.
      + It implies + -c dbg unless that flag is explicitly + passed. +

      +
      +
    • +
    +
    + +
    +

    plz run

    + +

    + This is essentially shorthand for calling + plz build and then running the result of whatever + target was built. It's often handy for iterating on a single target such + that one command builds and reruns it. +

    + +

    + Because of the way the target is run after, you have to provide exactly one + target to this command. The target must be marked as + binary in its rule definition (this is implicit + for the various builtin _binary rules such as + go_binary etc). +

    + +

    + If you want to pass flags to the target rather than plz itself, you must + pass them last on the command line, after a + --. This tells Please not to attempt to parse them + as its own flags. +

    + +

    + There are two optional subcommands + sequential and + parallel which allow running multiple targets in + one go. As the names suggest, they run targets either one after the other or + all in parallel.
    + In either case, the semantics are a little different to running a single + target; arguments must be passed one by one via the + -a flag, and while stdout / stderr are connected + to the current terminal, stdin is not connected (because it'd not be clear + which process would consume it). +

    +
    + +
    +

    plz exec

    + +

    + This command executes the target in a hermetic build environment, as opposed + to the plz run command. This allows for use cases, + such as: debugging/profiling programs that may require a predictable environment, + or running E2E tests reliant on external state which doesn't fit with Please's + caching approach. +

    + +

    + The --share_network and --share_mount flags are available (Linux only) for greater control over the sandboxed environment + where the target is run. The --share_network flag is useful + in situations where the host system might want to connect to a server that the command + started. +

    + +

    + The --output_path and --out flags allow for artifacts, produced by the command executed in the sandboxed environment, + to be copied onto the host system where plz exec is being + run from. +

    + +

    + Non-binary targets are also supported, but a custom command (see above) is required since + there isn't a binary produced that can be executed by default. These targets' results can + be accessed via the $OUTS environment variable. +

    + +

    + Only a single command is supported per execution with plz exec. + Multiple can be run with plz exec sequential or plz exec parallel, + which are analogous to their plz run equivalents. +

    +
    + +
    +

    + plz watch +

    + +

    + Watches a set of targets for changes. Whenever any one of their source files + (or that of any dependency) is changed, the targets will be rebuilt. If any + of them are tests, then they will be run as well. +

    + +

    + Optionally you can pass the + --run flag if you'd like the targets to be run + (using plz run) instead of just built / tested. +

    +
    + +
    +

    + plz query +

    + +

    + This allows you to introspect various aspects of the build graph. There are + a number of subcommands identifying what you want to query for: +

    + +
      +
    • + alltargets: Lists all targets in the + graph. +
    • +
    • + filter: Filter targets based on --include and --exclude. + This is commonly used with other commands. For example, to run e2e tests separately from other tests: + plz query changes --since master > plz-out/changes, then + cat plz-out/changes | plz query filter --include e2e - | plz test -. + +
    • +
    • + changes: Queries changed targets versus a + revision or from a set of files. +
    • +
    • + completions: Prints possible completions for + a string. +
    • +
    • + deps: Queries the dependencies of a + target. +
    • +
    • + graph: Prints a JSON representation of the + build graph. +
    • +
    • + input: Prints all transitive inputs of a + target. +
    • +
    • + output: Prints all outputs of a target. +
    • +
    • + print: Prints a representation of a single + target. +
    • +
    • + reverseDeps: Queries all the reverse + dependencies of a target. +
    • +
    • + somepath: Queries for a path between two + targets. +
    • +
    • + rules: Prints out a machine-parseable + description of all currently known build rules. +
    • +
    • + + whatinputs: Prints out target(s) with provided file(s) as inputs + +
    • +
    • + + whatoutputs: Prints out target(s) responsible for outputting provided file(s) + +
    • +
    + +

    + Note that this is not the same as the query language accepted by Bazel and + Buck, if you're familiar with those; generally this is lighter weight but + less flexible and powerful. We haven't ruled out adding that in the future + but have no concrete plans to do so at present. +

    +
    + +
    +

    + plz clean +

    + +

    Cleans up output build artifacts and caches.

    + +

    + This is not normally necessary since generally incrementality detection will + ensure that targets are rebuilt if needed. It's possible though for + particularly determined rules to do something they shouldn't in which case + this might be needed, or (inconceivable though it is) a bug might exist that + led to incorrect artifacts being cached. +

    + +

    + If given no arguments this cleans the entire plz-out directory and the + directory cache, if configured. It returns immediately with the actual + removal proceeding in the background; you can invoke other plz commands + freely while that continues.
    + You can pass the + --nobackground flag if you'd prefer to wait + though. +

    + +

    + If it's given targets to clean, it will need to perform a parse to work out + what to clean, and will not return until those targets have been cleaned. +

    +
    + +
    +

    plz hash

    + +

    + This command calculates the hash of outputs for one or more targets. These + can then be passed in the + hash or + hashes attributes of those targets to verify their + output is as expected - this is useful for fetching third-party dependencies + to ensure they are not changing between builds. +

    + +

    + The relevant targets will be built in order to calculate the hash, but if + they fail because it doesn't match the one recorded in the BUILD file plz + will still exit successfully (although the output files will still not be + created). +

    + +

    + One can of course achieve the same effect via running + plz build and reading the actual hash when it + fails, but this way is generally considered nicer. +

    + +

    + The --update flag will cause Please to rewrite the + BUILD file with any changed hashes that it can find. +

    +
    + +
    +

    plz fmt

    + +

    a.k.a. plz format

    + +

    + Auto-formats existing BUILD files. You can either provide a list of files to + reformat or, if none are given, it will discover all BUILD files in the + repository. +

    + +

    + The -w flag rewrites existing files in-place; if + not passed the formatted version will be printed to stdout. +

    + +

    + The implementation is currently based on a lightly modified version of + buildifier + which supports nearly a superset of the same dialect, but lacks one or two + features such as type annotations.
    + These are relatively rarely used in BUILD files though. +

    +
    + +
    +

    plz init

    + +

    + Creates an initial (and pretty empty) + .plzconfig file in the current directory (or, if + the --dir flag is passed, somewhere else). +

    + +

    You'll be warned before overwriting an existing file.

    + +

    + It will also create a wrapper script, + pleasew which runs plz if found on the local + machine, and otherwise attempts to download a copy. This can be handy for + users who don't have it installed already. +

    + +

    + There is a + --bazel_compat flag which initialises the config + file for Bazel compatibility mode. This changes behaviour in various ways to + make it easier to begin building an existing Bazel project - although more + complex projects will still likely find things that don't translate easily. +

    +
    + +
    +

    plz generate

    + +

    + This command can be used to build generated sources and link them back into + the source tree. This can be useful for tooling that expects generated sources + to be there like linters and IDEs. +

    + +

    + To build all generated sources, simply run plz generate. +

    + +

    + Please can also update a gitignore file, ignoring all the generated files automatically: + plz generate --update_gitignore .gitignore +

    + +

    To automatically link generated sources and update .gitignore files during normal builds, see the + LinkGeneratedSources and + UpdateGitignore config values. +

    +
    + + +
    +

    + plz update +

    + +

    + Updates plz to the appropriate version. This is quite tightly governed by + the + .plzconfig file: +

    + +
      +
    • + If selfupdate is true, then it's not normally + necessary to run this since any invocation of plz will update before + running. It will still behave as normal though if invoked + explicitly. +
    • +
    • + If the version property is set then it will + attempt to download exactly that version, and fail if it can't for some + reason. + +
    • +
    • + Otherwise it will try to find the latest available version and update + to that. +
    • +
    • + The downloadlocation property determines + where it tries to download from; by default it's the central plz site, + but you could set this to a server of your own if you'd rather be more + independent. +
    • +
    +
    + +
    +

    plz export

    + +

    + Exports a subset of a please project based on a list of targets +

    + +

    + Example: plz export //cmd:main --output plz-out/export +

    + +

    There are a few flags controlling it:

    + +
      +
    • +
      +

      + -o, --output +

      + +

      + The directory to export into +

      +
      +
    • +
    • +
      +

      + --notrim +

      +

      Disables trimming unnecessary targets from exported packages. Normally targets in exported packages that + aren't dependencies of the originally exported targets are removed.

      +

      + This trimming is syntax based, so doesn't always work depending on how the build definition is authored. Passing + this flag will disable this feature, avoiding cases where these rules will be erroneously trimmed. +

      +

      + To make sure a rule works without this flag, the rule must follow the naming convention, whereby children of + :name follow the format :_name#{some-tag}. This is the + format tag(name, tag) would produce. +

      +
      +
    • +
    +
    + +
    +

    plz gc

    + +

    + Runs a basic "garbage collection" step, which attempts to identify targets + that aren't in use. This is still fairly experimental since the definition + of "not used" isn't always very clear (for example, ideally simply having a + test on a library that isn't otherwise used would not be enough to keep both + of those). Because of this it suggests a set of targets that it's pretty + sure aren't used at all, and a secondary set that it's less sure on. +

    + +

    + Right now the name is a bit misleading since it finds but doesn't collect + the garbage; ideally it'd be able to rewrite the BUILD files itself. + Deleting sources is a little trickier since you'd often want to couple that + with a VC operation (i.e. git rm) and by design plz + is unaware of the VCS in use. +

    + +

    There are a few flags controlling it:

    + +
      +
    • +
      +

      + -c, --conservative +

      + +

      + Uses a more conservative algorithm (specifically any tests will keep + their targets). +

      +
      +
    • +
    • +
      +

      + -t, --targets_only +

      + +

      + Only prints the targets to be removed (not sources). Useful to pipe + them into another program. +

      +
      +
    • +
    • +
      +

      + -s, --srcs_only +

      + +

      + Only prints the sources to be removed (not targets). Useful to pipe + them into another program. +

      +
      +
    • +
    +
    + +
    +

    plz help

    + +

    + Displays help about a particular facet of Please. It knows about built-in + build rules, config settings and a few other things. Mostly this is useful + as an instant reference; you can run + plz help topics to get a list of all the topics + that it knows about. +

    +
    + +
    +

    plz op

    + +

    Re-runs whatever the previous command was.

    +
    From 6bffc5b0b737f3319d0469021db636a0efab12e9 Mon Sep 17 00:00:00 2001 From: Anya Xiao <73641458+scyyx5@users.noreply.github.com> Date: Sat, 25 Oct 2025 15:09:32 +0100 Subject: [PATCH 35/38] revert changes this change will be in a new PR --- docs/codelabs/go_module.md | 586 +++++++++++++++---------------------- 1 file changed, 240 insertions(+), 346 deletions(-) diff --git a/docs/codelabs/go_module.md b/docs/codelabs/go_module.md index a4e5cbfb2..10d1d28d2 100644 --- a/docs/codelabs/go_module.md +++ b/docs/codelabs/go_module.md @@ -1,5 +1,5 @@ -summary: Third-party dependencies with Puku -description: Add, update, pin, and remove Go third-party dependencies using go get and plz puku (no go_module()) +summary: Third-party dependencies with go_module() +description: Set up gRPC and learn how to manage third party dependencies with Please id: go_module categories: beginner tags: medium @@ -7,38 +7,18 @@ status: Published authors: Jon Poole Feedback Link: https://github.com/thought-machine/please -# Third-party dependencies with Puku - +# Third-party dependencies with `go_module()` ## Overview -Duration: 2 - -Notes: `go_module()` is deprecated in Core3. This codelab teaches a practical workflow that uses standard Go tooling (`go get` / `go mod`) together with Puku to generate and maintain third-party go targets (`go_repo`). - -### Goals -- Add a new third‑party dependency with `go get` -- Sync the dependency into Please with `plz puku sync` -- Let puku update BUILD deps with `plz puku fmt` -- Upgrade, pin/exclude, and remove modules safely -- Diagnose missing import / missing subrepo issues - -You will not use `go_module()` in this guide. 
+Duration: 1 ### Prerequisites -- Please installed and configured: https://please.build/quickstart.html -- Go 1.20+ installed and on PATH -- Puku available in one of the following ways: - - Via Please alias: add an alias to `.plzconfig` (see below), or - - Installed locally (if the first doesn't work, try the second): - - `go install github.com/please-build/puku/cmd/puku@latest` - - `go get github.com/please-build/puku/cmd/puku` - -### What you’ll learn -- Add and upgrade dependencies with `go get` -- Sync `go.mod` into `third_party/go/BUILD` with `plz puku sync` -- Let `plz puku fmt` add third-party deps to your BUILD targets -- Diagnose missing imports and missing subrepos -- Pin or exclude dependency versions with `go mod edit` -- Remove third-party modules safely +- You must have Please installed: [Install Please](https://please.build/quickstart.html) + +### What you'll learn +In this codelab, we'll be setting up Please to compile third party go modules. You'll learn how to: +- Use go_module() to download and compile third party go modules +- Download and compile the library and binary parts of a module separately +- Resolving cyclical dependencies between modules ### What if I get stuck? @@ -46,396 +26,310 @@ The final result of running through this codelab can be found [here](https://github.com/thought-machine/please-codelabs/tree/main/go_modules) for reference. If you really get stuck you can find us on [gitter](https://gitter.im/please-build/Lobby)! 
-## Initialising your project and running puku with please -Duration: 5 +## Initialising your project +Duration: 2 The easiest way to get started is from an existing Go module: -```bash -mkdir puku_sync && cd puku_sync -go mod init example_module -plz init --no_prompt -plz init plugin go -``` - -Define a valid Puku version number as a build configuration string in `.plzconfig`: - ``` -[BuildConfig] -puku-version = "1.17.0" +$ mkdir go_module && cd go_module +$ go mod init example_module +$ plz init --no_prompt +$ plz init plugin go ``` -Uncomment and edit the following lines in your `.plzconfig` to set up `please` version: - +### A note about your Please PATH +Please doesn't use your host system's `PATH` variable. If where you installed Go isn't in this default path, you will +need to add the following to `.plzconfig`: ``` -[please] -version = 17.22.0 +[build] +path = $YOUR_GO_INSTALL_HERE:/usr/local/bin:/usr/bin:/bin ``` -Configure a Please alias for Puku (optional but convenient): +You can find out where Go is installed with `dirname $(which go)`. -``` -[Alias "puku"] -Cmd = run //third_party/binary:puku -- -PositionalLabels = true -Desc = A tool to update BUILD files in Go packages -``` +## Dependencies in Please vs. go build +Duration: 3 -With the alias, you can use `plz puku` instead of `plz run //third_party/binary:puku`. +If you're coming from a language specific build system like `go build`, Please can feel a bit alien. Please is language +agnostic so can't parse you source code to automatically update its BUILD files when you add a new import like +`go mod edit` would for `go build`. -Then download that version of Puku in `third_party/binary/BUILD`: +Instead, you must strictly define all the dependencies of each module. This allows Please to build go modules in a +controlled and reproducible way without actually having to understand go itself. However, it does take a little more +work to set up. 
-```python -remote_file( - name = "puku", - url = f"https://github.com/please-build/puku/releases/download/v{CONFIG.PUKU_VERSION}/puku-{CONFIG.PUKU_VERSION}-{CONFIG.OS}_{CONFIG.ARCH}", - binary = True, -) -``` +A basic `go_module()` usage might look like: -Configure the Go plugin to point at your go.mod (recommended). Create a repo-root `BUILD` with a filegroup for go.mod: - -1) Add a filegroup for go.mod at `BUILD` in repo root: +### `third_party/go/BUILD` ```python -filegroup( - name = "gomod", - srcs = ["go.mod"], - visibility = ["PUBLIC"], +go_module( + name = "protobuf_go", + # By default, we only install the top level package i.e. golang.org/x/sys. To + # compile everything, use this wildcard. + install = ["..."], + module = "google.golang.org/protobuf", + version = "v1.25.0", + # We must tell Please that :protobuf_go depends on :cmp so we can link to it. + deps = [":cmp"], ) -``` - -2) Update your `.plzconfig`: -``` -[Plugin "go"] -Target = //plugins:go -ModFile = //:gomod -``` - -This lets Puku use standard `go get` to resolve modules, then sync them into `third_party/go/BUILD`. - -### Configuring the PATH for Go - -By default, Please looks for Go in the following locations: -``` -/usr/local/bin:/usr/bin:/bin -``` -If you installed Go elsewhere (e.g., via Homebrew on macOS, or a custom location), you must configure the path in `.plzconfig`. - -First, find where your Go binary is located: -```bash -which go -``` - -Then add the path to `.plzconfig`. For example, if Go is at `/opt/homebrew/bin/go`: - -```ini -[Build] -Path = /opt/homebrew/bin:/usr/local/bin:/usr/bin:/bin -``` - -Or if it's at `/usr/local/go/bin/go`: - -```ini -[Build] -Path = /usr/local/go/bin:/usr/local/bin:/usr/bin:/bin +go_module( + name = "cmp", + install = ["cmp/..."], + module = "github.com/google/go-cmp", + version = "v0.5.5", +) ``` -**Note:** On Windows, use `where.exe go` to find the Go installation path. 
- -### Installing the Go standard library (Go 1.20+) +### A note on install +We talk about installing a package. This nomenclature comes from `go install` which would compile a package and +install it in the go path. In Please terms, this means compiling and storing the result in `plz-out`. We're not +installing anything system wide. -From Go version 1.20 onwards, the standard library is no longer included by default with the Go distribution. You must install it manually: +The install list can contain exact packages, or could contain wildcards: -```bash -GODEBUG="installgoroot=all" go install std +### `third_party/go/BUILD` +```python +go_module( + name = "module", + module = "example.com/some/module", + version = "v1.0.0", + install = [ + ".", # Refers to the root package of the module. This is the default if no install list is provided. + "...", # Refers to everything in the module + "foo/...", # installs example.com/some/module/foo and everything under it + "foo/bar", # installs example.com/some/module/foo/bar only + ] +) ``` -## Adding and updating modules +## go_mod_download() Duration: 5 -Let's add a new third-party dependency using `go get` and sync it with Puku. - -### Adding a new module +For most modules, you can get away with compiling them in one pass. Sometimes it can be useful to split this out into +separate rules. There are many reasons to do this, for example: to resolve cyclic dependencies; download from a fork +of a repo; or to vendor a module. -First, let's create a simple Go program that uses a third-party library. Create a file `src/hello/hello.go`: +Another common case is when modules have a `main` package but can also act as a library. One example of this is +`github.com/golang/protobuf` which contains the protobuf library, as well as the protoc plugin for go. We might want to +have a binary rule for the protoc plugin, so we can refer to that in our proto config in our `.plzconfig`. 
-```go -package main +To do this, we create a `go_mod_download()` rule that will download our sources for us: -import ( - "fmt" - "github.com/google/uuid" +### `third_party/go/BUILD` +```python +go_mod_download( + name = "protobuf_download", + module = "github.com/golang/protobuf", + version = "v1.4.3", ) - -func main() { - id := uuid.New() - fmt.Printf("Generated UUID: %s\n", id.String()) -} ``` -Now add the dependency with `go get`: - -```bash -GOTOOLCHAIN=local go get github.com/google/uuid -``` - -Sync the changes to `third_party/go/BUILD`: - -```bash -plz puku sync -w -``` - -This creates a `go_repo()` rule in `third_party/go/BUILD` for the `uuid` module. You may need to create the `third_party/go/BUILD` file if it doesn't exist. - -### Creating the BUILD file - -Create `src/hello/BUILD`: - +We can then create a rule to compile the library like so: ```python -go_binary( - name = "hello", - srcs = ["hello.go"], +go_module( + name = "protobuf", + # Depend on our download rule instead of providing a version + download = ":protobuf_download", + install = ["..."], + module = "github.com/golang/protobuf", + # Let's skip compiling this package which as we're compiling this separately. + strip = ["protoc-gen-go"], + deps = [":protobuf_download"], ) ``` -Now let Puku automatically add the dependency: - -```bash -plz puku fmt //src/hello -``` - -Puku will update your BUILD file to include the dependency on `//third_party/go:google-uuid` (or the subrepo format). - -Build and run your program: - -```bash -plz run //src/hello -``` - -### Updating an existing module - -To update a module to a specific version: - -```bash -GOTOOLCHAIN=local go get github.com/google/uuid@v1.6.0 -plz puku sync -w -``` - -To update to the latest version: - -```bash -GOTOOLCHAIN=local go get -u github.com/google/uuid -plz puku sync -w -``` - -After syncing, rebuild your targets to use the updated version. - -### Troubleshooting - -**Missing import error?** If you see `could not import ... 
(open : no such file or directory)`, the module providing that package is missing. Add it with: - -```bash -go get -plz puku sync -w -``` - -**Missing subrepo error?** If you see `Subrepo ... is not defined`, you need to add or migrate the module: - -```bash -go get -plz puku sync -w -``` - -## Stop a module from updating -Duration: 3 - -Sometimes you need to prevent a module from being updated due to breaking changes or compatibility issues. - -### Excluding a specific version - -Use the `exclude` directive to prevent a specific version from being used: - -```bash -go mod edit -exclude github.com/example/module@v2.0.0 -plz puku sync -w -``` - -This prevents version `v2.0.0` from being selected. Go will use the next highest non-excluded version. - -To remove an exclusion: - -```bash -go mod edit -dropexclude github.com/example/module@v2.0.0 -plz puku sync -w -``` - -### Pinning to a specific version - -Use the `replace` directive to pin a module to a specific version: - -```bash -go mod edit -replace github.com/example/module=github.com/example/module@v1.5.0 -plz puku sync -w -``` - -This pins the module to `v1.5.0` regardless of what other dependencies require. - -To unpin (and upgrade at the same time): - -```bash -go mod edit -dropreplace github.com/example/module -go get -u github.com/example/module -plz puku sync -w +And then compile the main package under `github.com/golang/protobuf/protoc-gen-go` like so: +```python +go_module( + name = "protoc-gen-go", + # Mark this as binary so Please knows it can be executed + binary = True, + # Depend on our download rule instead of providing a version + download = ":protobuf_download", + install = ["protoc-gen-go"], + module = "github.com/golang/protobuf", + deps = [":protobuf_go"], +) ``` -**Warning:** Pinning modules can cause compatibility issues with other dependencies. Use sparingly and resolve as soon as possible. 
- -### Example scenario +## Resolving cyclic dependencies +Duration: 5 -Let's say a new version of `uuid` has a breaking change. Pin it to a working version: +While go packages can't be cyclically dependent on each other, go modules can. For the most part, this is considered +bad practice and is quite rare, however the `google.golang.org/grpc` and `google.golang.org/genproto` modules are one +such example. -```bash -go mod edit -replace github.com/google/uuid=github.com/google/uuid@v1.3.0 -plz puku sync -w -plz build //src/hello -``` +In order to solve this, we need to figure out what parts of the modules actually depend on each other. We can then +download that module and compile these two parts separately. We will use `go_mod_download()` to achieve this. -## Removing modules -Duration: 3 +N.B. To run a gRPC service written in go, you will have to install almost all of `google.golang.org/grpc`. For the sake +of brevity, this example only install the subset that `google.golang.org/genproto` needs. You may want to complete this +by adding `go_module()` rules for the rest of the modules `google.golang.org/grpc` depends on. -Before removing a module, ensure it's not used anywhere in your codebase. +### Installing gRPC's deps `third_party/go/BUILD` +First we must install the dependencies of `google.golang.org/grpc`: +```python +go_module( + name = "xsys", + module = "golang.org/x/sys", + install = ["..."], + version = "v0.0.0-20210415045647-66c3f260301c", +) -### Steps to remove a module +go_module( + name = "net", + install = ["..."], + module = "golang.org/x/net", + version = "136a25c244d3019482a795d728110278d6ba09a4", + deps = [ + ":crypto", + ":text", + ], +) -1. 
**Verify no dependencies exist:** +go_module( + name = "text", + install = [ + "secure/...", + "unicode/...", + "transform", + "encoding/...", + ], + module = "golang.org/x/text", + version = "v0.3.5", +) -```bash -plz query revdeps //third_party/go:module_name --level=-1 | grep -v //third_party/go +go_module( + name = "crypto", + install = [ + "ssh/terminal", + "cast5", + ], + module = "golang.org/x/crypto", + version = "7b85b097bf7527677d54d3220065e966a0e3b613", +) ``` -If this returns no results, the module is safe to remove. - -2. **Remove the `go_repo()` target from `third_party/go/BUILD`:** - -Open `third_party/go/BUILD` and delete the corresponding `go_repo()` rule. - -3. **Remove from `go.mod` and `go.sum`:** +### Finding out what gRPC needs `third_party/go/BUILD` -```bash -go mod edit -droprequire github.com/example/module -go mod tidy +Next let's try and compile gRPC. We know it has a dependency on some of genproto, but let's set that aside for now: +```python +go_module( + name = "grpc", + module = "google.golang.org/grpc", + version = "v1.34.0", + # Installing just a subset of stuff to reduce the complexity of this example. You may want to just install "...", + # and add the rest of the dependencies. + install = [ + ".", + "codes", + "status", + ], + deps = [ + # ":genproto", + ":cmp", + ":protobuf", + ":xsys", + ":net", + ":protobuf_go", + ], +) ``` -4. **Sync the changes:** - -```bash -plz puku sync -w +If we attempt to compile this, we will get an exception along the lines of: ``` - -**Note:** Puku does not currently automate module removal, so this process is manual. 
- -### Example - -Let's say we want to remove an unused module: - -```bash -# Check for dependencies -plz query revdeps //third_party/go:unused_module --level=-1 | grep -v //third_party/go - -# If safe, remove from go.mod -go mod edit -droprequire github.com/unused/module -go mod tidy - -# Manually delete the go_repo() rule from third_party/go/BUILD -# Then sync -plz puku sync -w +google.golang.org/grpc/internal/status/status.go, line 36, column 2: can't find import: "google.golang.org/genproto/googleapis/rpc/status" ``` -## Using new modules -Duration: 4 - -Once you've added a module with `go get` and `plz puku sync`, you can use it in your code. - -### Automatic dependency management - -The easiest way is to let Puku handle dependencies automatically: - -1. Import the package in your `.go` file -2. Run `plz puku fmt //your/package` - -Puku will parse your imports and add the necessary dependencies to your BUILD file. - -### Manual dependency specification - -There are two ways to specify dependencies on third-party packages: +So let's add `google.golang.org/genproto/googleapis/rpc/...` as a dependency: +```python +go_mod_download( + name = "genproto_download", + module = "google.golang.org/genproto", + version = "v0.0.0-20210315173758-2651cd453018", +) -**1. Subrepo convention (recommended):** +go_module( + name = "genproto_rpc", + download = ":genproto_download", + install = [ + "googleapis/rpc/...", + ], + module = "google.golang.org/genproto", + deps = [ + ":protobuf", + ], +) -```python -go_library( - name = "mylib", - srcs = ["mylib.go"], +go_module( + name = "genproto_api", + download = ":genproto_download", + install = [ + "googleapis/api/...", + ], + module = "google.golang.org/genproto", deps = [ - "///third_party/go/github.com_google_uuid//", + ":grpc", + ":protobuf", ], ) ``` -The subrepo format is: `///third_party/go///` - -**2. 
Install list (go_module style):** - -Add packages to the `install` list on the `go_repo()` target: - +And update our `:grpc` rule to add `:genproto_rpc` as a dependency: ```python -go_repo( - name = "google-uuid", - module = "github.com/google/uuid", - version = "v1.6.0", - install = ["."], # Installs the root package +go_module( + name = "grpc", + module = "google.golang.org/grpc", + version = "v1.34.0", + # Installing just a subset of stuff to reduce the complexity of this example. You may want to just install "...", + # and add the rest of the dependencies. + install = [ + ".", + "codes", + "status", + ], + deps = [ + ":genproto_rpc", + ":cmp", + ":protobuf", + ":xsys", + ":net", + ":protobuf_go", + ], ) ``` -Then depend on it like: +And if we compile that with `plz build //third_party/go:grpc //third_party/go:genproto_api` we should see they build +now. + +## Using third party libraries +Third party dependencies can be depended on in the same way as `go_library()` rules: +### `third_party/go/BUILD` ```python go_library( - name = "mylib", - srcs = ["mylib.go"], - deps = ["//third_party/go:google-uuid"], + name = "service", + srcs = ["service.go"], + deps = ["//third_party/go:net"], ) ``` -### Watch mode - -For active development, use watch mode to automatically update BUILD files as you code: - -```bash -plz puku watch //src/... -``` - -This watches for changes to `.go` files and updates dependencies automatically. - -### Best practices - -- Use `plz puku fmt` to keep dependencies up to date -- Use the subrepo format for better build incrementality -- Review changes before committing to avoid unexpected version changes -- Run `plz test` after adding/updating dependencies to catch issues early +For more information on writing go code with Please, check out the [go](/codelabs/go_intro) codelab. ## What's next? Duration: 1 -Congratulations! You now know how to manage Go third-party dependencies using `go get` and Puku. 
- -### Learn more +Hopefully you now have an idea as to how to build Go modules with Please. Please is capable of so much more though! -- [Puku GitHub repository](https://github.com/please-build/puku) - Complete Puku reference -- [Please basics](/basics.html) - A more general introduction to Please. It covers a lot of what we have in this tutorial in more detail. -- [Go plugin rules](/plugins.html#go) - See the rest of the Go plugin rules and config. +- [Please basics](/basics.html) - A more general introduction to Please. It covers a lot of what we have in this +tutorial in more detail. +- [go plugin rules](/plugins.html#go) - See the rest of the Go plugin rules and config. - [Built-in rules](/lexicon.html#go) - See the rest of the built in rules. - [Config](/config.html) - See the available config options for Please. -- [Command line interface](/commands.html) - Please has a powerful command line interface. Interrogate the build graph, determine file changes since master, watch rules and build them automatically as things change, and much more! Use `plz help`, and explore this rich set of commands! +- [Command line interface](/commands.html) - Please has a powerful command line interface. Interrogate the build graph, +determine file changes since master, watch rules and build them automatically as things change, and much more! Use +`plz help`, and explore this rich set of commands! Otherwise, why not try one of the other codelabs! 
From 839de559b29241e13778434922c5ee27b37114be Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Mon, 10 Nov 2025 06:28:54 +0000 Subject: [PATCH 36/38] change format --- docs/codelabs/k8s.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/codelabs/k8s.md b/docs/codelabs/k8s.md index 85bd922c3..ea268fb47 100644 --- a/docs/codelabs/k8s.md +++ b/docs/codelabs/k8s.md @@ -107,8 +107,7 @@ go_binary( And test it works: ```bash -plz run //hello_service:hello_service & -curl localhost:8000 +plz run //hello_service:hello_service & curl localhost:8000 pkill hello_service ``` From f7ccaf429b2b2e0fc23d6425f44eebf041ee300f Mon Sep 17 00:00:00 2001 From: Ubuntu Date: Mon, 10 Nov 2025 07:59:36 +0000 Subject: [PATCH 37/38] fix typo --- docs/codelabs/k8s.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/codelabs/k8s.md b/docs/codelabs/k8s.md index ea268fb47..7c33439b4 100644 --- a/docs/codelabs/k8s.md +++ b/docs/codelabs/k8s.md @@ -107,7 +107,7 @@ go_binary( And test it works: ```bash -plz run //hello_service:hello_service & curl localhost:8000 +plz run //hello_service:hello_service && curl localhost:8000 pkill hello_service ``` @@ -380,7 +380,7 @@ plz run //hello_service/k8s:image_load && plz run //hello_service/k8s:k8s_push And check they're working as we expected: ``` -$ kubectl port-forward service/hello-svc 8000:8000 & curl localhost:8000 +$ kubectl port-forward service/hello-svc 8000:8000 && curl localhost:8000 [1] 25986 Hello world! 
From 123c45a650ad9da0f7404416793f89ddeac72edd Mon Sep 17 00:00:00 2001 From: "Xiao, Yujiao" Date: Wed, 18 Feb 2026 10:11:55 +0000 Subject: [PATCH 38/38] Remove redundant text from Python codelab conclusion --- docs/codelabs/python_intro.md | 4 ---- 1 file changed, 4 deletions(-) diff --git a/docs/codelabs/python_intro.md b/docs/codelabs/python_intro.md index b438e33c2..bb1d1882b 100644 --- a/docs/codelabs/python_intro.md +++ b/docs/codelabs/python_intro.md @@ -349,7 +349,3 @@ determine files changes since master, watch rules and build them automatically a `plz help`, and explore this rich set of commands! Otherwise, why not try one of the other codelabs! -, watch rules and build them automatically as things change and much more! Use -`plz help`, and explore this rich set of commands! - -Otherwise, why not try one of the other codelabs!