diff --git a/databuild/rules.bzl b/databuild/rules.bzl
index 05cf69d..8471a22 100644
--- a/databuild/rules.bzl
+++ b/databuild/rules.bzl
@@ -295,6 +295,8 @@ def databuild_graph(name, jobs, lookup, visibility = None):
     _databuild_graph_service(
         name = "%s.service" % name,
         lookup = "%s.lookup" % name,
+        analyze = "%s.analyze" % name,
+        exec = "%s.exec" % name,
         jobs = jobs,
         graph_label = "//%s:%s" % (native.package_name(), name),
         visibility = visibility,
@@ -699,30 +701,53 @@ def _databuild_graph_service_impl(ctx):
     env_setup = """
 export DATABUILD_CANDIDATE_JOBS="{candidate_jobs}"
 export DATABUILD_JOB_LOOKUP_PATH=$(rlocation _main/{lookup_path})
-
-# Default service arguments (can be overridden by user)
-DEFAULT_ARGS="--port {port} --event-log {db} --graph-label {graph_label} --job-lookup-path $(rlocation _main/{lookup_path})"
-
-# Use user args if provided, otherwise use defaults
-if [ "$#" -eq 0 ]; then
-    set -- $DEFAULT_ARGS
-fi
+export DATABUILD_ANALYZE_BINARY=$(rlocation _main/{analyze_path})
+export DATABUILD_EXECUTE_BINARY=$(rlocation _main/{exec_path})
 """.format(
         candidate_jobs = config_paths_str,
         lookup_path = ctx.attr.lookup.files_to_run.executable.short_path,
-        port = default_port,
-        db = default_db,
-        graph_label = ctx.attr.graph_label,
+        analyze_path = ctx.attr.analyze.files_to_run.executable.short_path,
+        exec_path = ctx.attr.exec.files_to_run.executable.short_path,
     )
 
-    ctx.actions.expand_template(
-        template = ctx.file._template,
+    # Generate a custom script instead of using the template to handle the external binary correctly
+    script_content = RUNFILES_PREFIX + env_setup + """
+EXECUTABLE_BINARY="$(rlocation "databuild+/databuild/build_graph_service")"
+
+# Always pass graph-specific configuration, allow user args to override defaults like port/host
+# Graph-specific args that should always be set:
+GRAPH_ARGS=(
+    "--graph-label" "{graph_label}"
+    "--job-lookup-path" "$(rlocation _main/{lookup_path})"
+    "--event-log" "{db}"
+)
+
+# Add default port if no port specified in user args
+if [[ ! "$*" =~ --port ]]; then
+    GRAPH_ARGS+=("--port" "{port}")
+fi
+
+# Add default host if no host specified in user args
+if [[ ! "$*" =~ --host ]]; then
+    GRAPH_ARGS+=("--host" "0.0.0.0")
+fi
+
+# Run the service with graph-specific args + user args
+if [[ -n "${{EXECUTABLE_SUBCOMMAND:-}}" ]]; then
+    exec "${{EXECUTABLE_BINARY}}" "${{EXECUTABLE_SUBCOMMAND}}" "${{GRAPH_ARGS[@]}}" "$@"
+else
+    exec "${{EXECUTABLE_BINARY}}" "${{GRAPH_ARGS[@]}}" "$@"
+fi
+""".format(
+        graph_label = ctx.attr.graph_label,
+        lookup_path = ctx.attr.lookup.files_to_run.executable.short_path,
+        db = default_db,
+        port = default_port,
+    )
+
+    ctx.actions.write(
         output = script,
-        substitutions = {
-            "%{EXECUTABLE_PATH}": ctx.attr._service.files_to_run.executable.path,
-            "%{RUNFILES_PREFIX}": RUNFILES_PREFIX,
-            "%{PREFIX}": env_setup,
-        },
+        content = script_content,
         is_executable = True,
     )
 
@@ -733,8 +758,9 @@ fi
     ]
 
     runfiles = ctx.runfiles(
-        files = [ctx.executable.lookup, ctx.executable._service] + configure_executables,
+        files = [ctx.executable.lookup, ctx.executable._service, ctx.executable.analyze, ctx.executable.exec] + configure_executables,
     ).merge(ctx.attr.lookup.default_runfiles).merge(ctx.attr._service.default_runfiles).merge(
+        ctx.attr.analyze.default_runfiles).merge(ctx.attr.exec.default_runfiles).merge(
         ctx.attr._bash_runfiles.default_runfiles
     ).merge_all([job.default_runfiles for job in ctx.attr.jobs])
 
@@ -763,6 +789,18 @@ _databuild_graph_service = rule(
             doc = "The list of jobs that are candidates for building partitions in this databuild graph",
             allow_empty = False,
         ),
+        "analyze": attr.label(
+            doc = "Target that implements the graph analysis logic",
+            mandatory = True,
+            executable = True,
+            cfg = "target",
+        ),
+        "exec": attr.label(
+            doc = "Target that implements the graph execution logic",
+            mandatory = True,
+            executable = True,
+            cfg = "target",
+        ),
         "graph_label": attr.string(
            doc = "The label of this graph for service identification",
            mandatory = True,