Big update
This commit is contained in:
parent
5403d2ebfc
commit
db993ca747
29 changed files with 7543 additions and 111 deletions
9
.bazelproject
Normal file
9
.bazelproject
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
|
||||
directories:
|
||||
./
|
||||
|
||||
targets:
|
||||
//runtime/...
|
||||
//:all
|
||||
//job/...
|
||||
//graph/...
|
||||
|
|
@ -1,3 +1,5 @@
|
|||
load("@rules_python//python:pip.bzl", "compile_pip_requirements")
|
||||
|
||||
filegroup(
|
||||
name = "jq",
|
||||
srcs = ["//runtime:jq"],
|
||||
|
|
@ -9,3 +11,9 @@ filegroup(
|
|||
srcs = ["databuild.schema.json"],
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
compile_pip_requirements(
|
||||
name = "py_requirements",
|
||||
src = "requirements.in",
|
||||
requirements_txt = "requirements_lock.txt",
|
||||
)
|
||||
|
|
|
|||
18
MODULE.bazel
18
MODULE.bazel
|
|
@ -6,3 +6,21 @@ module(
|
|||
bazel_dep(name = "bazel_skylib", version = "1.7.1")
|
||||
bazel_dep(name = "platforms", version = "0.0.11")
|
||||
bazel_dep(name = "rules_shell", version = "0.4.0")
|
||||
|
||||
bazel_dep(name = "rules_go", version = "0.46.0")
|
||||
|
||||
bazel_dep(name = "rules_python", version = "1.3.0")
|
||||
|
||||
python = use_extension("@rules_python//python/extensions:python.bzl", "python")
|
||||
python.toolchain(
|
||||
python_version = "3.13",
|
||||
)
|
||||
|
||||
pip = use_extension("@rules_python//python/extensions:pip.bzl", "pip")
|
||||
pip.parse(
|
||||
hub_name = "pypi",
|
||||
python_version = "3.13",
|
||||
requirements_lock = "//:requirements_lock.txt",
|
||||
)
|
||||
|
||||
use_repo(pip, "pypi")
|
||||
|
|
|
|||
3458
MODULE.bazel.lock
3458
MODULE.bazel.lock
File diff suppressed because one or more lines are too long
|
|
@ -4,17 +4,17 @@ load("@rules_java//java:defs.bzl", "java_binary")
|
|||
databuild_graph(
|
||||
name = "basic_graph",
|
||||
jobs = [
|
||||
":generate_number_job",
|
||||
":sum_job",
|
||||
"//:generate_number_job",
|
||||
"//:sum_job",
|
||||
],
|
||||
plan = ":basic_graph_plan",
|
||||
lookup = ":job_lookup",
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
py_binary(
|
||||
name = "basic_graph_plan",
|
||||
srcs = ["basic_graph.py"],
|
||||
main = "basic_graph.py",
|
||||
name = "job_lookup",
|
||||
srcs = ["job_lookup.py"],
|
||||
main = "job_lookup.py",
|
||||
)
|
||||
|
||||
databuild_job(
|
||||
|
|
|
|||
|
|
@ -47,7 +47,7 @@ public class GenerateExecute {
|
|||
System.out.println("Generated random number " + randomNumber + " for partition " + partitionRef);
|
||||
|
||||
// Write the random number to the output file
|
||||
String outputPath = BASE_PATH + partitionRef;
|
||||
String outputPath = BASE_PATH + "generated_number/" + partitionRef;
|
||||
System.out.println("Writing random number " + randomNumber + " to " + outputPath);
|
||||
// Ensure dir exists
|
||||
new java.io.File(outputPath).getParentFile().mkdirs();
|
||||
|
|
|
|||
File diff suppressed because one or more lines are too long
|
|
@ -27,7 +27,7 @@ public class SumExecute {
|
|||
}
|
||||
System.out.println("Sum of " + args.length + " partitions: " + sum);
|
||||
// Write the sum to the output file
|
||||
String outPath = BASE_PATH + outputRef;
|
||||
String outPath = BASE_PATH + "sum/" + outputRef;
|
||||
System.out.println("Writing sum " + sum + " to " + outPath);
|
||||
try (java.io.FileWriter writer = new java.io.FileWriter(outPath)) {
|
||||
writer.write(String.valueOf(sum));
|
||||
|
|
|
|||
27
examples/basic_graph/job_lookup.py
Normal file
27
examples/basic_graph/job_lookup.py
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
|
||||
import sys
|
||||
import json
|
||||
from collections import defaultdict
|
||||
|
||||
|
||||
def main():
|
||||
output_refs = sys.argv[1:]
|
||||
assert len(output_refs) > 0, "Need at least 1 ref to lookup"
|
||||
|
||||
result = defaultdict(list)
|
||||
|
||||
# Partition output prefix makes it obvious which job should fulfill
|
||||
for ref in output_refs:
|
||||
body, tail = ref.rsplit("/", 1)
|
||||
if "generated_number" in body:
|
||||
result["//:generate_number_job"].append(ref)
|
||||
elif "sum" in body:
|
||||
result["//:sum_job"].append(ref)
|
||||
else:
|
||||
raise ValueError(f"No job found for ref `{ref}`")
|
||||
|
||||
print(json.dumps({k: v for k, v in result.items() if v}))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
|
|
@ -15,3 +15,12 @@ sh_test(
|
|||
"//:sum_job.exec",
|
||||
],
|
||||
)
|
||||
|
||||
sh_test(
|
||||
name = "graph_test",
|
||||
srcs = ["graph_test.sh"],
|
||||
data = [
|
||||
"//:basic_graph.lookup",
|
||||
"//:basic_graph.analyze",
|
||||
],
|
||||
)
|
||||
|
|
|
|||
|
|
@ -6,9 +6,9 @@ generate_number_job.cfg pippin salem sadie
|
|||
# Test run
|
||||
generate_number_job.cfg pippin | generate_number_job.exec
|
||||
# Validate that contents of pippin is 43
|
||||
if [[ "$(cat /tmp/databuild/examples/basic_graph/pippin)" != "43" ]]; then
|
||||
if [[ "$(cat /tmp/databuild/examples/basic_graph/generated_number/pippin)" != "43" ]]; then
|
||||
echo "Assertion failed: File does not contain 43"
|
||||
cat /tmp/databuild/examples/basic_graph/pippin
|
||||
cat /tmp/databuild/examples/basic_graph/generated_number/pippin
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
|
|
|||
5
examples/basic_graph/test/graph_test.sh
Executable file
5
examples/basic_graph/test/graph_test.sh
Executable file
|
|
@ -0,0 +1,5 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
basic_graph.lookup /tmp/databuild/examples/basic_graph/generated_number/pippin_salem_sadie
|
||||
|
||||
basic_graph.analyze /tmp/databuild/examples/basic_graph/generated_number/pippin_salem_sadie
|
||||
|
|
@ -4,14 +4,14 @@
|
|||
sum_job.cfg pippin_salem_sadie
|
||||
|
||||
# Test run
|
||||
echo -n 43 > /tmp/databuild/examples/basic_graph/pippin
|
||||
echo -n 56 > /tmp/databuild/examples/basic_graph/salem
|
||||
echo -n 40 > /tmp/databuild/examples/basic_graph/sadie
|
||||
echo -n 43 > /tmp/databuild/examples/basic_graph/generated_number/pippin
|
||||
echo -n 56 > /tmp/databuild/examples/basic_graph/generated_number/salem
|
||||
echo -n 40 > /tmp/databuild/examples/basic_graph/generated_number/sadie
|
||||
sum_job.cfg pippin_salem_sadie | sum_job.exec
|
||||
# Validate that contents of pippin is 43
|
||||
if [[ "$(cat /tmp/databuild/examples/basic_graph/pippin_salem_sadie)" != "139" ]]; then
|
||||
echo "Assertion failed: File does not contain 139"
|
||||
cat /tmp/databuild/examples/basic_graph/pippin_salem_sadie
|
||||
cat /tmp/databuild/examples/basic_graph/sum/pippin_salem_sadie
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@
|
|||
"https://bcr.bazel.build/modules/abseil-cpp/20230802.1/MODULE.bazel": "fa92e2eb41a04df73cdabeec37107316f7e5272650f81d6cc096418fe647b915",
|
||||
"https://bcr.bazel.build/modules/abseil-cpp/20240116.1/MODULE.bazel": "37bcdb4440fbb61df6a1c296ae01b327f19e9bb521f9b8e26ec854b6f97309ed",
|
||||
"https://bcr.bazel.build/modules/abseil-cpp/20240116.1/source.json": "9be551b8d4e3ef76875c0d744b5d6a504a27e3ae67bc6b28f46415fd2d2957da",
|
||||
"https://bcr.bazel.build/modules/bazel_features/1.1.0/MODULE.bazel": "cfd42ff3b815a5f39554d97182657f8c4b9719568eb7fded2b9135f084bf760b",
|
||||
"https://bcr.bazel.build/modules/bazel_features/1.1.1/MODULE.bazel": "27b8c79ef57efe08efccbd9dd6ef70d61b4798320b8d3c134fd571f78963dbcd",
|
||||
"https://bcr.bazel.build/modules/bazel_features/1.11.0/MODULE.bazel": "f9382337dd5a474c3b7d334c2f83e50b6eaedc284253334cf823044a26de03e8",
|
||||
"https://bcr.bazel.build/modules/bazel_features/1.15.0/MODULE.bazel": "d38ff6e517149dc509406aca0db3ad1efdd890a85e049585b7234d04238e2a4d",
|
||||
|
|
@ -34,6 +35,10 @@
|
|||
"https://bcr.bazel.build/modules/bazel_skylib/1.7.1/source.json": "f121b43eeefc7c29efbd51b83d08631e2347297c95aac9764a701f2a6a2bb953",
|
||||
"https://bcr.bazel.build/modules/buildozer/7.1.2/MODULE.bazel": "2e8dd40ede9c454042645fd8d8d0cd1527966aa5c919de86661e62953cd73d84",
|
||||
"https://bcr.bazel.build/modules/buildozer/7.1.2/source.json": "c9028a501d2db85793a6996205c8de120944f50a0d570438fcae0457a5f9d1f8",
|
||||
"https://bcr.bazel.build/modules/gazelle/0.32.0/MODULE.bazel": "b499f58a5d0d3537f3cf5b76d8ada18242f64ec474d8391247438bf04f58c7b8",
|
||||
"https://bcr.bazel.build/modules/gazelle/0.33.0/MODULE.bazel": "a13a0f279b462b784fb8dd52a4074526c4a2afe70e114c7d09066097a46b3350",
|
||||
"https://bcr.bazel.build/modules/gazelle/0.34.0/MODULE.bazel": "abdd8ce4d70978933209db92e436deb3a8b737859e9354fb5fd11fb5c2004c8a",
|
||||
"https://bcr.bazel.build/modules/gazelle/0.34.0/source.json": "cdf0182297e3adabbdea2da88d5b930b2ee5e56511c3e7d6512069db6315a1f7",
|
||||
"https://bcr.bazel.build/modules/google_benchmark/1.8.2/MODULE.bazel": "a70cf1bba851000ba93b58ae2f6d76490a9feb74192e57ab8e8ff13c34ec50cb",
|
||||
"https://bcr.bazel.build/modules/googletest/1.11.0/MODULE.bazel": "3a83f095183f66345ca86aa13c58b59f9f94a2f81999c093d4eeaa2d262d12f4",
|
||||
"https://bcr.bazel.build/modules/googletest/1.14.0.bcr.1/MODULE.bazel": "22c31a561553727960057361aa33bf20fb2e98584bc4fec007906e27053f80c6",
|
||||
|
|
@ -54,9 +59,12 @@
|
|||
"https://bcr.bazel.build/modules/protobuf/27.0/MODULE.bazel": "7873b60be88844a0a1d8f80b9d5d20cfbd8495a689b8763e76c6372998d3f64c",
|
||||
"https://bcr.bazel.build/modules/protobuf/27.1/MODULE.bazel": "703a7b614728bb06647f965264967a8ef1c39e09e8f167b3ca0bb1fd80449c0d",
|
||||
"https://bcr.bazel.build/modules/protobuf/29.0-rc2/MODULE.bazel": "6241d35983510143049943fc0d57937937122baf1b287862f9dc8590fc4c37df",
|
||||
"https://bcr.bazel.build/modules/protobuf/29.0-rc3/MODULE.bazel": "33c2dfa286578573afc55a7acaea3cada4122b9631007c594bf0729f41c8de92",
|
||||
"https://bcr.bazel.build/modules/protobuf/29.0/MODULE.bazel": "319dc8bf4c679ff87e71b1ccfb5a6e90a6dbc4693501d471f48662ac46d04e4e",
|
||||
"https://bcr.bazel.build/modules/protobuf/29.0/source.json": "b857f93c796750eef95f0d61ee378f3420d00ee1dd38627b27193aa482f4f981",
|
||||
"https://bcr.bazel.build/modules/protobuf/3.19.0/MODULE.bazel": "6b5fbb433f760a99a22b18b6850ed5784ef0e9928a72668b66e4d7ccd47db9b0",
|
||||
"https://bcr.bazel.build/modules/protobuf/3.19.2/MODULE.bazel": "532ffe5f2186b69fdde039efe6df13ba726ff338c6bc82275ad433013fa10573",
|
||||
"https://bcr.bazel.build/modules/protobuf/3.19.6/MODULE.bazel": "9233edc5e1f2ee276a60de3eaa47ac4132302ef9643238f23128fea53ea12858",
|
||||
"https://bcr.bazel.build/modules/pybind11_bazel/2.11.1/MODULE.bazel": "88af1c246226d87e65be78ed49ecd1e6f5e98648558c14ce99176da041dc378e",
|
||||
"https://bcr.bazel.build/modules/pybind11_bazel/2.11.1/source.json": "be4789e951dd5301282729fe3d4938995dc4c1a81c2ff150afc9f1b0504c6022",
|
||||
"https://bcr.bazel.build/modules/re2/2023-09-01/MODULE.bazel": "cb3d511531b16cfc78a225a9e2136007a48cf8a677e4264baeab57fe78a80206",
|
||||
|
|
@ -78,6 +86,10 @@
|
|||
"https://bcr.bazel.build/modules/rules_foreign_cc/0.9.0/MODULE.bazel": "c9e8c682bf75b0e7c704166d79b599f93b72cfca5ad7477df596947891feeef6",
|
||||
"https://bcr.bazel.build/modules/rules_fuzzing/0.5.2/MODULE.bazel": "40c97d1144356f52905566c55811f13b299453a14ac7769dfba2ac38192337a8",
|
||||
"https://bcr.bazel.build/modules/rules_fuzzing/0.5.2/source.json": "c8b1e2c717646f1702290959a3302a178fb639d987ab61d548105019f11e527e",
|
||||
"https://bcr.bazel.build/modules/rules_go/0.41.0/MODULE.bazel": "55861d8e8bb0e62cbd2896f60ff303f62ffcb0eddb74ecb0e5c0cbe36fc292c8",
|
||||
"https://bcr.bazel.build/modules/rules_go/0.42.0/MODULE.bazel": "8cfa875b9aa8c6fce2b2e5925e73c1388173ea3c32a0db4d2b4804b453c14270",
|
||||
"https://bcr.bazel.build/modules/rules_go/0.46.0/MODULE.bazel": "3477df8bdcc49e698b9d25f734c4f3a9f5931ff34ee48a2c662be168f5f2d3fd",
|
||||
"https://bcr.bazel.build/modules/rules_go/0.46.0/source.json": "fbf0e50e8ed487272e5c0977c0b67c74cbe97e1880b45bbeff44a3338dc8a08e",
|
||||
"https://bcr.bazel.build/modules/rules_java/4.0.0/MODULE.bazel": "5a78a7ae82cd1a33cef56dc578c7d2a46ed0dca12643ee45edbb8417899e6f74",
|
||||
"https://bcr.bazel.build/modules/rules_java/5.3.5/MODULE.bazel": "a4ec4f2db570171e3e5eb753276ee4b389bae16b96207e9d3230895c99644b86",
|
||||
"https://bcr.bazel.build/modules/rules_java/6.0.0/MODULE.bazel": "8a43b7df601a7ec1af61d79345c17b31ea1fedc6711fd4abfd013ea612978e39",
|
||||
|
|
@ -90,6 +102,8 @@
|
|||
"https://bcr.bazel.build/modules/rules_java/7.6.1/MODULE.bazel": "2f14b7e8a1aa2f67ae92bc69d1ec0fa8d9f827c4e17ff5e5f02e91caa3b2d0fe",
|
||||
"https://bcr.bazel.build/modules/rules_java/8.11.0/MODULE.bazel": "c3d280bc5ff1038dcb3bacb95d3f6b83da8dd27bba57820ec89ea4085da767ad",
|
||||
"https://bcr.bazel.build/modules/rules_java/8.11.0/source.json": "302b52a39259a85aa06ca3addb9787864ca3e03b432a5f964ea68244397e7544",
|
||||
"https://bcr.bazel.build/modules/rules_java/8.3.2/MODULE.bazel": "7336d5511ad5af0b8615fdc7477535a2e4e723a357b6713af439fe8cf0195017",
|
||||
"https://bcr.bazel.build/modules/rules_java/8.5.1/MODULE.bazel": "d8a9e38cc5228881f7055a6079f6f7821a073df3744d441978e7a43e20226939",
|
||||
"https://bcr.bazel.build/modules/rules_jvm_external/4.4.2/MODULE.bazel": "a56b85e418c83eb1839819f0b515c431010160383306d13ec21959ac412d2fe7",
|
||||
"https://bcr.bazel.build/modules/rules_jvm_external/5.1/MODULE.bazel": "33f6f999e03183f7d088c9be518a63467dfd0be94a11d0055fe2d210f89aa909",
|
||||
"https://bcr.bazel.build/modules/rules_jvm_external/5.2/MODULE.bazel": "d9351ba35217ad0de03816ef3ed63f89d411349353077348a45348b096615036",
|
||||
|
|
@ -119,7 +133,8 @@
|
|||
"https://bcr.bazel.build/modules/rules_python/0.31.0/MODULE.bazel": "93a43dc47ee570e6ec9f5779b2e64c1476a6ce921c48cc9a1678a91dd5f8fd58",
|
||||
"https://bcr.bazel.build/modules/rules_python/0.4.0/MODULE.bazel": "9208ee05fd48bf09ac60ed269791cf17fb343db56c8226a720fbb1cdf467166c",
|
||||
"https://bcr.bazel.build/modules/rules_python/0.40.0/MODULE.bazel": "9d1a3cd88ed7d8e39583d9ffe56ae8a244f67783ae89b60caafc9f5cf318ada7",
|
||||
"https://bcr.bazel.build/modules/rules_python/0.40.0/source.json": "939d4bd2e3110f27bfb360292986bb79fd8dcefb874358ccd6cdaa7bda029320",
|
||||
"https://bcr.bazel.build/modules/rules_python/1.3.0/MODULE.bazel": "8361d57eafb67c09b75bf4bbe6be360e1b8f4f18118ab48037f2bd50aa2ccb13",
|
||||
"https://bcr.bazel.build/modules/rules_python/1.3.0/source.json": "25932f917cd279c7baefa6cb1d3fa8750a7a29de522024449b19af6eab51f4a0",
|
||||
"https://bcr.bazel.build/modules/rules_shell/0.2.0/MODULE.bazel": "fda8a652ab3c7d8fee214de05e7a9916d8b28082234e8d2c0094505c5268ed3c",
|
||||
"https://bcr.bazel.build/modules/rules_shell/0.4.0/MODULE.bazel": "0f8f11bb3cd11755f0b48c1de0bbcf62b4b34421023aa41a2fc74ef68d9584f0",
|
||||
"https://bcr.bazel.build/modules/rules_shell/0.4.0/source.json": "1d7fa7f941cd41dc2704ba5b4edc2e2230eea1cc600d80bd2b65838204c50b95",
|
||||
|
|
@ -128,15 +143,200 @@
|
|||
"https://bcr.bazel.build/modules/stardoc/0.5.6/MODULE.bazel": "c43dabc564990eeab55e25ed61c07a1aadafe9ece96a4efabb3f8bf9063b71ef",
|
||||
"https://bcr.bazel.build/modules/stardoc/0.7.0/MODULE.bazel": "05e3d6d30c099b6770e97da986c53bd31844d7f13d41412480ea265ac9e8079c",
|
||||
"https://bcr.bazel.build/modules/stardoc/0.7.1/MODULE.bazel": "3548faea4ee5dda5580f9af150e79d0f6aea934fc60c1cc50f4efdd9420759e7",
|
||||
"https://bcr.bazel.build/modules/stardoc/0.7.1/source.json": "b6500ffcd7b48cd72c29bb67bcac781e12701cc0d6d55d266a652583cfcdab01",
|
||||
"https://bcr.bazel.build/modules/stardoc/0.7.2/MODULE.bazel": "fc152419aa2ea0f51c29583fab1e8c99ddefd5b3778421845606ee628629e0e5",
|
||||
"https://bcr.bazel.build/modules/stardoc/0.7.2/source.json": "58b029e5e901d6802967754adf0a9056747e8176f017cfe3607c0851f4d42216",
|
||||
"https://bcr.bazel.build/modules/upb/0.0.0-20220923-a547704/MODULE.bazel": "7298990c00040a0e2f121f6c32544bab27d4452f80d9ce51349b1a28f3005c43",
|
||||
"https://bcr.bazel.build/modules/zlib/1.2.11/MODULE.bazel": "07b389abc85fdbca459b69e2ec656ae5622873af3f845e1c9d80fe179f3effa0",
|
||||
"https://bcr.bazel.build/modules/zlib/1.2.12/MODULE.bazel": "3b1a8834ada2a883674be8cbd36ede1b6ec481477ada359cd2d3ddc562340b27",
|
||||
"https://bcr.bazel.build/modules/zlib/1.3.1.bcr.3/MODULE.bazel": "af322bc08976524477c79d1e45e241b6efbeb918c497e8840b8ab116802dda79",
|
||||
"https://bcr.bazel.build/modules/zlib/1.3.1.bcr.3/source.json": "2be409ac3c7601245958cd4fcdff4288be79ed23bd690b4b951f500d54ee6e7d",
|
||||
"https://bcr.bazel.build/modules/zlib/1.3.1/MODULE.bazel": "751c9940dcfe869f5f7274e1295422a34623555916eb98c174c1e945594bf198"
|
||||
},
|
||||
"selectedYankedVersions": {},
|
||||
"moduleExtensions": {
|
||||
"@@rules_go+//go:extensions.bzl%go_sdk": {
|
||||
"os:osx,arch:aarch64": {
|
||||
"bzlTransitiveDigest": "jBP0cRKOr+A42aPGunoasOD+vrmMLJIJ8Jwi65DdelE=",
|
||||
"usagesDigest": "9WUdtwMNxQMIp54gOxEBVws3WnIUdQcvX9pRfBtrtvQ=",
|
||||
"recordedFileInputs": {},
|
||||
"recordedDirentsInputs": {},
|
||||
"envVariables": {},
|
||||
"generatedRepoSpecs": {
|
||||
"io_bazel_rules_nogo": {
|
||||
"repoRuleId": "@@rules_go+//go/private:nogo.bzl%go_register_nogo",
|
||||
"attributes": {
|
||||
"nogo": "@io_bazel_rules_go//:default_nogo",
|
||||
"includes": [
|
||||
"'@@//:__subpackages__'"
|
||||
],
|
||||
"excludes": []
|
||||
}
|
||||
},
|
||||
"go_default_sdk": {
|
||||
"repoRuleId": "@@rules_go+//go/private:sdk.bzl%go_download_sdk_rule",
|
||||
"attributes": {
|
||||
"goos": "",
|
||||
"goarch": "",
|
||||
"sdks": {},
|
||||
"experiments": [],
|
||||
"patches": [],
|
||||
"patch_strip": 0,
|
||||
"urls": [
|
||||
"https://dl.google.com/go/{}"
|
||||
],
|
||||
"version": "1.21.1",
|
||||
"strip_prefix": "go"
|
||||
}
|
||||
},
|
||||
"rules_go__download_0_darwin_amd64": {
|
||||
"repoRuleId": "@@rules_go+//go/private:sdk.bzl%go_download_sdk_rule",
|
||||
"attributes": {
|
||||
"goos": "",
|
||||
"goarch": "",
|
||||
"sdks": {},
|
||||
"urls": [
|
||||
"https://dl.google.com/go/{}"
|
||||
],
|
||||
"version": "1.21.1"
|
||||
}
|
||||
},
|
||||
"rules_go__download_0_linux_amd64": {
|
||||
"repoRuleId": "@@rules_go+//go/private:sdk.bzl%go_download_sdk_rule",
|
||||
"attributes": {
|
||||
"goos": "",
|
||||
"goarch": "",
|
||||
"sdks": {},
|
||||
"urls": [
|
||||
"https://dl.google.com/go/{}"
|
||||
],
|
||||
"version": "1.21.1"
|
||||
}
|
||||
},
|
||||
"rules_go__download_0_linux_arm64": {
|
||||
"repoRuleId": "@@rules_go+//go/private:sdk.bzl%go_download_sdk_rule",
|
||||
"attributes": {
|
||||
"goos": "",
|
||||
"goarch": "",
|
||||
"sdks": {},
|
||||
"urls": [
|
||||
"https://dl.google.com/go/{}"
|
||||
],
|
||||
"version": "1.21.1"
|
||||
}
|
||||
},
|
||||
"rules_go__download_0_windows_amd64": {
|
||||
"repoRuleId": "@@rules_go+//go/private:sdk.bzl%go_download_sdk_rule",
|
||||
"attributes": {
|
||||
"goos": "",
|
||||
"goarch": "",
|
||||
"sdks": {},
|
||||
"urls": [
|
||||
"https://dl.google.com/go/{}"
|
||||
],
|
||||
"version": "1.21.1"
|
||||
}
|
||||
},
|
||||
"rules_go__download_0_windows_arm64": {
|
||||
"repoRuleId": "@@rules_go+//go/private:sdk.bzl%go_download_sdk_rule",
|
||||
"attributes": {
|
||||
"goos": "",
|
||||
"goarch": "",
|
||||
"sdks": {},
|
||||
"urls": [
|
||||
"https://dl.google.com/go/{}"
|
||||
],
|
||||
"version": "1.21.1"
|
||||
}
|
||||
},
|
||||
"go_host_compatible_sdk_label": {
|
||||
"repoRuleId": "@@rules_go+//go/private:extensions.bzl%host_compatible_toolchain",
|
||||
"attributes": {
|
||||
"toolchain": "@go_default_sdk//:ROOT"
|
||||
}
|
||||
},
|
||||
"go_toolchains": {
|
||||
"repoRuleId": "@@rules_go+//go/private:sdk.bzl%go_multiple_toolchains",
|
||||
"attributes": {
|
||||
"prefixes": [
|
||||
"_0000_go_default_sdk_",
|
||||
"_0001_rules_go__download_0_darwin_amd64_",
|
||||
"_0002_rules_go__download_0_linux_amd64_",
|
||||
"_0003_rules_go__download_0_linux_arm64_",
|
||||
"_0004_rules_go__download_0_windows_amd64_",
|
||||
"_0005_rules_go__download_0_windows_arm64_"
|
||||
],
|
||||
"geese": [
|
||||
"",
|
||||
"darwin",
|
||||
"linux",
|
||||
"linux",
|
||||
"windows",
|
||||
"windows"
|
||||
],
|
||||
"goarchs": [
|
||||
"",
|
||||
"amd64",
|
||||
"amd64",
|
||||
"arm64",
|
||||
"amd64",
|
||||
"arm64"
|
||||
],
|
||||
"sdk_repos": [
|
||||
"go_default_sdk",
|
||||
"rules_go__download_0_darwin_amd64",
|
||||
"rules_go__download_0_linux_amd64",
|
||||
"rules_go__download_0_linux_arm64",
|
||||
"rules_go__download_0_windows_amd64",
|
||||
"rules_go__download_0_windows_arm64"
|
||||
],
|
||||
"sdk_types": [
|
||||
"remote",
|
||||
"remote",
|
||||
"remote",
|
||||
"remote",
|
||||
"remote",
|
||||
"remote"
|
||||
],
|
||||
"sdk_versions": [
|
||||
"1.21.1",
|
||||
"1.21.1",
|
||||
"1.21.1",
|
||||
"1.21.1",
|
||||
"1.21.1",
|
||||
"1.21.1"
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"recordedRepoMappingEntries": [
|
||||
[
|
||||
"bazel_features+",
|
||||
"bazel_features_globals",
|
||||
"bazel_features++version_extension+bazel_features_globals"
|
||||
],
|
||||
[
|
||||
"bazel_features+",
|
||||
"bazel_features_version",
|
||||
"bazel_features++version_extension+bazel_features_version"
|
||||
],
|
||||
[
|
||||
"rules_go+",
|
||||
"bazel_tools",
|
||||
"bazel_tools"
|
||||
],
|
||||
[
|
||||
"rules_go+",
|
||||
"io_bazel_rules_go",
|
||||
"rules_go+"
|
||||
],
|
||||
[
|
||||
"rules_go+",
|
||||
"io_bazel_rules_go_bazel_features",
|
||||
"bazel_features+"
|
||||
]
|
||||
]
|
||||
}
|
||||
},
|
||||
"@@rules_kotlin+//src/main/starlark/core/repositories:bzlmod_setup.bzl%rules_kotlin_extensions": {
|
||||
"general": {
|
||||
"bzlTransitiveDigest": "sFhcgPbDQehmbD1EOXzX4H1q/CD5df8zwG4kp4jbvr8=",
|
||||
|
|
@ -200,6 +400,42 @@
|
|||
]
|
||||
]
|
||||
}
|
||||
},
|
||||
"@@rules_python+//python/uv:uv.bzl%uv": {
|
||||
"general": {
|
||||
"bzlTransitiveDigest": "Xpqjnjzy6zZ90Es9Wa888ZLHhn7IsNGbph/e6qoxzw8=",
|
||||
"usagesDigest": "vJ5RHUxAnV24M5swNGiAnkdxMx3Hp/iOLmNANTC5Xc8=",
|
||||
"recordedFileInputs": {},
|
||||
"recordedDirentsInputs": {},
|
||||
"envVariables": {},
|
||||
"generatedRepoSpecs": {
|
||||
"uv": {
|
||||
"repoRuleId": "@@rules_python+//python/uv/private:uv_toolchains_repo.bzl%uv_toolchains_repo",
|
||||
"attributes": {
|
||||
"toolchain_type": "'@@rules_python+//python/uv:uv_toolchain_type'",
|
||||
"toolchain_names": [
|
||||
"none"
|
||||
],
|
||||
"toolchain_implementations": {
|
||||
"none": "'@@rules_python+//python:none'"
|
||||
},
|
||||
"toolchain_compatible_with": {
|
||||
"none": [
|
||||
"@platforms//:incompatible"
|
||||
]
|
||||
},
|
||||
"toolchain_target_settings": {}
|
||||
}
|
||||
}
|
||||
},
|
||||
"recordedRepoMappingEntries": [
|
||||
[
|
||||
"rules_python+",
|
||||
"platforms",
|
||||
"platforms"
|
||||
]
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
22
graph/BUILD.bazel
Normal file
22
graph/BUILD.bazel
Normal file
|
|
@ -0,0 +1,22 @@
|
|||
load("@pypi//:requirements.bzl", "requirement")
|
||||
load("@rules_go//go:def.bzl", "go_binary")
|
||||
|
||||
exports_files([
|
||||
"go_analyze_wrapper.sh.tpl",
|
||||
])
|
||||
|
||||
py_binary(
|
||||
name = "graph_analyze",
|
||||
srcs = ["analyze.py"],
|
||||
main = "analyze.py",
|
||||
deps = [
|
||||
requirement("pydantic"),
|
||||
],
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
|
||||
go_binary(
|
||||
name = "go_analyze",
|
||||
srcs = ["analyze.go"],
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
279
graph/analyze.go
Normal file
279
graph/analyze.go
Normal file
|
|
@ -0,0 +1,279 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"strings"
|
||||
"log"
|
||||
)
|
||||
|
||||
// DataDepType represents the type of data dependency
|
||||
type DataDepType string
|
||||
|
||||
const (
|
||||
Query DataDepType = "query"
|
||||
Materialize DataDepType = "materialize"
|
||||
)
|
||||
|
||||
// DataDep represents a data dependency
|
||||
type DataDep struct {
|
||||
DepType DataDepType `json:"depType"`
|
||||
Ref string `json:"ref"`
|
||||
}
|
||||
|
||||
// JobConfig represents the configuration for a job
|
||||
type JobConfig struct {
|
||||
Inputs []DataDep `json:"inputs"`
|
||||
Outputs []string `json:"outputs"`
|
||||
Args []string `json:"args"`
|
||||
Env map[string]string `json:"env"`
|
||||
}
|
||||
|
||||
// Task represents a job task
|
||||
type Task struct {
|
||||
JobLabel string `json:"jobLabel"`
|
||||
Config JobConfig `json:"config"`
|
||||
}
|
||||
|
||||
// JobGraph represents a graph of jobs
|
||||
type JobGraph struct {
|
||||
Outputs []string `json:"outputs"`
|
||||
Nodes []Task `json:"nodes"`
|
||||
}
|
||||
|
||||
// PartitionManifest represents a partition manifest
|
||||
type PartitionManifest struct {
|
||||
Outputs string `json:"outputs"`
|
||||
Inputs []DataDep `json:"inputs"`
|
||||
StartTime int `json:"startTime"`
|
||||
EndTime int `json:"endTime"`
|
||||
Config JobConfig `json:"config"`
|
||||
}
|
||||
|
||||
// jobLabelToCfgPath converts a job label to a configuration path
|
||||
func jobLabelToCfgPath(jobLabel string) string {
|
||||
return "." + strings.Replace(strings.Replace(jobLabel, "//", "", -1), ":", "/", -1) + ".cfg"
|
||||
}
|
||||
|
||||
// configure configures the specified job to produce the desired outputs
|
||||
func configure(jobLabel string, outputRefs []string) ([]Task, error) {
|
||||
candidateJobsStr := os.Getenv("DATABUILD_CANDIDATE_JOBS")
|
||||
var jobPathMap map[string]string
|
||||
if err := json.Unmarshal([]byte(candidateJobsStr), &jobPathMap); err != nil {
|
||||
return nil, fmt.Errorf("failed to parse DATABUILD_CANDIDATE_JOBS: %v", err)
|
||||
}
|
||||
|
||||
// Look up the executable path for this job
|
||||
execPath, ok := jobPathMap[jobLabel]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("job %s is not a candidate job", jobLabel)
|
||||
}
|
||||
|
||||
// Check if executable exists
|
||||
if _, err := os.Stat(execPath); err != nil {
|
||||
if os.IsNotExist(err) {
|
||||
return nil, fmt.Errorf("executable not found at path: %s", execPath)
|
||||
}
|
||||
return nil, fmt.Errorf("error checking executable: %v", err)
|
||||
}
|
||||
|
||||
log.Printf("Executing job configuration: %s %v", execPath, outputRefs)
|
||||
cmd := exec.Command(execPath, outputRefs...)
|
||||
|
||||
var stdout, stderr strings.Builder
|
||||
cmd.Stdout = &stdout
|
||||
cmd.Stderr = &stderr
|
||||
err := cmd.Run()
|
||||
if err != nil {
|
||||
log.Printf("Job configuration failed: %s", stderr.String())
|
||||
return nil, fmt.Errorf("Failed to run job config: %s", stderr.String())
|
||||
}
|
||||
log.Printf("Job configuration succeeded for %s", jobLabel)
|
||||
|
||||
// Parse the job configurations
|
||||
var jobConfigs []JobConfig
|
||||
err = json.Unmarshal([]byte(stdout.String()), &jobConfigs)
|
||||
if err != nil {
|
||||
log.Printf("Error parsing job configs for %s: %s. `%s`", jobLabel, err, stdout.String())
|
||||
return nil, fmt.Errorf("Failed to parse job configs: %s", err)
|
||||
}
|
||||
|
||||
// Create tasks
|
||||
tasks := make([]Task, len(jobConfigs))
|
||||
for i, cfg := range jobConfigs {
|
||||
tasks[i] = Task{
|
||||
JobLabel: jobLabel,
|
||||
Config: cfg,
|
||||
}
|
||||
}
|
||||
|
||||
log.Printf("Created %d tasks for job %s", len(tasks), jobLabel)
|
||||
return tasks, nil
|
||||
}
|
||||
|
||||
// resolve produces a mapping of required job refs to the partitions it produces
|
||||
func resolve(outputRefs []string) (map[string][]string, error) {
|
||||
lookupPath := os.Getenv("DATABUILD_JOB_LOOKUP_PATH")
|
||||
|
||||
// Run the job lookup
|
||||
log.Printf("Executing job lookup: %s %v", lookupPath, outputRefs)
|
||||
cmd := exec.Command(lookupPath, outputRefs...)
|
||||
var stdout, stderr strings.Builder
|
||||
cmd.Stdout = &stdout
|
||||
cmd.Stderr = &stderr
|
||||
err := cmd.Run()
|
||||
if err != nil {
|
||||
log.Printf("Job lookup failed: %s", stderr.String())
|
||||
return nil, fmt.Errorf("Failed to run job lookup: %s", stderr.String())
|
||||
}
|
||||
log.Printf("Job lookup succeeded for %d output refs", len(outputRefs))
|
||||
|
||||
// Parse the result
|
||||
var result map[string][]string
|
||||
err = json.Unmarshal([]byte(stdout.String()), &result)
|
||||
if err != nil {
|
||||
log.Printf("Error parsing job lookup result: %s", err)
|
||||
return nil, fmt.Errorf("Failed to parse job lookup result: %s", err)
|
||||
}
|
||||
|
||||
log.Printf("Job lookup found %d job mappings", len(result))
|
||||
for job, refs := range result {
|
||||
log.Printf(" Job %s produces %d refs", job, len(refs))
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
|
||||
// plan creates a job graph for given output references
|
||||
func plan(outputRefs []string) (*JobGraph, error) {
|
||||
log.Printf("Starting planning for %d output refs: %v", len(outputRefs), outputRefs)
|
||||
unhandledRefs := make(map[string]bool)
|
||||
for _, ref := range outputRefs {
|
||||
unhandledRefs[ref] = true
|
||||
}
|
||||
epoch := 0
|
||||
var nodes []Task
|
||||
|
||||
for len(unhandledRefs) > 0 {
|
||||
if epoch >= 1000 {
|
||||
log.Printf("Planning timeout: still planning after %d epochs, giving up", epoch)
|
||||
return nil, fmt.Errorf("Still planning after %d epochs, giving up", epoch)
|
||||
}
|
||||
|
||||
log.Printf("Planning epoch %d with %d unhandled refs", epoch, len(unhandledRefs))
|
||||
// Resolve jobs for all unhandled refs
|
||||
unhandledRefsList := make([]string, 0, len(unhandledRefs))
|
||||
for ref := range unhandledRefs {
|
||||
unhandledRefsList = append(unhandledRefsList, ref)
|
||||
}
|
||||
jobRefs, err := resolve(unhandledRefsList)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Configure jobs
|
||||
var newNodes []Task
|
||||
for jobLabel, producedRefs := range jobRefs {
|
||||
tasks, err := configure(jobLabel, producedRefs)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
newNodes = append(newNodes, tasks...)
|
||||
|
||||
// Remove handled refs
|
||||
for _, ref := range producedRefs {
|
||||
delete(unhandledRefs, ref)
|
||||
}
|
||||
}
|
||||
|
||||
if len(unhandledRefs) > 0 {
|
||||
log.Printf("Error: Still have unhandled refs after configuration phase: %v", unhandledRefs)
|
||||
return nil, fmt.Errorf("Should have no unhandled refs after configuration phase, but had: %v", unhandledRefs)
|
||||
}
|
||||
epoch++
|
||||
|
||||
// Add new nodes to the graph
|
||||
nodes = append(nodes, newNodes...)
|
||||
log.Printf("Planning epoch %d completed: added %d new nodes, total nodes: %d", epoch, len(newNodes), len(nodes))
|
||||
|
||||
// Plan next epoch
|
||||
newUnhandledCount := 0
|
||||
for _, task := range newNodes {
|
||||
for _, input := range task.Config.Inputs {
|
||||
if input.DepType == Materialize {
|
||||
if !unhandledRefs[input.Ref] {
|
||||
newUnhandledCount++
|
||||
}
|
||||
unhandledRefs[input.Ref] = true
|
||||
}
|
||||
}
|
||||
}
|
||||
if newUnhandledCount > 0 {
|
||||
log.Printf("Added %d new unhandled refs for next planning epoch", newUnhandledCount)
|
||||
}
|
||||
}
|
||||
|
||||
if len(nodes) > 0 {
|
||||
log.Printf("Planning complete: created graph with %d nodes for %d output refs", len(nodes), len(outputRefs))
|
||||
return &JobGraph{
|
||||
Outputs: outputRefs,
|
||||
Nodes: nodes,
|
||||
}, nil
|
||||
} else {
|
||||
log.Printf("Planning failed: no nodes created for output refs %v", outputRefs)
|
||||
return nil, fmt.Errorf("Unknown failure in graph planning")
|
||||
}
|
||||
}
|
||||
|
||||
func main() {
|
||||
mode := os.Getenv("DATABUILD_MODE")
|
||||
log.Printf("Starting analyze.go in mode: %s", mode)
|
||||
|
||||
if mode == "plan" {
|
||||
// Get output refs from command line arguments
|
||||
outputRefs := os.Args[1:]
|
||||
graph, err := plan(outputRefs)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error: %s\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// Output the job graph as JSON
|
||||
jsonData, err := json.Marshal(graph)
|
||||
if err != nil {
|
||||
log.Printf("Error marshaling job graph: %s", err)
|
||||
fmt.Fprintf(os.Stderr, "Error marshaling job graph: %s\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
log.Printf("Successfully generated job graph with %d nodes", len(graph.Nodes))
|
||||
fmt.Println(string(jsonData))
|
||||
} else if mode == "lookup" {
|
||||
// Get output refs from command line arguments
|
||||
outputRefs := os.Args[1:]
|
||||
result, err := resolve(outputRefs)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "Error: %s\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// Output the result as JSON
|
||||
jsonData, err := json.Marshal(result)
|
||||
if err != nil {
|
||||
log.Printf("Error marshaling lookup result: %s", err)
|
||||
fmt.Fprintf(os.Stderr, "Error marshaling lookup result: %s\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
log.Printf("Successfully completed lookup for %d output refs with %d job mappings", len(outputRefs), len(result))
|
||||
fmt.Println(string(jsonData))
|
||||
} else if mode == "import_test" {
|
||||
log.Printf("Running in import_test mode")
|
||||
fmt.Println("ok :)")
|
||||
log.Printf("Import test completed successfully")
|
||||
} else {
|
||||
log.Printf("Error: Unknown mode '%s'", mode)
|
||||
fmt.Fprintf(os.Stderr, "Unknown MODE `%s`\n", mode)
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
144
graph/analyze.py
Normal file
144
graph/analyze.py
Normal file
|
|
@ -0,0 +1,144 @@
|
|||
import os
import sys

from pydantic import TypeAdapter
from pydantic.dataclasses import dataclass
from typing_extensions import Literal
import subprocess

# Required configuration, read eagerly at import time so a misconfigured
# environment fails fast with a KeyError rather than mid-run.
# Operating mode: "plan", "lookup", or "import_test" (see main()).
MODE = os.environ["DATABUILD_MODE"]
# Path to the executable that maps partition refs to the jobs producing them.
LOOKUP_PATH = os.environ["DATABUILD_JOB_LOOKUP_PATH"]
# Comma-separated job labels eligible for configuration (see configure()).
CANDIDATE_JOBS = os.environ["DATABUILD_CANDIDATE_JOBS"].split(",")
|
||||
|
||||
|
||||
class ImmutableConfig:
    """Shared pydantic dataclass config: freeze instances after construction.

    Frozen instances can be stored in sets during planning (see plan()),
    provided their fields are hashable.
    """
    frozen = True
|
||||
|
||||
|
||||
@dataclass(config=ImmutableConfig)
class DataDep:
    """A single data dependency of a job."""

    # "materialize" deps must themselves be planned/built (plan() chases these);
    # "query" deps are consumed as-is.
    depType: Literal["query", "materialize"]
    # Partition reference this dependency points at.
    ref: str
|
||||
|
||||
|
||||
@dataclass(config=ImmutableConfig)
class JobConfig:
    """One concrete invocation of a job: its deps, products, and launch spec."""

    # Data dependencies required before the job can run.
    inputs: list[DataDep]
    # Partition refs this invocation produces.
    outputs: list[str]
    # Command-line arguments for the job executable.
    args: list[str]
    # Environment variables for the job process.
    env: dict[str, str]

    def __hash__(self):
        """Hash over immutable views of the fields.

        Needed because list/dict fields make the frozen dataclass unhashable
        by default, and Task/JobConfig instances are kept in sets in plan().
        """
        return hash((
            tuple(self.inputs),
            tuple(self.outputs),
            tuple(self.args),
            tuple(sorted(self.env.items()))
        ))
|
||||
|
||||
|
||||
@dataclass(config=ImmutableConfig)
class Task:
    """A planned node: a job label paired with one configured invocation."""

    # Bazel-style label of the job (e.g. "//pkg:job").
    jobLabel: str
    # The concrete configuration produced by that job's configure step.
    config: JobConfig
|
||||
|
||||
|
||||
@dataclass(config=ImmutableConfig)
class JobGraph:
    """The result of planning: all tasks needed to produce the requested outputs."""

    # The originally requested partition refs.
    outputs: list[str]
    # Every task that must run, across all planning epochs.
    nodes: list[Task]
|
||||
|
||||
|
||||
@dataclass(config=ImmutableConfig)
class PartitionManifest:
    """Record of a produced partition and the run that built it."""

    # NOTE(review): singular `str` while JobGraph.outputs is `list[str]` —
    # possibly should be a list; confirm against producers of this manifest.
    outputs: str
    # Dependencies that were consumed to build the partition.
    inputs: list[DataDep]
    # Run timestamps; units (seconds vs millis since epoch) not established here — confirm.
    startTime: int
    endTime: int
    # TODO should be a runnable?
    config: JobConfig
|
||||
|
||||
|
||||
def job_label_to_cfg_path(job_label: str) -> str:
    """Map a job label (e.g. ``//pkg:job``) to its configure-binary path.

    Colons become path separators and a ``.cfg`` suffix is appended.
    """
    return f"{job_label.replace(':', '/')}.cfg"
|
||||
|
||||
|
||||
def configure(job_label: str, output_refs: list[str]) -> list[Task]:
    """
    Configures the specified job to produce the desired outputs, returning the list of tasks that will achieve this.

    Runs the job's configure binary (derived via job_label_to_cfg_path) with the
    desired refs as argv, and parses its stdout as a JSON list of JobConfig.

    Raises:
        ValueError: if ``job_label`` is not one of the candidate jobs.
        RuntimeError: if the configure subprocess exits non-zero.
    """
    # Explicit raise instead of `assert`: asserts are stripped under `python -O`,
    # and this is input validation, not an internal invariant.
    if job_label not in CANDIDATE_JOBS:
        raise ValueError(f"Job `{job_label}` is not a candidate job")
    proc = subprocess.run(
        [job_label_to_cfg_path(job_label), *output_refs],
        capture_output=True,
        text=True,
    )
    if proc.returncode != 0:
        raise RuntimeError(f"Failed to run job config: {proc.stderr}")
    job_configs = TypeAdapter(list[JobConfig]).validate_json(proc.stdout)
    return [Task(jobLabel = job_label, config = cfg) for cfg in job_configs]
|
||||
|
||||
|
||||
def resolve(output_refs: set[str]) -> dict[str, list[str]]:
    """Produce a mapping of required job labels to the partition refs each produces.

    Shells out to the lookup executable at LOOKUP_PATH with the refs as argv
    and parses its stdout as JSON.
    """
    completed = subprocess.run(
        [LOOKUP_PATH, *output_refs],
        capture_output=True,
        text=True,
    )
    if completed.returncode != 0:
        raise RuntimeError(f"Failed to run job lookup: {completed.stderr}")
    mapping_adapter = TypeAdapter(dict[str, list[str]])
    return mapping_adapter.validate_json(completed.stdout)
|
||||
|
||||
|
||||
def plan(output_refs: list[str]) -> JobGraph:
    """Plan the job graph needed to materialize ``output_refs``.

    Iteratively resolves unhandled refs to jobs, configures those jobs into
    Tasks, and queues each new task's "materialize" inputs for the next epoch
    until no unhandled refs remain.

    Raises:
        RuntimeError: if planning does not converge within 1000 epochs, if
            refs remain unhandled after a configuration phase, or if no nodes
            were produced at all.
    """
    unhandled_refs = set(output_refs)
    epoch = 0
    nodes: set[Task] = set()
    while unhandled_refs:
        if epoch >= 1000:
            raise RuntimeError(f"Still planning after {epoch} epochs, giving up")

        # Resolve jobs for all currently-unhandled refs.
        new_nodes: set[Task] = set()
        for job_label, produced_refs in resolve(output_refs=unhandled_refs).items():
            # TODO do we need to try and merge jobs later? E.g. discovering a partition ref in a later epoch that can
            # be produced by an earlier resolved job config
            # BUG FIX: was `new_nodes += configure(...)` — sets don't support
            # `+=` with a list (TypeError); use update() instead.
            new_nodes.update(configure(job_label=job_label, output_refs=produced_refs))
            unhandled_refs -= set(produced_refs)

        # Explicit raise (asserts vanish under -O); message matches the Go
        # implementation ("Should have no unhandled refs ...").
        if unhandled_refs:
            raise RuntimeError(
                f"Should have no unhandled refs after configuration phase, but had: {unhandled_refs}"
            )
        epoch += 1

        # BUG FIX: accumulate this epoch's tasks into the graph; previously
        # `nodes` was never updated, so planning always ended in failure.
        nodes |= new_nodes

        # Plan next epoch: chase the "materialize" inputs of the new tasks.
        unhandled_refs = {
            input_dep.ref
            for task in new_nodes
            for input_dep in task.config.inputs
            if input_dep.depType == "materialize"
        }

    if nodes:
        return JobGraph(
            outputs=output_refs,
            nodes=list(nodes),
        )
    else:
        raise RuntimeError("Unknown failure in graph planning")
|
||||
|
||||
|
||||
def main():
    """CLI entry point; dispatches on the DATABUILD_MODE environment variable.

    Modes:
        plan        -- argv holds output refs; print the planned JobGraph as JSON.
        lookup      -- argv holds output refs; print the job->partitions mapping as JSON.
        import_test -- print a sentinel so wrapper scripts can verify the binary starts.
    """
    if MODE == "plan":
        # BUG FIX: dump_json() returns bytes; decode so stdout carries clean
        # JSON text instead of the repr `b'{...}'`.
        print(TypeAdapter(JobGraph).dump_json(plan(sys.argv[1:])).decode())
    elif MODE == "lookup":
        print(TypeAdapter(dict[str, list[str]]).dump_json(resolve(sys.argv[1:])).decode())
    elif MODE == "import_test":
        print("ok :)")
    else:
        raise RuntimeError(f"Unknown MODE `{MODE}`")
|
||||
|
||||
|
||||
# Only run the CLI when executed directly (not when imported).
if __name__ == '__main__':
    main()
|
||||
11
graph/go_analyze_wrapper.sh.tpl
Executable file
11
graph/go_analyze_wrapper.sh.tpl
Executable file
|
|
@ -0,0 +1,11 @@
|
|||
#!/bin/bash
# Template wrapper for the go_analyze binary. %{...} placeholders are filled by
# ctx.actions.expand_template in the Bazel rule before this script is emitted.
set -e

%{RUNFILES_PREFIX}

%{PREFIX}

# Resolve the analyzer binary through the runfiles tree.
# NOTE(review): the trailing underscore in the rlocation path mirrors Bazel's
# go_binary output-directory mangling — confirm against the generated runfiles layout.
EXECUTABLE_BINARY="$(rlocation "databuild+/graph/$(basename "%{EXECUTABLE_PATH}")_")/go_analyze"

# Run the configuration
exec "${EXECUTABLE_BINARY}" "$@"
|
||||
5
graph/test/BUILD.bazel
Normal file
5
graph/test/BUILD.bazel
Normal file
|
|
@ -0,0 +1,5 @@
|
|||
# Smoke test wrapper: runs the Go analyzer binary in import_test mode.
sh_test(
    name = "analyze_test",
    srcs = ["analyze_test.sh"],
    # The analyzer binary must be staged into the test's runfiles.
    data = ["//graph:go_analyze"],
)
|
||||
3
graph/test/analyze_test.sh
Executable file
3
graph/test/analyze_test.sh
Executable file
|
|
@ -0,0 +1,3 @@
|
|||
#!/usr/bin/env bash
# Smoke test: run the analyzer in import_test mode and require its sentinel output.
# BUG FIX: previously had no `set -e`/output check, so a silently-misbehaving
# binary that still exited 0 would pass.
set -euo pipefail

# The binary reads these env vars at startup; dummy values suffice in import_test mode.
out="$(DATABUILD_MODE=import_test DATABUILD_JOB_LOOKUP_PATH=foo DATABUILD_CANDIDATE_JOBS=bar graph/go_analyze)"
[[ "$out" == *"ok :)"* ]]
|
||||
|
|
@ -1,4 +1,3 @@
|
|||
exports_files([
|
||||
"configure_wrapper.sh.tpl",
|
||||
"execute_wrapper.sh.tpl",
|
||||
])
|
||||
|
|
|
|||
|
|
@ -1,35 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# TODO should this be extracted to shared init script
|
||||
# Get the directory where the script is located
|
||||
if [[ -z "${RUNFILES_DIR:-}" ]]; then
|
||||
SCRIPT_DIR="$(readlink -f "${BASH_SOURCE[0]}")"
|
||||
# Set RUNFILES_DIR relative to the script location
|
||||
export RUNFILES_DIR="${SCRIPT_DIR}.runfiles"
|
||||
fi
|
||||
|
||||
# --- begin runfiles.bash initialization v3 ---
|
||||
# Copy-pasted from the Bazel Bash runfiles library v3.
|
||||
set -uo pipefail; set +e; f=bazel_tools/tools/bash/runfiles/runfiles.bash
|
||||
source "${RUNFILES_DIR:-/dev/null}/$f" 2>/dev/null || \
|
||||
source "$(grep -sm1 "^$f " "${RUNFILES_MANIFEST_FILE:-/dev/null}" | cut -f2- -d' ')" 2>/dev/null || \
|
||||
source "$0.runfiles/$f" 2>/dev/null || \
|
||||
source "$(grep -sm1 "^$f " "$0.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \
|
||||
source "$(grep -sm1 "^$f " "$0.exe.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \
|
||||
{ echo>&2 "ERROR: cannot find $f"; exit 1; }; f=; set -e
|
||||
# --- end runfiles.bash initialization v3 ---
|
||||
|
||||
# Set up JAVA_RUNFILES if not already set
|
||||
if [[ -z "${JAVA_RUNFILES:-}" ]]; then
|
||||
if [[ -d "$0.runfiles" ]]; then
|
||||
export JAVA_RUNFILES="$0.runfiles"
|
||||
elif [[ -f "${RUNFILES_MANIFEST_FILE:-}" ]]; then
|
||||
export JAVA_RUNFILES="$(dirname "${RUNFILES_MANIFEST_FILE}")"
|
||||
fi
|
||||
fi
|
||||
|
||||
CONFIGURE_BINARY="$(rlocation "_main/$(basename "%{CONFIGURE_PATH}")")"
|
||||
|
||||
# Run the configuration
|
||||
exec "${CONFIGURE_BINARY}" "$@"
|
||||
|
|
@ -1,33 +1,7 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# TODO should this be extracted to shared init script
|
||||
# Get the directory where the script is located
|
||||
if [[ -z "${RUNFILES_DIR:-}" ]]; then
|
||||
SCRIPT_DIR="$(readlink -f "${BASH_SOURCE[0]}")"
|
||||
# Set RUNFILES_DIR relative to the script location
|
||||
export RUNFILES_DIR="${SCRIPT_DIR}.runfiles"
|
||||
fi
|
||||
|
||||
# --- begin runfiles.bash initialization v3 ---
|
||||
# Copy-pasted from the Bazel Bash runfiles library v3.
|
||||
set -uo pipefail; set +e; f=bazel_tools/tools/bash/runfiles/runfiles.bash
|
||||
source "${RUNFILES_DIR:-/dev/null}/$f" 2>/dev/null || \
|
||||
source "$(grep -sm1 "^$f " "${RUNFILES_MANIFEST_FILE:-/dev/null}" | cut -f2- -d' ')" 2>/dev/null || \
|
||||
source "$0.runfiles/$f" 2>/dev/null || \
|
||||
source "$(grep -sm1 "^$f " "$0.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \
|
||||
source "$(grep -sm1 "^$f " "$0.exe.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \
|
||||
{ echo>&2 "ERROR: cannot find $f"; exit 1; }; f=; set -e
|
||||
# --- end runfiles.bash initialization v3 ---
|
||||
|
||||
# Set up JAVA_RUNFILES if not already set
|
||||
if [[ -z "${JAVA_RUNFILES:-}" ]]; then
|
||||
if [[ -d "$0.runfiles" ]]; then
|
||||
export JAVA_RUNFILES="$0.runfiles"
|
||||
elif [[ -f "${RUNFILES_MANIFEST_FILE:-}" ]]; then
|
||||
export JAVA_RUNFILES="$(dirname "${RUNFILES_MANIFEST_FILE}")"
|
||||
fi
|
||||
fi
|
||||
%{RUNFILES_PREFIX}
|
||||
|
||||
EXECUTE_BINARY="$(rlocation "_main/$(basename "%{EXECUTE_PATH}")")"
|
||||
JQ="$(rlocation "databuild+/runtime/$(basename "%{JQ_PATH}")")"
|
||||
|
|
|
|||
1
requirements.in
Normal file
1
requirements.in
Normal file
|
|
@ -0,0 +1 @@
|
|||
pydantic==2.11.3
|
||||
126
requirements_lock.txt
Normal file
126
requirements_lock.txt
Normal file
|
|
@ -0,0 +1,126 @@
|
|||
#
|
||||
# This file is autogenerated by pip-compile with Python 3.13
|
||||
# by the following command:
|
||||
#
|
||||
# bazel run //:py_requirements.update
|
||||
#
|
||||
annotated-types==0.7.0 \
|
||||
--hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 \
|
||||
--hash=sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89
|
||||
# via pydantic
|
||||
pydantic==2.11.3 \
|
||||
--hash=sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3 \
|
||||
--hash=sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f
|
||||
# via -r requirements.in
|
||||
pydantic-core==2.33.1 \
|
||||
--hash=sha256:0483847fa9ad5e3412265c1bd72aad35235512d9ce9d27d81a56d935ef489672 \
|
||||
--hash=sha256:048831bd363490be79acdd3232f74a0e9951b11b2b4cc058aeb72b22fdc3abe1 \
|
||||
--hash=sha256:048c01eee07d37cbd066fc512b9d8b5ea88ceeb4e629ab94b3e56965ad655add \
|
||||
--hash=sha256:049e0de24cf23766f12cc5cc71d8abc07d4a9deb9061b334b62093dedc7cb068 \
|
||||
--hash=sha256:08530b8ac922003033f399128505f513e30ca770527cc8bbacf75a84fcc2c74b \
|
||||
--hash=sha256:0fb935c5591573ae3201640579f30128ccc10739b45663f93c06796854405505 \
|
||||
--hash=sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8 \
|
||||
--hash=sha256:177d50460bc976a0369920b6c744d927b0ecb8606fb56858ff542560251b19e5 \
|
||||
--hash=sha256:1a28239037b3d6f16916a4c831a5a0eadf856bdd6d2e92c10a0da3a59eadcf3e \
|
||||
--hash=sha256:1b30d92c9412beb5ac6b10a3eb7ef92ccb14e3f2a8d7732e2d739f58b3aa7544 \
|
||||
--hash=sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4 \
|
||||
--hash=sha256:1d20eb4861329bb2484c021b9d9a977566ab16d84000a57e28061151c62b349a \
|
||||
--hash=sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a \
|
||||
--hash=sha256:25626fb37b3c543818c14821afe0fd3830bc327a43953bc88db924b68c5723f1 \
|
||||
--hash=sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266 \
|
||||
--hash=sha256:2ea62419ba8c397e7da28a9170a16219d310d2cf4970dbc65c32faf20d828c83 \
|
||||
--hash=sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764 \
|
||||
--hash=sha256:2f9284e11c751b003fd4215ad92d325d92c9cb19ee6729ebd87e3250072cdcde \
|
||||
--hash=sha256:3077cfdb6125cc8dab61b155fdd714663e401f0e6883f9632118ec12cf42df26 \
|
||||
--hash=sha256:32cd11c5914d1179df70406427097c7dcde19fddf1418c787540f4b730289896 \
|
||||
--hash=sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18 \
|
||||
--hash=sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939 \
|
||||
--hash=sha256:398a38d323f37714023be1e0285765f0a27243a8b1506b7b7de87b647b517e48 \
|
||||
--hash=sha256:3a371dc00282c4b84246509a5ddc808e61b9864aa1eae9ecc92bb1268b82db4a \
|
||||
--hash=sha256:3a64e81e8cba118e108d7126362ea30e021291b7805d47e4896e52c791be2761 \
|
||||
--hash=sha256:3ab2d36e20fbfcce8f02d73c33a8a7362980cff717926bbae030b93ae46b56c7 \
|
||||
--hash=sha256:3f1fdb790440a34f6ecf7679e1863b825cb5ffde858a9197f851168ed08371e5 \
|
||||
--hash=sha256:3f2648b9262607a7fb41d782cc263b48032ff7a03a835581abbf7a3bec62bcf5 \
|
||||
--hash=sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d \
|
||||
--hash=sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e \
|
||||
--hash=sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3 \
|
||||
--hash=sha256:5183e4f6a2d468787243ebcd70cf4098c247e60d73fb7d68d5bc1e1beaa0c4db \
|
||||
--hash=sha256:5277aec8d879f8d05168fdd17ae811dd313b8ff894aeeaf7cd34ad28b4d77e33 \
|
||||
--hash=sha256:52928d8c1b6bda03cc6d811e8923dffc87a2d3c8b3bfd2ce16471c7147a24850 \
|
||||
--hash=sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde \
|
||||
--hash=sha256:5773da0ee2d17136b1f1c6fbde543398d452a6ad2a7b54ea1033e2daa739b8d2 \
|
||||
--hash=sha256:5ab77f45d33d264de66e1884fca158bc920cb5e27fd0764a72f72f5756ae8bdb \
|
||||
--hash=sha256:5c834f54f8f4640fd7e4b193f80eb25a0602bba9e19b3cd2fc7ffe8199f5ae02 \
|
||||
--hash=sha256:5ccd429694cf26af7997595d627dd2637e7932214486f55b8a357edaac9dae8c \
|
||||
--hash=sha256:681d65e9011f7392db5aa002b7423cc442d6a673c635668c227c6c8d0e5a4f77 \
|
||||
--hash=sha256:694ad99a7f6718c1a498dc170ca430687a39894a60327f548e02a9c7ee4b6504 \
|
||||
--hash=sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516 \
|
||||
--hash=sha256:6e966fc3caaf9f1d96b349b0341c70c8d6573bf1bac7261f7b0ba88f96c56c24 \
|
||||
--hash=sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a \
|
||||
--hash=sha256:723c5630c4259400818b4ad096735a829074601805d07f8cafc366d95786d331 \
|
||||
--hash=sha256:7965c13b3967909a09ecc91f21d09cfc4576bf78140b988904e94f130f188396 \
|
||||
--hash=sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c \
|
||||
--hash=sha256:7edbc454a29fc6aeae1e1eecba4f07b63b8d76e76a748532233c4c167b4cb9ea \
|
||||
--hash=sha256:7fb66263e9ba8fea2aa85e1e5578980d127fb37d7f2e292773e7bc3a38fb0c7b \
|
||||
--hash=sha256:87d3776f0001b43acebfa86f8c64019c043b55cc5a6a2e313d728b5c95b46969 \
|
||||
--hash=sha256:8ab581d3530611897d863d1a649fb0644b860286b4718db919bfd51ece41f10b \
|
||||
--hash=sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea \
|
||||
--hash=sha256:8ffab8b2908d152e74862d276cf5017c81a2f3719f14e8e3e8d6b83fda863927 \
|
||||
--hash=sha256:902dbc832141aa0ec374f4310f1e4e7febeebc3256f00dc359a9ac3f264a45dc \
|
||||
--hash=sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e \
|
||||
--hash=sha256:91815221101ad3c6b507804178a7bb5cb7b2ead9ecd600041669c8d805ebd595 \
|
||||
--hash=sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d \
|
||||
--hash=sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498 \
|
||||
--hash=sha256:9d3da303ab5f378a268fa7d45f37d7d85c3ec19769f28d2cc0c61826a8de21fe \
|
||||
--hash=sha256:9f466e8bf0a62dc43e068c12166281c2eca72121dd2adc1040f3aa1e21ef8599 \
|
||||
--hash=sha256:9fea9c1869bb4742d174a57b4700c6dadea951df8b06de40c2fedb4f02931c2e \
|
||||
--hash=sha256:a0d5f3acc81452c56895e90643a625302bd6be351e7010664151cc55b7b97f89 \
|
||||
--hash=sha256:a3edde68d1a1f9af1273b2fe798997b33f90308fb6d44d8550c89fc6a3647cf6 \
|
||||
--hash=sha256:a62c3c3ef6a7e2c45f7853b10b5bc4ddefd6ee3cd31024754a1a5842da7d598d \
|
||||
--hash=sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523 \
|
||||
--hash=sha256:ab0277cedb698749caada82e5d099dc9fed3f906a30d4c382d1a21725777a1e5 \
|
||||
--hash=sha256:ad05b683963f69a1d5d2c2bdab1274a31221ca737dbbceaa32bcb67359453cdd \
|
||||
--hash=sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d \
|
||||
--hash=sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a \
|
||||
--hash=sha256:bae370459da6a5466978c0eacf90690cb57ec9d533f8e63e564ef3822bfa04fe \
|
||||
--hash=sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df \
|
||||
--hash=sha256:bdc84017d28459c00db6f918a7272a5190bec3090058334e43a76afb279eac7c \
|
||||
--hash=sha256:bfd0adeee563d59c598ceabddf2c92eec77abcb3f4a391b19aa7366170bd9e30 \
|
||||
--hash=sha256:c566dd9c5f63d22226409553531f89de0cac55397f2ab8d97d6f06cfce6d947e \
|
||||
--hash=sha256:c91dbb0ab683fa0cd64a6e81907c8ff41d6497c346890e26b23de7ee55353f96 \
|
||||
--hash=sha256:c964fd24e6166420d18fb53996d8c9fd6eac9bf5ae3ec3d03015be4414ce497f \
|
||||
--hash=sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3 \
|
||||
--hash=sha256:d100e3ae783d2167782391e0c1c7a20a31f55f8015f3293647544df3f9c67824 \
|
||||
--hash=sha256:d3a07fadec2a13274a8d861d3d37c61e97a816beae717efccaa4b36dfcaadcde \
|
||||
--hash=sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d \
|
||||
--hash=sha256:de9e06abe3cc5ec6a2d5f75bc99b0bdca4f5c719a5b34026f8c57efbdecd2ee3 \
|
||||
--hash=sha256:df6a94bf9452c6da9b5d76ed229a5683d0306ccb91cca8e1eea883189780d568 \
|
||||
--hash=sha256:e100c52f7355a48413e2999bfb4e139d2977a904495441b374f3d4fb4a170961 \
|
||||
--hash=sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4 \
|
||||
--hash=sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda \
|
||||
--hash=sha256:e3de2777e3b9f4d603112f78006f4ae0acb936e95f06da6cb1a45fbad6bdb4b5 \
|
||||
--hash=sha256:e7aaba1b4b03aaea7bb59e1b5856d734be011d3e6d98f5bcaa98cb30f375f2ad \
|
||||
--hash=sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db \
|
||||
--hash=sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd \
|
||||
--hash=sha256:ed3eb16d51257c763539bde21e011092f127a2202692afaeaccb50db55a31383 \
|
||||
--hash=sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40 \
|
||||
--hash=sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f \
|
||||
--hash=sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b \
|
||||
--hash=sha256:f59295ecc75a1788af8ba92f2e8c6eeaa5a94c22fc4d151e8d9638814f85c8fc \
|
||||
--hash=sha256:f995719707e0e29f0f41a8aa3bcea6e761a36c9136104d3189eafb83f5cec5e5 \
|
||||
--hash=sha256:f99aeda58dce827f76963ee87a0ebe75e648c72ff9ba1174a253f6744f518f65 \
|
||||
--hash=sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39 \
|
||||
--hash=sha256:fc903512177361e868bc1f5b80ac8c8a6e05fcdd574a5fb5ffeac5a9982b9e89 \
|
||||
--hash=sha256:fe44d56aa0b00d66640aa84a3cbe80b7a3ccdc6f0b1ca71090696a6d4777c091
|
||||
# via pydantic
|
||||
typing-extensions==4.13.2 \
|
||||
--hash=sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c \
|
||||
--hash=sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef
|
||||
# via
|
||||
# pydantic
|
||||
# pydantic-core
|
||||
# typing-inspection
|
||||
typing-inspection==0.4.0 \
|
||||
--hash=sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f \
|
||||
--hash=sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122
|
||||
# via pydantic
|
||||
220
rules.bzl
220
rules.bzl
|
|
@ -1,3 +1,30 @@
|
|||
|
||||
RUNFILES_PREFIX = """
|
||||
# ================= BEGIN RUNFILES INIT =================
|
||||
|
||||
# TODO should this be extracted to shared init script
|
||||
# Get the directory where the script is located
|
||||
if [[ -z "${RUNFILES_DIR:-}" ]]; then
|
||||
SCRIPT_DIR="$(readlink -f "${BASH_SOURCE[0]}")"
|
||||
# Set RUNFILES_DIR relative to the script location
|
||||
export RUNFILES_DIR="${SCRIPT_DIR}.runfiles"
|
||||
fi
|
||||
|
||||
# --- begin runfiles.bash initialization v3 ---
|
||||
# Copy-pasted from the Bazel Bash runfiles library v3.
|
||||
set -uo pipefail; set +e; f=bazel_tools/tools/bash/runfiles/runfiles.bash
|
||||
source "${RUNFILES_DIR:-/dev/null}/$f" 2>/dev/null || \
|
||||
source "$(grep -sm1 "^$f " "${RUNFILES_MANIFEST_FILE:-/dev/null}" | cut -f2- -d' ')" 2>/dev/null || \
|
||||
source "$0.runfiles/$f" 2>/dev/null || \
|
||||
source "$(grep -sm1 "^$f " "$0.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \
|
||||
source "$(grep -sm1 "^$f " "$0.exe.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \
|
||||
{ echo>&2 "ERROR: cannot find $f"; exit 1; }; f=; set -e
|
||||
# --- end runfiles.bash initialization v3 ---
|
||||
|
||||
# ================== END RUNFILES INIT ==================
|
||||
|
||||
"""
|
||||
|
||||
def databuild_job(
|
||||
name,
|
||||
configure,
|
||||
|
|
@ -44,7 +71,9 @@ def _databuild_job_cfg_impl(ctx):
|
|||
template = ctx.file._template,
|
||||
output = script,
|
||||
substitutions = {
|
||||
"%{CONFIGURE_PATH}": configure_path,
|
||||
"%{EXECUTABLE_PATH}": configure_path,
|
||||
"%{RUNFILES_PREFIX}": RUNFILES_PREFIX,
|
||||
"%{PREFIX}": "",
|
||||
},
|
||||
is_executable = True,
|
||||
)
|
||||
|
|
@ -68,11 +97,11 @@ _databuild_job_cfg_rule = rule(
|
|||
"configure": attr.label(
|
||||
doc = "Target that implements the configuration logic",
|
||||
executable = True,
|
||||
cfg = "exec",
|
||||
cfg = "target",
|
||||
mandatory = True,
|
||||
),
|
||||
"_template": attr.label(
|
||||
default = "@databuild//job:configure_wrapper.sh.tpl",
|
||||
default = "@databuild//runtime:simple_executable_wrapper.sh.tpl",
|
||||
allow_single_file = True,
|
||||
),
|
||||
"_bash_runfiles": attr.label(
|
||||
|
|
@ -99,6 +128,7 @@ def _databuild_job_exec_impl(ctx):
|
|||
substitutions = {
|
||||
"%{JQ_PATH}": jq_path,
|
||||
"%{EXECUTE_PATH}": execute_path,
|
||||
"%{RUNFILES_PREFIX}": RUNFILES_PREFIX,
|
||||
},
|
||||
is_executable = True,
|
||||
)
|
||||
|
|
@ -133,7 +163,7 @@ _databuild_job_exec_rule = rule(
|
|||
doc = "Target that implements the execution logic",
|
||||
mandatory = True,
|
||||
executable = True,
|
||||
cfg = "exec",
|
||||
cfg = "target",
|
||||
),
|
||||
"_template": attr.label(
|
||||
default = "@databuild//job:execute_wrapper.sh.tpl",
|
||||
|
|
@ -142,7 +172,7 @@ _databuild_job_exec_rule = rule(
|
|||
"_jq": attr.label(
|
||||
default = "@databuild//runtime:jq",
|
||||
executable = True,
|
||||
cfg = "exec",
|
||||
cfg = "target",
|
||||
),
|
||||
"_bash_runfiles": attr.label(
|
||||
default = Label("@bazel_tools//tools/bash/runfiles"),
|
||||
|
|
@ -158,27 +188,7 @@ def _databuild_job_impl(ctx):
|
|||
ctx.actions.write(
|
||||
output = script,
|
||||
is_executable = True,
|
||||
content = """#!/bin/bash
|
||||
|
||||
# TODO should this be extracted to shared init script
|
||||
# Get the directory where the script is located
|
||||
if [[ -z "${{RUNFILES_DIR:-}}" ]]; then
|
||||
SCRIPT_DIR="$(readlink -f "${{BASH_SOURCE[0]}}")"
|
||||
# Set RUNFILES_DIR relative to the script location
|
||||
export RUNFILES_DIR="${{SCRIPT_DIR}}.runfiles"
|
||||
fi
|
||||
|
||||
# --- begin runfiles.bash initialization v3 ---
|
||||
# Copy-pasted from the Bazel Bash runfiles library v3.
|
||||
set -uo pipefail; set +e; f=bazel_tools/tools/bash/runfiles/runfiles.bash
|
||||
source "${{RUNFILES_DIR:-/dev/null}}/$f" 2>/dev/null || \
|
||||
source "$(grep -sm1 "^$f " "${{RUNFILES_MANIFEST_FILE:-/dev/null}}" | cut -f2- -d' ')" 2>/dev/null || \
|
||||
source "$0.runfiles/$f" 2>/dev/null || \
|
||||
source "$(grep -sm1 "^$f " "$0.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \
|
||||
source "$(grep -sm1 "^$f " "$0.exe.runfiles_manifest" | cut -f2- -d' ')" 2>/dev/null || \
|
||||
{{ echo>&2 "ERROR: cannot find $f"; exit 1; }}; f=; set -e
|
||||
# --- end runfiles.bash initialization v3 ---
|
||||
|
||||
content = RUNFILES_PREFIX + """
|
||||
$(rlocation _main/{configure_path}) $@ | $(rlocation _main/{execute_path})
|
||||
""".format(
|
||||
configure_path = ctx.attr.configure.files_to_run.executable.short_path,
|
||||
|
|
@ -215,14 +225,168 @@ _databuild_job_rule = rule(
|
|||
doc = "Target that implements the execution logic",
|
||||
mandatory = True,
|
||||
executable = True,
|
||||
cfg = "exec",
|
||||
cfg = "target",
|
||||
),
|
||||
},
|
||||
executable = True,
|
||||
)
|
||||
|
||||
def databuild_graph(name, jobs, plan, visibility = None):
|
||||
def databuild_graph(name, jobs, lookup, visibility = None):
|
||||
"""Creates a databuild graph target."""
|
||||
_databuild_graph_lookup(
|
||||
name = "%s.lookup" % name,
|
||||
lookup = lookup,
|
||||
visibility = visibility,
|
||||
)
|
||||
_databuild_graph_analyze(
|
||||
name = "%s.analyze" % name,
|
||||
lookup = "%s.lookup" % name,
|
||||
jobs = jobs,
|
||||
visibility = visibility,
|
||||
)
|
||||
|
||||
|
||||
# TODO there feels like a lot of boilerplate around wrapping a target with a script - can this be simplified?
|
||||
def _databuild_graph_lookup_impl(ctx):
|
||||
script = ctx.actions.declare_file(ctx.label.name)
|
||||
|
||||
ctx.actions.expand_template(
|
||||
template = ctx.file._template,
|
||||
output = script,
|
||||
substitutions = {
|
||||
"%{RUNFILES_PREFIX}": RUNFILES_PREFIX,
|
||||
"%{PREFIX}": "",
|
||||
"%{EXECUTABLE_PATH}": ctx.attr.lookup.files_to_run.executable.path,
|
||||
},
|
||||
is_executable = True,
|
||||
)
|
||||
|
||||
runfiles = ctx.runfiles(
|
||||
files = [ctx.executable.lookup],
|
||||
).merge(ctx.attr.lookup.default_runfiles).merge(
|
||||
ctx.attr._bash_runfiles.default_runfiles,
|
||||
)
|
||||
|
||||
return [
|
||||
DefaultInfo(
|
||||
executable = script,
|
||||
runfiles = runfiles,
|
||||
),
|
||||
]
|
||||
|
||||
_databuild_graph_lookup = rule(
|
||||
implementation = _databuild_graph_lookup_impl,
|
||||
attrs = {
|
||||
"lookup": attr.label(
|
||||
doc = "Target that implements job lookup for desired partition refs",
|
||||
mandatory = True,
|
||||
executable = True,
|
||||
cfg = "exec",
|
||||
),
|
||||
"_template": attr.label(
|
||||
default = "@databuild//runtime:simple_executable_wrapper.sh.tpl",
|
||||
allow_single_file = True,
|
||||
),
|
||||
"_bash_runfiles": attr.label(
|
||||
default = Label("@bazel_tools//tools/bash/runfiles"),
|
||||
allow_files = True,
|
||||
),
|
||||
},
|
||||
executable = True,
|
||||
)
|
||||
|
||||
def _databuild_graph_analyze_impl(ctx):
|
||||
script = ctx.actions.declare_file(ctx.label.name)
|
||||
|
||||
config_paths = {
|
||||
"//" + job.label.package + ":" +job.label.name:
|
||||
"$(rlocation _main/" + job[DataBuildJobInfo].configure.files_to_run.executable.short_path + ")"
|
||||
for job in ctx.attr.jobs
|
||||
}
|
||||
config_paths_str = "{" + ",".join(['\\"%s\\":\\"%s\\"' % (k, v) for k, v in config_paths.items()]) + "}"
|
||||
|
||||
candidate_job_env_var = "'" + ",".join([
|
||||
"//" + target.label.package + ":" +target.label.name
|
||||
for target in ctx.attr.jobs
|
||||
]) + "'"
|
||||
|
||||
env_setup = """
|
||||
export DATABUILD_CANDIDATE_JOBS="{candidate_job_env_var}"
|
||||
export DATABUILD_MODE=plan
|
||||
export DATABUILD_JOB_LOOKUP_PATH=$(rlocation _main/{lookup_path})
|
||||
""".format(
|
||||
candidate_job_env_var = config_paths_str,
|
||||
lookup_path = ctx.attr.lookup.files_to_run.executable.short_path,
|
||||
)
|
||||
|
||||
script_prefix = env_setup
|
||||
|
||||
ctx.actions.expand_template(
|
||||
template = ctx.file._template,
|
||||
output = script,
|
||||
substitutions = {
|
||||
"%{EXECUTABLE_PATH}": ctx.attr._analyze.files_to_run.executable.path,
|
||||
"%{RUNFILES_PREFIX}": RUNFILES_PREFIX,
|
||||
"%{PREFIX}": script_prefix,
|
||||
},
|
||||
is_executable = True,
|
||||
)
|
||||
|
||||
# Gather the configure executables
|
||||
configure_executables = [
|
||||
job[DataBuildJobInfo].configure.files_to_run.executable
|
||||
for job in ctx.attr.jobs
|
||||
]
|
||||
|
||||
runfiles = ctx.runfiles(
|
||||
files = [ctx.executable.lookup, ctx.executable._analyze] + configure_executables,
|
||||
).merge(ctx.attr.lookup.default_runfiles).merge(ctx.attr._analyze.default_runfiles).merge(
|
||||
ctx.attr._bash_runfiles.default_runfiles
|
||||
).merge_all([job.default_runfiles for job in ctx.attr.jobs])
|
||||
|
||||
# Merge runfiles from all configure targets
|
||||
for job in ctx.attr.jobs:
|
||||
configure_target = job[DataBuildJobInfo].configure
|
||||
runfiles = runfiles.merge(configure_target.default_runfiles)
|
||||
|
||||
return [
|
||||
DefaultInfo(
|
||||
executable = script,
|
||||
runfiles = runfiles,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
_databuild_graph_analyze = rule(
|
||||
implementation = _databuild_graph_analyze_impl,
|
||||
attrs = {
|
||||
"lookup": attr.label(
|
||||
doc = "Target that implements job lookup for desired partition refs",
|
||||
mandatory = True,
|
||||
executable = True,
|
||||
cfg = "exec",
|
||||
),
|
||||
"jobs": attr.label_list(
|
||||
doc = "The list of jobs that are candidates for building partitions in this databuild graph",
|
||||
allow_empty = False,
|
||||
),
|
||||
"_template": attr.label(
|
||||
default = "@databuild//graph:go_analyze_wrapper.sh.tpl",
|
||||
allow_single_file = True,
|
||||
),
|
||||
"_bash_runfiles": attr.label(
|
||||
default = Label("@bazel_tools//tools/bash/runfiles"),
|
||||
allow_files = True,
|
||||
),
|
||||
"_analyze": attr.label(
|
||||
default = "@databuild//graph:go_analyze",
|
||||
executable = True,
|
||||
cfg = "target",
|
||||
# cfg = "exec",
|
||||
)
|
||||
},
|
||||
executable = True,
|
||||
)
|
||||
|
||||
#def _graph_impl(name):
|
||||
# """
|
||||
|
|
|
|||
|
|
@ -1,6 +1,10 @@
|
|||
# In modules/jq/BUILD.bazel
|
||||
load("@bazel_skylib//lib:selects.bzl", "selects")
|
||||
|
||||
exports_files([
|
||||
"simple_executable_wrapper.sh.tpl",
|
||||
])
|
||||
|
||||
# Platform detection
|
||||
config_setting(
|
||||
name = "darwin",
|
||||
|
|
|
|||
11
runtime/simple_executable_wrapper.sh.tpl
Executable file
11
runtime/simple_executable_wrapper.sh.tpl
Executable file
|
|
@ -0,0 +1,11 @@
|
|||
#!/bin/bash
# Generic template wrapper for an executable staged in runfiles. %{...}
# placeholders are filled by ctx.actions.expand_template in the Bazel rules.
set -e

%{RUNFILES_PREFIX}

%{PREFIX}

# Resolve the wrapped binary inside the main repo's runfiles tree.
EXECUTABLE_BINARY="$(rlocation "_main/$(basename "%{EXECUTABLE_PATH}")")"

# Run the configuration
exec "${EXECUTABLE_BINARY}" "$@"
|
||||
Loading…
Reference in a new issue