(cherry picked from commit be8eb9d00d6a3fcac520bc7dfd5740c0b3ade746)
static void findJobs(EvalState & state, JSONObject & top,
    Bindings & autoArgs, Value & v, const string & attrPath);
    MyArgs() : MixCommonArgs("hydra-eval-jobs")
    {
        mkFlag()
            .longName("help")
            .description("show usage information")
            .handler([&]() {
                printHelp(programName, std::cout);
                throw Exit();
            });
static string queryMetaStrings(EvalState & state, DrvInfo & drv, const string & name, const string & subAttribute)
        mkFlag()
            .longName("dry-run")
            .description("don't create store derivations")
            .set(&dryRun, true);

        expectArg("expr", &releaseExpr);
    }
};

static MyArgs myArgs;

static std::string queryMetaStrings(EvalState & state, DrvInfo & drv,
    const string & name, const string & subAttribute)
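/* Note: judging by the calls elsewhere in this file (e.g. with
   ("license", "shortName") and ("maintainers", "email")), queryMetaStrings,
   declared above, flattens a meta attribute into a comma-separated string,
   reading `subAttribute` when the meta value is an attribute set. */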
static void findJobsWrapped(EvalState & state, JSONObject & top,
    Bindings & autoArgs, Value & vIn, const string & attrPath)
{
    if (lastAttrPath != "" && lte(attrPath, lastAttrPath)) return;
if (auto drv = getDerivation(state, *v, false)) {
auto drv = getDerivation(state, v, false);
job["nixName"] = drv->queryName();job["system"] =drv->querySystem();job["drvPath"] = drvPath;job["description"] = drv->queryMetaString("description");job["license"] = queryMetaStrings(state, *drv, "license", "shortName");job["homepage"] = drv->queryMetaString("homepage");job["maintainers"] = queryMetaStrings(state, *drv, "maintainers", "email");job["schedulingPriority"] = drv->queryMetaInt("schedulingPriority", 100);job["timeout"] = drv->queryMetaInt("timeout", 36000);job["maxSilent"] = drv->queryMetaInt("maxSilent", 7200);job["isChannel"] = drv->queryMetaBool("isHydraChannel", false);
        if (drv) {
            Path drvPath;
            /* If this is an aggregate, then get its constituents. */
            auto a = v->attrs->get(state.symbols.create("_hydraAggregate"));
            if (a && state.forceBool(*a->value, *a->pos)) {
                auto a = v->attrs->get(state.symbols.create("constituents"));
                if (!a)
                    throw EvalError("derivation must have a ‘constituents’ attribute");
        if (drv->querySystem() == "unknown")
            throw EvalError("derivation must have a ‘system’ attribute");

        if (comma) { std::cout << ","; comma = false; }

        {
            auto res = top.object(attrPath);
            res.attr("nixName", drv->queryName());
            res.attr("system", drv->querySystem());
            res.attr("drvPath", drvPath = drv->queryDrvPath());
            res.attr("description", drv->queryMetaString("description"));
            res.attr("license", queryMetaStrings(state, *drv, "license", "shortName"));
            res.attr("homepage", drv->queryMetaString("homepage"));
            res.attr("maintainers", queryMetaStrings(state, *drv, "maintainers", "email"));
            res.attr("schedulingPriority", drv->queryMetaInt("schedulingPriority", 100));
            res.attr("timeout", drv->queryMetaInt("timeout", 36000));
            res.attr("maxSilent", drv->queryMetaInt("maxSilent", 7200));
            res.attr("isChannel", drv->queryMetaBool("isHydraChannel", false));
                PathSet context;
                state.coerceToString(*a->pos, *a->value, context, true, false);
                for (auto & i : context)
                    if (i.at(0) == '!') {
                        size_t index = i.find("!", 1);
                        job["constituents"].push_back(string(i, index + 1));
                    }
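                /* Context entries for derivation outputs have the form
                   "!<outputName>!<drvPath>", so stripping everything up to
                   and including the second '!' yields the .drv path of each
                   constituent. */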
            /* If this is an aggregate, then get its constituents. */
            Bindings::iterator a = v.attrs->find(state.symbols.create("_hydraAggregate"));
            if (a != v.attrs->end() && state.forceBool(*a->value, *a->pos)) {
                Bindings::iterator a = v.attrs->find(state.symbols.create("constituents"));
                if (a == v.attrs->end())
                    throw EvalError("derivation must have a ‘constituents’ attribute");
                PathSet context;
                state.coerceToString(*a->pos, *a->value, context, true, false);
                PathSet drvs;
                for (auto & i : context)
                    if (i.at(0) == '!') {
                        size_t index = i.find("!", 1);
                        drvs.insert(string(i, index + 1));
                state.forceList(*a->value, *a->pos);
                for (unsigned int n = 0; n < a->value->listSize(); ++n) {
                    auto v = a->value->listElems()[n];
                    state.forceValue(*v);
                    if (v->type == tString)
                        job["namedConstituents"].push_back(state.forceStringNoCtx(*v));
            /* Register the derivation as a GC root.  !!! This
               registers roots for jobs that we may have already
               done. */
            auto localStore = state.store.dynamic_pointer_cast<LocalFSStore>();
            if (gcRootsDir != "" && localStore) {
                Path root = gcRootsDir + "/" + std::string(baseNameOf(drvPath));
                if (!pathExists(root))
                    localStore->addPermRoot(localStore->parseStorePath(drvPath), root, false);
            }

            auto res2 = res.object("outputs");
            for (auto & j : outputs)
                res2.attr(j.first, j.second);
        }
            /* Register the derivation as a GC root.  !!! This
               registers roots for jobs that we may have already
               done. */
            auto localStore = state.store.dynamic_pointer_cast<LocalFSStore>();
            if (gcRootsDir != "" && localStore) {
                Path root = gcRootsDir + "/" + std::string(baseNameOf(drvPath));
                if (!pathExists(root))
                    localStore->addPermRoot(localStore->parseStorePath(drvPath), root, false);
            }
    else {
        if (!state.isDerivation(v)) {
            for (auto & i : v.attrs->lexicographicOrder()) {
        else if (v->type == tAttrs) {
            auto attrs = nlohmann::json::array();
            StringSet ss;
            for (auto & i : v->attrs->lexicographicOrder()) {
    else if (v.type == tNull) {
        // allow null values, meaning 'do nothing'
            writeLine(to.get(), reply.dump());

            /* If our RSS exceeds the maximum, exit. The master will
               start a new process. */
            struct rusage r;
            getrusage(RUSAGE_SELF, &r);
            if ((size_t) r.ru_maxrss > maxMemorySize * 1024) break;
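            /* ru_maxrss is reported in kilobytes on Linux, so maxMemorySize
               is presumably expressed in megabytes here (hence the * 1024). */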
static void findJobs(EvalState & state, JSONObject & top,
    Bindings & autoArgs, Value & v, const string & attrPath)
{
    try {
        findJobsWrapped(state, top, autoArgs, v, attrPath);
    } catch (EvalError & e) {
        if (comma) { std::cout << ","; comma = false; }
        auto res = top.object(attrPath);
        res.attr("error", filterANSIEscapes(e.msg(), true));
    }
}
assert(lte("abc", "def"));assert(lte("abc", "def.foo"));assert(!lte("def", "abc"));assert(lte("nixpkgs.hello", "nixpkgs"));assert(lte("nixpkgs.hello", "nixpkgs.hellooo"));assert(lte("gitAndTools.git-annex.x86_64-darwin", "gitAndTools.git-annex.x86_64-linux"));assert(lte("gitAndTools.git-annex.x86_64-linux", "gitAndTools.git-annex-remote-b2.aarch64-linux"));
    /* Then make sure the maximum heap size will be bigger than the
       initial heap size. */
    if (initialHeapSizeInt > maxHeapSize) {
        printInfo("warning: evaluator_initial_heap_size (%d) bigger than evaluator_max_heap_size (%d).",
            initialHeapSizeInt, maxHeapSize);
        maxHeapSize = initialHeapSizeInt * 1.1;
        printInfo("         evaluator_max_heap_size now set to %d.", maxHeapSize);
    }
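    /* Illustrative configuration, assuming the two settings named in the
       warning above are read from the Hydra configuration (values made up):

         evaluator_initial_heap_size = 2147483648
         evaluator_max_heap_size = 4294967296
    */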
    /* FIXME: The build hook in conjunction with import-from-derivation
       is causing "unexpected EOF" during eval. */
    settings.builders = "";
struct MyArgs : LegacyArgs, MixEvalArgs
        if (myArgs.dryRun) settings.readOnlyMode = true;

        if (myArgs.releaseExpr == "") throw UsageError("no expression specified");

        if (gcRootsDir == "") printMsg(lvlError, "warning: `--gc-roots-dir' not specified");

struct State
    MyArgs myArgs(baseNameOf(argv[0]), [&](Strings::iterator & arg, const Strings::iterator & end) {
        if (*arg == "--gc-roots-dir")
            gcRootsDir = getArg(*arg, arg, end);
        else if (*arg == "--dry-run")
            settings.readOnlyMode = true;
        else if (*arg != "" && arg->at(0) == '-')
            return false;
        else
            releaseExpr = *arg;
        return true;
    });
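    /* Illustrative invocation matching the options parsed above
       (paths are made up):

         hydra-eval-jobs --gc-roots-dir /nix/var/nix/gcroots/hydra-evaluator \
             -I nixpkgs=/path/to/nixpkgs release.nix
    */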
    std::condition_variable wakeup;

    Sync<State> state_;

    /* Start a handler thread per worker process. */
    auto handler = [&]()
    {
        try {
            pid_t pid = -1;
            AutoCloseFD from, to;

            while (true) {
myArgs.parseCmdline(argvToStrings(argc, argv));
                /* Start a new worker process if necessary. */
                if (pid == -1) {
                    Pipe toPipe, fromPipe;
                    toPipe.create();
                    fromPipe.create();
                    pid = startProcess(
                        [&,
                         to{std::make_shared<AutoCloseFD>(std::move(fromPipe.writeSide))},
                         from{std::make_shared<AutoCloseFD>(std::move(toPipe.readSide))}
                        ]()
                        {
                            try {
                                EvalState state(myArgs.searchPath, openStore());
                                Bindings & autoArgs = *myArgs.getAutoArgs(state);
                                worker(state, autoArgs, *to, *from);
                            } catch (std::exception & e) {
                                nlohmann::json err;
                                err["error"] = e.what();
                                writeLine(to->get(), err.dump());
                            }
                        },
                        ProcessOptions { .allowVfork = false });
                    from = std::move(fromPipe.readSide);
                    to = std::move(toPipe.writeSide);
                    debug("created worker process %d", pid);
                }
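                /* Each handler thread owns one worker process: the worker
                   gets its own EvalState, evaluates one attribute path at a
                   time and reports the result back over the pipe as a JSON
                   line, so evaluator memory can be reclaimed simply by
                   letting the worker exit and starting a new one. */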
    JSONObject json(std::cout, true);
    std::cout.flush();
                /* Check whether the existing worker process is still there. */
                auto s = readLine(from.get());
                if (s == "restart") {
                    pid = -1;
                    continue;
                } else if (s != "next") {
                    auto json = nlohmann::json::parse(s);
                    throw Error("worker error: %s", (std::string) json["error"]);
                }
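                /* Worker-to-master protocol: "next" means the worker is ready
                   for another attribute path, "restart" means it exited
                   voluntarily (e.g. after exceeding the RSS limit) and should
                   be respawned, and anything else is parsed as a JSON object
                   describing a fatal error. */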
    Pipe pipe;
    pipe.create();
        while (true) {
            checkInterrupt();

            auto state(state_.lock());

            if ((state->todo.empty() && state->active.empty()) || state->exc) {
                writeLine(to.get(), "exit");
                return;
            }

            if (!state->todo.empty()) {
                attrPath = *state->todo.begin();
                state->todo.erase(state->todo.begin());
                state->active.insert(attrPath);
                break;
            } else
                state.wait(wakeup);
        }
    /* FIXME: The build hook in conjunction with import-from-derivation
       is causing "unexpected EOF" during eval. */
    settings.builders = "";
                if (response.find("error") != response.end()) {
                    auto state(state_.lock());
                    state->jobs[attrPath]["error"] = response["error"];
                }
    /* Prevent access to paths outside of the Nix search path and
       to the environment. */
    evalSettings.restrictEval = true;
                /* Add newly discovered job names to the queue. */
                {
                    auto state(state_.lock());
                    state->active.erase(attrPath);
                    for (auto & s : newAttrs)
                        state->todo.insert(s);
                    wakeup.notify_all();
                }
            }
        } catch (...) {
            auto state(state_.lock());
            state->exc = std::current_exception();
            wakeup.notify_all();
        }
    };
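    /* Any exception in a handler thread is stored in the shared state; the
       check on state->exc in the scheduling loop then makes the remaining
       handlers tell their workers to exit. */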
    Value v;
    state.evalFile(lookupFileArg(state, releaseExpr), v);
    /* For aggregate jobs that have named constituents
       (i.e. constituents that are a job name rather than a
       derivation), look up the referenced job and add it to the
       dependencies of the aggregate derivation. */
    auto store = openStore();
writeFull(pipe.writeSide.get(), lastAttrPath);
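    /* Presumably the last attribute path that was fully processed is reported
       back through the pipe so that, after a restart, the lte() check in
       findJobsWrapped() can skip everything that was already emitted. */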
        if (myArgs.dryRun) {
            for (std::string jobName2 : *named) {
                auto job2 = state->jobs.find(jobName2);
                if (job2 == state->jobs.end())
                    throw Error("aggregate job '%s' references non-existent job '%s'", jobName, jobName2);
                std::string drvPath2 = (*job2)["drvPath"];
                job["constituents"].push_back(drvPath2);
            }
        } else {
            std::string drvPath = job["drvPath"];
            auto drv = readDerivation(*store, drvPath);
            exit(0);
        }, options);
            for (std::string jobName2 : *named) {
                auto job2 = state->jobs.find(jobName2);
                if (job2 == state->jobs.end())
                    throw Error("aggregate job '%s' references non-existent job '%s'", jobName, jobName2);
                std::string drvPath2 = (*job2)["drvPath"];
                auto drv2 = readDerivation(*store, drvPath2);
                job["constituents"].push_back(drvPath2);
                drv.inputDrvs[store->parseStorePath(drvPath2)] = {drv2.outputs.begin()->first};
            }
GC_atfork_parent();
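/* Boehm GC fork handling: GC_atfork_parent() is paired with a
   GC_atfork_prepare() before the fork and a GC_atfork_child() in the
   child process. */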
            std::string drvName(store->parseStorePath(drvPath).name());
            assert(hasSuffix(drvName, drvExtension));
            drvName.resize(drvName.size() - drvExtension.size());
            auto h = hashDerivationModulo(*store, drv, true);
            auto outPath = store->makeOutputPath("out", h, drvName);
            drv.env["out"] = store->printStorePath(outPath);
            drv.outputs.insert_or_assign("out", DerivationOutput(outPath.clone(), "", ""));
            auto newDrvPath = store->printStorePath(writeDerivation(store, drv, drvName));
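            /* Adding the constituents to inputDrvs changed the aggregate
               derivation, so its output path is recomputed from the new
               hash-modulo and the rewritten derivation is written back to
               the store as a fresh .drv. */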