From 9abcc68ad1b1eabd03c56969d9df8b1330039817 Mon Sep 17 00:00:00 2001 From: Tristan Ross Date: Fri, 19 Sep 2025 09:48:08 -0700 Subject: [PATCH 001/213] libstore-c: add nix_store_get_fs_closure --- src/libstore-c/nix_api_store.cc | 30 ++++++++++++++++++++++++++++++ src/libstore-c/nix_api_store.h | 24 ++++++++++++++++++++++++ 2 files changed, 54 insertions(+) diff --git a/src/libstore-c/nix_api_store.cc b/src/libstore-c/nix_api_store.cc index c4c17f127..6ee792fc3 100644 --- a/src/libstore-c/nix_api_store.cc +++ b/src/libstore-c/nix_api_store.cc @@ -126,6 +126,36 @@ StorePath * nix_store_parse_path(nix_c_context * context, Store * store, const c NIXC_CATCH_ERRS_NULL } +nix_err nix_store_get_fs_closure( + nix_c_context * context, + Store * store, + const StorePath * store_path, + bool flip_direction, + bool include_outputs, + bool include_derivers, + void * userdata, + void (*callback)(nix_c_context * context, void * userdata, const StorePath * store_path)) +{ + if (context) + context->last_err_code = NIX_OK; + try { + const auto nixStore = store->ptr; + + nix::StorePathSet set; + nixStore->computeFSClosure(store_path->path, set, flip_direction, include_outputs, include_derivers); + + if (callback) { + for (const auto & path : set) { + const StorePath tmp{path}; + callback(context, userdata, &tmp); + if (context && context->last_err_code != NIX_OK) + return context->last_err_code; + } + } + } + NIXC_CATCH_ERRS +} + nix_err nix_store_realise( nix_c_context * context, Store * store, diff --git a/src/libstore-c/nix_api_store.h b/src/libstore-c/nix_api_store.h index e76e376b4..fd7ce068a 100644 --- a/src/libstore-c/nix_api_store.h +++ b/src/libstore-c/nix_api_store.h @@ -245,6 +245,30 @@ void nix_derivation_free(nix_derivation * drv); */ nix_err nix_store_copy_closure(nix_c_context * context, Store * srcStore, Store * dstStore, StorePath * path); +/** + * @brief Gets the closure of a specific store path + * + * @note The callback borrows each StorePath only for the 
duration of the call. + * + * @param[out] context Optional, stores error information + * @param[in] store nix store reference + * @param[in] store_path The path to compute from + * @param[in] flip_direction + * @param[in] include_outputs + * @param[in] include_derivers + * @param[in] callback The function to call for every store path, in no particular order + * @param[in] userdata The userdata to pass to the callback + */ +nix_err nix_store_get_fs_closure( + nix_c_context * context, + Store * store, + const StorePath * store_path, + bool flip_direction, + bool include_outputs, + bool include_derivers, + void * userdata, + void (*callback)(nix_c_context * context, void * userdata, const StorePath * store_path)); + // cffi end #ifdef __cplusplus } From aace1fb5d698e763c7f4e3ebd04ea737631adc62 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 15 Oct 2025 13:27:09 +0200 Subject: [PATCH 002/213] C API: test nix_store_get_fs_closure --- src/libstore-tests/nix_api_store.cc | 240 ++++++++++++++++++++++++++++ 1 file changed, 240 insertions(+) diff --git a/src/libstore-tests/nix_api_store.cc b/src/libstore-tests/nix_api_store.cc index dfd554ec1..6d6017f1f 100644 --- a/src/libstore-tests/nix_api_store.cc +++ b/src/libstore-tests/nix_api_store.cc @@ -218,6 +218,66 @@ struct LambdaAdapter } }; +class NixApiStoreTestWithRealisedPath : public nix_api_store_test_base +{ +public: + StorePath * drvPath = nullptr; + nix_derivation * drv = nullptr; + Store * store = nullptr; + StorePath * outPath = nullptr; + + void SetUp() override + { + nix_api_store_test_base::SetUp(); + + nix::experimentalFeatureSettings.set("extra-experimental-features", "ca-derivations"); + nix::settings.substituters = {}; + + store = open_local_store(); + + std::filesystem::path unitTestData{getenv("_NIX_TEST_UNIT_DATA")}; + std::ifstream t{unitTestData / "derivation/ca/self-contained.json"}; + std::stringstream buffer; + buffer << t.rdbuf(); + + drv = nix_derivation_from_json(ctx, store, 
buffer.str().c_str()); + assert_ctx_ok(); + ASSERT_NE(drv, nullptr); + + drvPath = nix_add_derivation(ctx, store, drv); + assert_ctx_ok(); + ASSERT_NE(drvPath, nullptr); + + auto cb = LambdaAdapter{.fun = [&](const char * outname, const StorePath * outPath_) { + auto is_valid_path = nix_store_is_valid_path(ctx, store, outPath_); + ASSERT_EQ(is_valid_path, true); + ASSERT_STREQ(outname, "out") << "Expected single 'out' output"; + ASSERT_EQ(outPath, nullptr) << "Output path callback should only be called once"; + outPath = nix_store_path_clone(outPath_); + }}; + + auto ret = nix_store_realise( + ctx, store, drvPath, static_cast(&cb), decltype(cb)::call_void); + assert_ctx_ok(); + ASSERT_EQ(ret, NIX_OK); + ASSERT_NE(outPath, nullptr) << "Derivation should have produced an output"; + } + + void TearDown() override + { + if (drvPath) + nix_store_path_free(drvPath); + if (outPath) + nix_store_path_free(outPath); + if (drv) + nix_derivation_free(drv); + if (store) + nix_store_free(store); + + nix_api_store_test_base::TearDown(); + } +}; + TEST_F(nix_api_store_test_base, build_from_json) { // FIXME get rid of these @@ -256,4 +316,184 @@ TEST_F(nix_api_store_test_base, build_from_json) nix_store_free(store); } +TEST_F(NixApiStoreTestWithRealisedPath, nix_store_get_fs_closure_with_outputs) +{ + // Test closure computation with include_outputs on a derivation path + struct CallbackData + { + std::set * paths; + }; + + std::set closure_paths; + CallbackData data{&closure_paths}; + + auto ret = nix_store_get_fs_closure( + ctx, + store, + drvPath, // Use derivation path + false, // flip_direction + true, // include_outputs - include the outputs in the closure + false, // include_derivers + &data, + [](nix_c_context * context, void * userdata, const StorePath * path) { + auto * data = static_cast(userdata); + std::string path_str; + nix_store_path_name(path, OBSERVE_STRING(path_str)); + auto [it, inserted] = data->paths->insert(path_str); + ASSERT_TRUE(inserted) << "Duplicate 
path in closure: " << path_str; + }); + assert_ctx_ok(); + ASSERT_EQ(ret, NIX_OK); + + // The closure should contain the derivation and its outputs + ASSERT_GE(closure_paths.size(), 2); + + // Verify the output path is in the closure + std::string outPathName; + nix_store_path_name(outPath, OBSERVE_STRING(outPathName)); + ASSERT_EQ(closure_paths.count(outPathName), 1); +} + +TEST_F(NixApiStoreTestWithRealisedPath, nix_store_get_fs_closure_without_outputs) +{ + // Test closure computation WITHOUT include_outputs on a derivation path + struct CallbackData + { + std::set * paths; + }; + + std::set closure_paths; + CallbackData data{&closure_paths}; + + auto ret = nix_store_get_fs_closure( + ctx, + store, + drvPath, // Use derivation path + false, // flip_direction + false, // include_outputs - do NOT include the outputs + false, // include_derivers + &data, + [](nix_c_context * context, void * userdata, const StorePath * path) { + auto * data = static_cast(userdata); + std::string path_str; + nix_store_path_name(path, OBSERVE_STRING(path_str)); + auto [it, inserted] = data->paths->insert(path_str); + ASSERT_TRUE(inserted) << "Duplicate path in closure: " << path_str; + }); + assert_ctx_ok(); + ASSERT_EQ(ret, NIX_OK); + + // Verify the output path is NOT in the closure + std::string outPathName; + nix_store_path_name(outPath, OBSERVE_STRING(outPathName)); + ASSERT_EQ(closure_paths.count(outPathName), 0) << "Output path should not be in closure when includeOutputs=false"; +} + +TEST_F(NixApiStoreTestWithRealisedPath, nix_store_get_fs_closure_flip_direction) +{ + // Test closure computation with flip_direction on a derivation path + // When flip_direction=true, we get the reverse dependencies (what depends on this path) + // For a derivation, this should NOT include outputs even with include_outputs=true + struct CallbackData + { + std::set * paths; + }; + + std::set closure_paths; + CallbackData data{&closure_paths}; + + auto ret = nix_store_get_fs_closure( + ctx, + 
store, + drvPath, // Use derivation path + true, // flip_direction - get reverse dependencies + true, // include_outputs + false, // include_derivers + &data, + [](nix_c_context * context, void * userdata, const StorePath * path) { + auto * data = static_cast(userdata); + std::string path_str; + nix_store_path_name(path, OBSERVE_STRING(path_str)); + auto [it, inserted] = data->paths->insert(path_str); + ASSERT_TRUE(inserted) << "Duplicate path in closure: " << path_str; + }); + assert_ctx_ok(); + ASSERT_EQ(ret, NIX_OK); + + // Verify the output path is NOT in the closure when direction is flipped + std::string outPathName; + nix_store_path_name(outPath, OBSERVE_STRING(outPathName)); + ASSERT_EQ(closure_paths.count(outPathName), 0) << "Output path should not be in closure when flip_direction=true"; +} + +TEST_F(NixApiStoreTestWithRealisedPath, nix_store_get_fs_closure_include_derivers) +{ + // Test closure computation with include_derivers on an output path + // This should include the derivation that produced the output + struct CallbackData + { + std::set * paths; + }; + + std::set closure_paths; + CallbackData data{&closure_paths}; + + auto ret = nix_store_get_fs_closure( + ctx, + store, + outPath, // Use output path (not derivation) + false, // flip_direction + false, // include_outputs + true, // include_derivers - include the derivation + &data, + [](nix_c_context * context, void * userdata, const StorePath * path) { + auto * data = static_cast(userdata); + std::string path_str; + nix_store_path_name(path, OBSERVE_STRING(path_str)); + auto [it, inserted] = data->paths->insert(path_str); + ASSERT_TRUE(inserted) << "Duplicate path in closure: " << path_str; + }); + assert_ctx_ok(); + ASSERT_EQ(ret, NIX_OK); + + // Verify the derivation path is in the closure + // Deriver is nasty stateful, and this assertion is only guaranteed because + // we're using an empty store as our starting point. 
Otherwise, if the + // output happens to exist, the deriver could be anything. + std::string drvPathName; + nix_store_path_name(drvPath, OBSERVE_STRING(drvPathName)); + ASSERT_EQ(closure_paths.count(drvPathName), 1) << "Derivation should be in closure when include_derivers=true"; +} + +TEST_F(NixApiStoreTestWithRealisedPath, nix_store_get_fs_closure_error_propagation) +{ + // Test that errors in the callback abort the closure computation + struct CallbackData + { + int * count; + }; + + int call_count = 0; + CallbackData data{&call_count}; + + auto ret = nix_store_get_fs_closure( + ctx, + store, + drvPath, // Use derivation path + false, // flip_direction + true, // include_outputs + false, // include_derivers + &data, + [](nix_c_context * context, void * userdata, const StorePath * path) { + auto * data = static_cast(userdata); + (*data->count)++; + // Set an error immediately + nix_set_err_msg(context, NIX_ERR_UNKNOWN, "Test error"); + }); + + // Should have aborted with error + ASSERT_EQ(ret, NIX_ERR_UNKNOWN); + ASSERT_EQ(call_count, 1); // Should have been called exactly once, then aborted +} + } // namespace nixC From 3fb943d130868f2290d260bfd7a19cb633519ca9 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 15 Oct 2025 14:55:28 +0200 Subject: [PATCH 003/213] C API: Make store realise tests multi-platform ... and improve assertions. 
--- src/libstore-tests/nix_api_store.cc | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/src/libstore-tests/nix_api_store.cc b/src/libstore-tests/nix_api_store.cc index 6d6017f1f..16d1ac0d8 100644 --- a/src/libstore-tests/nix_api_store.cc +++ b/src/libstore-tests/nix_api_store.cc @@ -240,7 +240,10 @@ public: std::stringstream buffer; buffer << t.rdbuf(); - drv = nix_derivation_from_json(ctx, store, buffer.str().c_str()); + // Replace the hardcoded system with the current system + std::string jsonStr = nix::replaceStrings(buffer.str(), "x86_64-linux", nix::settings.thisSystem.get()); + + drv = nix_derivation_from_json(ctx, store, jsonStr.c_str()); assert_ctx_ok(); ASSERT_NE(drv, nullptr); @@ -249,6 +252,7 @@ public: ASSERT_NE(drvPath, nullptr); auto cb = LambdaAdapter{.fun = [&](const char * outname, const StorePath * outPath_) { + ASSERT_NE(outname, nullptr) << "Output name should not be NULL"; auto is_valid_path = nix_store_is_valid_path(ctx, store, outPath_); ASSERT_EQ(is_valid_path, true); ASSERT_STREQ(outname, "out") << "Expected single 'out' output"; @@ -292,7 +296,10 @@ TEST_F(nix_api_store_test_base, build_from_json) std::stringstream buffer; buffer << t.rdbuf(); - auto * drv = nix_derivation_from_json(ctx, store, buffer.str().c_str()); + // Replace the hardcoded system with the current system + std::string jsonStr = nix::replaceStrings(buffer.str(), "x86_64-linux", nix::settings.thisSystem.get()); + + auto * drv = nix_derivation_from_json(ctx, store, jsonStr.c_str()); assert_ctx_ok(); ASSERT_NE(drv, nullptr); @@ -300,15 +307,21 @@ TEST_F(nix_api_store_test_base, build_from_json) assert_ctx_ok(); ASSERT_NE(drv, nullptr); + int callbackCount = 0; auto cb = LambdaAdapter{.fun = [&](const char * outname, const StorePath * outPath) { + ASSERT_NE(outname, nullptr); + ASSERT_STREQ(outname, "out"); + ASSERT_NE(outPath, nullptr); auto is_valid_path = nix_store_is_valid_path(ctx, store, outPath); ASSERT_EQ(is_valid_path, true); + 
callbackCount++; }}; auto ret = nix_store_realise( ctx, store, drvPath, static_cast(&cb), decltype(cb)::call_void); assert_ctx_ok(); ASSERT_EQ(ret, NIX_OK); + ASSERT_EQ(callbackCount, 1) << "Callback should have been invoked exactly once"; // Clean up nix_store_path_free(drvPath); From 12293a8b1162bc273f991b098e05c93e5ff32c5f Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 15 Oct 2025 15:05:50 +0200 Subject: [PATCH 004/213] C API: Document nix_store_copy_closure flags --- src/libstore-c/nix_api_store.h | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/src/libstore-c/nix_api_store.h b/src/libstore-c/nix_api_store.h index fd7ce068a..f477d084a 100644 --- a/src/libstore-c/nix_api_store.h +++ b/src/libstore-c/nix_api_store.h @@ -253,9 +253,14 @@ nix_err nix_store_copy_closure(nix_c_context * context, Store * srcStore, Store * @param[out] context Optional, stores error information * @param[in] store nix store reference * @param[in] store_path The path to compute from - * @param[in] flip_direction - * @param[in] include_outputs - * @param[in] include_derivers + * @param[in] flip_direction If false, compute the forward closure (paths referenced by any store path in the closure). + * If true, compute the backward closure (paths that reference any store path in the closure). + * @param[in] include_outputs If flip_direction is false: for any derivation in the closure, include its outputs. + * If flip_direction is true: for any output in the closure, include derivations that produce + * it. + * @param[in] include_derivers If flip_direction is false: for any output in the closure, include the derivation that + * produced it. + * If flip_direction is true: for any derivation in the closure, include its outputs. 
* @param[in] callback The function to call for every store path, in no particular order * @param[in] userdata The userdata to pass to the callback */ From 6fa03765edcce6e5403903cd68a2cc464e67e4d1 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 15 Oct 2025 15:19:40 +0200 Subject: [PATCH 005/213] C API: Propagate nix_store_realise build errors --- src/libstore-c/nix_api_store.cc | 8 ++ src/libstore-c/nix_api_store.h | 2 + src/libstore-tests/nix_api_store.cc | 135 ++++++++++++++++++++++++++++ 3 files changed, 145 insertions(+) diff --git a/src/libstore-c/nix_api_store.cc b/src/libstore-c/nix_api_store.cc index 6ee792fc3..e18463192 100644 --- a/src/libstore-c/nix_api_store.cc +++ b/src/libstore-c/nix_api_store.cc @@ -173,6 +173,14 @@ nix_err nix_store_realise( const auto nixStore = store->ptr; auto results = nixStore->buildPathsWithResults(paths, nix::bmNormal, nixStore); + assert(results.size() == 1); + + // Check if any builds failed + for (auto & result : results) { + if (!result.success()) + result.rethrow(); + } + if (callback) { for (const auto & result : results) { for (const auto & [outputName, realisation] : result.builtOutputs) { diff --git a/src/libstore-c/nix_api_store.h b/src/libstore-c/nix_api_store.h index f477d084a..964f6d6d5 100644 --- a/src/libstore-c/nix_api_store.h +++ b/src/libstore-c/nix_api_store.h @@ -186,6 +186,8 @@ nix_err nix_store_real_path( * @param[in] path Path to build * @param[in] userdata data to pass to every callback invocation * @param[in] callback called for every realised output + * @return NIX_OK if the build succeeded, or an error code if the build/scheduling/outputs/copying/etc failed. + * On error, the callback is never invoked and error information is stored in context. 
*/ nix_err nix_store_realise( nix_c_context * context, diff --git a/src/libstore-tests/nix_api_store.cc b/src/libstore-tests/nix_api_store.cc index 16d1ac0d8..045b4ad83 100644 --- a/src/libstore-tests/nix_api_store.cc +++ b/src/libstore-tests/nix_api_store.cc @@ -329,6 +329,141 @@ TEST_F(nix_api_store_test_base, build_from_json) nix_store_free(store); } +TEST_F(nix_api_store_test_base, nix_store_realise_invalid_system) +{ + // Test that nix_store_realise properly reports errors when the system is invalid + nix::experimentalFeatureSettings.set("extra-experimental-features", "ca-derivations"); + nix::settings.substituters = {}; + + auto * store = open_local_store(); + + std::filesystem::path unitTestData{getenv("_NIX_TEST_UNIT_DATA")}; + std::ifstream t{unitTestData / "derivation/ca/self-contained.json"}; + std::stringstream buffer; + buffer << t.rdbuf(); + + // Use an invalid system that cannot be built + std::string jsonStr = nix::replaceStrings(buffer.str(), "x86_64-linux", "bogus65-bogusos"); + + auto * drv = nix_derivation_from_json(ctx, store, jsonStr.c_str()); + assert_ctx_ok(); + ASSERT_NE(drv, nullptr); + + auto * drvPath = nix_add_derivation(ctx, store, drv); + assert_ctx_ok(); + ASSERT_NE(drvPath, nullptr); + + int callbackCount = 0; + auto cb = LambdaAdapter{.fun = [&](const char * outname, const StorePath * outPath) { callbackCount++; }}; + + auto ret = nix_store_realise( + ctx, store, drvPath, static_cast(&cb), decltype(cb)::call_void); + + // Should fail with an error + ASSERT_NE(ret, NIX_OK); + ASSERT_EQ(callbackCount, 0) << "Callback should not be invoked when build fails"; + + // Check that error message is set + std::string errMsg = nix_err_msg(nullptr, ctx, nullptr); + ASSERT_FALSE(errMsg.empty()) << "Error message should be set"; + ASSERT_NE(errMsg.find("system"), std::string::npos) << "Error should mention system"; + + // Clean up + nix_store_path_free(drvPath); + nix_derivation_free(drv); + nix_store_free(store); +} + 
+TEST_F(nix_api_store_test_base, nix_store_realise_builder_fails) +{ + // Test that nix_store_realise properly reports errors when the builder fails + nix::experimentalFeatureSettings.set("extra-experimental-features", "ca-derivations"); + nix::settings.substituters = {}; + + auto * store = open_local_store(); + + std::filesystem::path unitTestData{getenv("_NIX_TEST_UNIT_DATA")}; + std::ifstream t{unitTestData / "derivation/ca/self-contained.json"}; + std::stringstream buffer; + buffer << t.rdbuf(); + + // Replace with current system and make builder command fail + std::string jsonStr = nix::replaceStrings(buffer.str(), "x86_64-linux", nix::settings.thisSystem.get()); + jsonStr = nix::replaceStrings(jsonStr, "echo $name foo > $out", "exit 1"); + + auto * drv = nix_derivation_from_json(ctx, store, jsonStr.c_str()); + assert_ctx_ok(); + ASSERT_NE(drv, nullptr); + + auto * drvPath = nix_add_derivation(ctx, store, drv); + assert_ctx_ok(); + ASSERT_NE(drvPath, nullptr); + + int callbackCount = 0; + auto cb = LambdaAdapter{.fun = [&](const char * outname, const StorePath * outPath) { callbackCount++; }}; + + auto ret = nix_store_realise( + ctx, store, drvPath, static_cast(&cb), decltype(cb)::call_void); + + // Should fail with an error + ASSERT_NE(ret, NIX_OK); + ASSERT_EQ(callbackCount, 0) << "Callback should not be invoked when build fails"; + + // Check that error message is set + std::string errMsg = nix_err_msg(nullptr, ctx, nullptr); + ASSERT_FALSE(errMsg.empty()) << "Error message should be set"; + + // Clean up + nix_store_path_free(drvPath); + nix_derivation_free(drv); + nix_store_free(store); +} + +TEST_F(nix_api_store_test_base, nix_store_realise_builder_no_output) +{ + // Test that nix_store_realise properly reports errors when builder succeeds but produces no output + nix::experimentalFeatureSettings.set("extra-experimental-features", "ca-derivations"); + nix::settings.substituters = {}; + + auto * store = open_local_store(); + + std::filesystem::path 
unitTestData{getenv("_NIX_TEST_UNIT_DATA")}; + std::ifstream t{unitTestData / "derivation/ca/self-contained.json"}; + std::stringstream buffer; + buffer << t.rdbuf(); + + // Replace with current system and make builder succeed but not produce output + std::string jsonStr = nix::replaceStrings(buffer.str(), "x86_64-linux", nix::settings.thisSystem.get()); + jsonStr = nix::replaceStrings(jsonStr, "echo $name foo > $out", "true"); + + auto * drv = nix_derivation_from_json(ctx, store, jsonStr.c_str()); + assert_ctx_ok(); + ASSERT_NE(drv, nullptr); + + auto * drvPath = nix_add_derivation(ctx, store, drv); + assert_ctx_ok(); + ASSERT_NE(drvPath, nullptr); + + int callbackCount = 0; + auto cb = LambdaAdapter{.fun = [&](const char * outname, const StorePath * outPath) { callbackCount++; }}; + + auto ret = nix_store_realise( + ctx, store, drvPath, static_cast(&cb), decltype(cb)::call_void); + + // Should fail with an error + ASSERT_NE(ret, NIX_OK); + ASSERT_EQ(callbackCount, 0) << "Callback should not be invoked when build produces no output"; + + // Check that error message is set + std::string errMsg = nix_err_msg(nullptr, ctx, nullptr); + ASSERT_FALSE(errMsg.empty()) << "Error message should be set"; + + // Clean up + nix_store_path_free(drvPath); + nix_derivation_free(drv); + nix_store_free(store); +} + TEST_F(NixApiStoreTestWithRealisedPath, nix_store_get_fs_closure_with_outputs) { // Test closure computation with include_outputs on a derivation path From 6036aaf798f38a2c1a1d63a16f8566c98a60dbcf Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 15 Oct 2025 22:04:21 +0200 Subject: [PATCH 006/213] C API: Check output callback order --- src/libstore-tests/nix_api_store.cc | 149 ++++++++++++++++++++++++++++ 1 file changed, 149 insertions(+) diff --git a/src/libstore-tests/nix_api_store.cc b/src/libstore-tests/nix_api_store.cc index 045b4ad83..228b8069f 100644 --- a/src/libstore-tests/nix_api_store.cc +++ b/src/libstore-tests/nix_api_store.cc @@ -613,6 +613,155 @@ 
TEST_F(NixApiStoreTestWithRealisedPath, nix_store_get_fs_closure_include_deriver ASSERT_EQ(closure_paths.count(drvPathName), 1) << "Derivation should be in closure when include_derivers=true"; } +TEST_F(NixApiStoreTestWithRealisedPath, nix_store_realise_output_ordering) +{ + // Test that nix_store_realise returns outputs in alphabetical order by output name. + // This test uses a CA derivation with 10 outputs in randomized input order + // to verify that the callback order is deterministic and alphabetical. + nix::experimentalFeatureSettings.set("extra-experimental-features", "ca-derivations"); + nix::settings.substituters = {}; + + auto * store = open_local_store(); + + // Create a CA derivation with 10 outputs using proper placeholders + auto outa_ph = nix::hashPlaceholder("outa"); + auto outb_ph = nix::hashPlaceholder("outb"); + auto outc_ph = nix::hashPlaceholder("outc"); + auto outd_ph = nix::hashPlaceholder("outd"); + auto oute_ph = nix::hashPlaceholder("oute"); + auto outf_ph = nix::hashPlaceholder("outf"); + auto outg_ph = nix::hashPlaceholder("outg"); + auto outh_ph = nix::hashPlaceholder("outh"); + auto outi_ph = nix::hashPlaceholder("outi"); + auto outj_ph = nix::hashPlaceholder("outj"); + + std::string drvJson = R"({ + "version": 3, + "name": "multi-output-test", + "system": ")" + nix::settings.thisSystem.get() + + R"(", + "builder": "/bin/sh", + "args": ["-c", "echo a > $outa; echo b > $outb; echo c > $outc; echo d > $outd; echo e > $oute; echo f > $outf; echo g > $outg; echo h > $outh; echo i > $outi; echo j > $outj"], + "env": { + "builder": "/bin/sh", + "name": "multi-output-test", + "system": ")" + nix::settings.thisSystem.get() + + R"(", + "outf": ")" + outf_ph + + R"(", + "outd": ")" + outd_ph + + R"(", + "outi": ")" + outi_ph + + R"(", + "oute": ")" + oute_ph + + R"(", + "outh": ")" + outh_ph + + R"(", + "outc": ")" + outc_ph + + R"(", + "outb": ")" + outb_ph + + R"(", + "outg": ")" + outg_ph + + R"(", + "outj": ")" + outj_ph + + R"(", + "outa": 
")" + outa_ph + + R"(" + }, + "inputDrvs": {}, + "inputSrcs": [], + "outputs": { + "outd": { "hashAlgo": "sha256", "method": "nar" }, + "outf": { "hashAlgo": "sha256", "method": "nar" }, + "outg": { "hashAlgo": "sha256", "method": "nar" }, + "outb": { "hashAlgo": "sha256", "method": "nar" }, + "outc": { "hashAlgo": "sha256", "method": "nar" }, + "outi": { "hashAlgo": "sha256", "method": "nar" }, + "outj": { "hashAlgo": "sha256", "method": "nar" }, + "outh": { "hashAlgo": "sha256", "method": "nar" }, + "outa": { "hashAlgo": "sha256", "method": "nar" }, + "oute": { "hashAlgo": "sha256", "method": "nar" } + } + })"; + + auto * drv = nix_derivation_from_json(ctx, store, drvJson.c_str()); + assert_ctx_ok(); + ASSERT_NE(drv, nullptr); + + auto * drvPath = nix_add_derivation(ctx, store, drv); + assert_ctx_ok(); + ASSERT_NE(drvPath, nullptr); + + // Realise the derivation - capture the order outputs are returned + std::map outputs; + std::vector output_order; + auto cb = LambdaAdapter{.fun = [&](const char * outname, const StorePath * outPath) { + ASSERT_NE(outname, nullptr); + ASSERT_NE(outPath, nullptr); + output_order.push_back(outname); + outputs.emplace(outname, outPath->path); + }}; + + auto ret = nix_store_realise( + ctx, store, drvPath, static_cast(&cb), decltype(cb)::call_void); + assert_ctx_ok(); + ASSERT_EQ(ret, NIX_OK); + ASSERT_EQ(outputs.size(), 10); + + // Verify outputs are returned in alphabetical order by output name + std::vector expected_order = { + "outa", "outb", "outc", "outd", "oute", "outf", "outg", "outh", "outi", "outj"}; + ASSERT_EQ(output_order, expected_order) << "Outputs should be returned in alphabetical order by output name"; + + // Now compute closure with include_outputs and collect paths in order + struct CallbackData + { + std::vector * paths; + }; + + std::vector closure_paths; + CallbackData data{&closure_paths}; + + ret = nix_store_get_fs_closure( + ctx, + store, + drvPath, + false, // flip_direction + true, // include_outputs - 
include the outputs in the closure + false, // include_derivers + &data, + [](nix_c_context * context, void * userdata, const StorePath * path) { + auto * data = static_cast(userdata); + std::string path_str; + nix_store_path_name(path, OBSERVE_STRING(path_str)); + data->paths->push_back(path_str); + }); + assert_ctx_ok(); + ASSERT_EQ(ret, NIX_OK); + + // Should contain at least the derivation and 10 outputs + ASSERT_GE(closure_paths.size(), 11); + + // Verify all outputs are present in the closure + for (const auto & [outname, outPath] : outputs) { + std::string outPathName = store->ptr->printStorePath(outPath); + + bool found = false; + for (const auto & p : closure_paths) { + // nix_store_path_name returns just the name part, so match against full path name + if (outPathName.find(p) != std::string::npos) { + found = true; + break; + } + } + ASSERT_TRUE(found) << "Output " << outname << " (" << outPathName << ") not found in closure"; + } + + nix_store_path_free(drvPath); + nix_derivation_free(drv); + nix_store_free(store); +} + TEST_F(NixApiStoreTestWithRealisedPath, nix_store_get_fs_closure_error_propagation) { // Test that errors in the callback abort the closure computation From 61fbef42a6eeae7553f148f1759c5a770a2f65aa Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sat, 18 Oct 2025 18:47:27 +0300 Subject: [PATCH 007/213] libstore: Simplify check for S3-specific URI query parameters Instead of hardcoding strings we should instead use the setting objects to determine the query names that should be preserved. 
--- .../include/nix/store/s3-binary-cache-store.hh | 8 ++++++-- src/libstore/s3-binary-cache-store.cc | 15 +++++++-------- 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/src/libstore/include/nix/store/s3-binary-cache-store.hh b/src/libstore/include/nix/store/s3-binary-cache-store.hh index c8cb967c1..e5fcbeda3 100644 --- a/src/libstore/include/nix/store/s3-binary-cache-store.hh +++ b/src/libstore/include/nix/store/s3-binary-cache-store.hh @@ -21,8 +21,6 @@ struct S3BinaryCacheStoreConfig : HttpBinaryCacheStoreConfig Nix uses the `default` profile. )"}; -public: - const Setting region{ this, "us-east-1", @@ -63,6 +61,12 @@ public: > addressing instead of virtual host based addressing. )"}; + /** + * Set of settings that are part of the S3 URI itself. + * These are needed for region specification and other S3-specific settings. + */ + const std::set s3UriSettings = {&profile, ®ion, &scheme, &endpoint}; + static const std::string name() { return "S3 Binary Cache Store"; diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index a84ea5fcb..ac08a4982 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -1,10 +1,10 @@ #include "nix/store/s3-binary-cache-store.hh" - -#include - #include "nix/store/http-binary-cache-store.hh" #include "nix/store/store-registration.hh" +#include +#include + namespace nix { StringSet S3BinaryCacheStoreConfig::uriSchemes() @@ -17,14 +17,13 @@ S3BinaryCacheStoreConfig::S3BinaryCacheStoreConfig( : StoreConfig(params) , HttpBinaryCacheStoreConfig(scheme, _cacheUri, params) { - // For S3 stores, preserve S3-specific query parameters as part of the URL - // These are needed for region specification and other S3-specific settings assert(cacheUri.query.empty()); + assert(cacheUri.scheme == "s3"); - // Only copy S3-specific parameters to the URL query - static const std::set s3Params = {"region", "endpoint", "profile", "scheme"}; for (const auto & [key, 
value] : params) { - if (s3Params.contains(key)) { + auto s3Params = + std::views::transform(s3UriSettings, [](const AbstractSetting * setting) { return setting->name; }); + if (std::ranges::contains(s3Params, key)) { cacheUri.query[key] = value; } } From 3d147c04a5f9d03e1696fb25b495a077885d2cf7 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sat, 18 Oct 2025 19:11:39 +0300 Subject: [PATCH 008/213] libstore: Implement getHumanReadableURI for S3BinaryCacheStoreConfig This slightly improves the logs situation by including the region/profile/endpoint in the logs when S3 store references get printed. Instead of: copying path '/nix/store/lxnp9cs4cfh2g9r2bs4z7gwwz9kdj2r9-test-package-c' to 's3://bucketname'... This now includes: copying path '/nix/store/lxnp9cs4cfh2g9r2bs4z7gwwz9kdj2r9-test-package-c' to 's3://bucketname?endpoint=http://server:9000®ion=eu-west-1'... --- .../include/nix/store/s3-binary-cache-store.hh | 2 ++ src/libstore/s3-binary-cache-store.cc | 13 +++++++++++++ 2 files changed, 15 insertions(+) diff --git a/src/libstore/include/nix/store/s3-binary-cache-store.hh b/src/libstore/include/nix/store/s3-binary-cache-store.hh index e5fcbeda3..288ca41a0 100644 --- a/src/libstore/include/nix/store/s3-binary-cache-store.hh +++ b/src/libstore/include/nix/store/s3-binary-cache-store.hh @@ -75,6 +75,8 @@ struct S3BinaryCacheStoreConfig : HttpBinaryCacheStoreConfig static StringSet uriSchemes(); static std::string doc(); + + std::string getHumanReadableURI() const override; }; } // namespace nix diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index ac08a4982..0b37ac5d7 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -29,6 +29,19 @@ S3BinaryCacheStoreConfig::S3BinaryCacheStoreConfig( } } +std::string S3BinaryCacheStoreConfig::getHumanReadableURI() const +{ + auto reference = getReference(); + reference.params = [&]() { + Params relevantParams; + for (auto & setting : 
s3UriSettings) + if (setting->overridden) + relevantParams.insert({setting->name, reference.params.at(setting->name)}); + return relevantParams; + }(); + return reference.render(); +} + std::string S3BinaryCacheStoreConfig::doc() { return R"( From 606c258c6f485fe2316057505714dfee0f5fbe26 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Tue, 21 Oct 2025 15:56:39 +0200 Subject: [PATCH 009/213] Fix computeStorePath() default argument --- src/libstore/include/nix/store/store-dir-config.hh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/include/nix/store/store-dir-config.hh b/src/libstore/include/nix/store/store-dir-config.hh index 07cda5c12..34e928182 100644 --- a/src/libstore/include/nix/store/store-dir-config.hh +++ b/src/libstore/include/nix/store/store-dir-config.hh @@ -91,7 +91,7 @@ struct StoreDirConfig std::pair computeStorePath( std::string_view name, const SourcePath & path, - ContentAddressMethod method = FileIngestionMethod::NixArchive, + ContentAddressMethod method = ContentAddressMethod::Raw::NixArchive, HashAlgorithm hashAlgo = HashAlgorithm::SHA256, const StorePathSet & references = {}, PathFilter & filter = defaultPathFilter) const; From 62247af3638adaf4c8a7eae1decda6c34f5c6a78 Mon Sep 17 00:00:00 2001 From: Cole Helbling Date: Tue, 21 Oct 2025 10:06:03 -0700 Subject: [PATCH 010/213] libstore: remove useless fmt --- src/libstore/build/derivation-building-goal.cc | 1 - 1 file changed, 1 deletion(-) diff --git a/src/libstore/build/derivation-building-goal.cc b/src/libstore/build/derivation-building-goal.cc index a6fe95f3e..68a86cac2 100644 --- a/src/libstore/build/derivation-building-goal.cc +++ b/src/libstore/build/derivation-building-goal.cc @@ -239,7 +239,6 @@ Goal::Co DerivationBuildingGoal::tryToBuild() : buildMode == bmCheck ? 
"checking outputs of '%s'" : "building '%s'", worker.store.printStorePath(drvPath)); - fmt("building '%s'", worker.store.printStorePath(drvPath)); #ifndef _WIN32 // TODO enable build hook on Windows if (hook) msg += fmt(" on '%s'", hook->machineName); From b558dac7a95a8007151e8f0d2757a33eb5d8871d Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 21 Oct 2025 21:59:12 +0200 Subject: [PATCH 011/213] flake.nix: Add nix run .#open-manual Great for reviewing the rendered manual --- flake.nix | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/flake.nix b/flake.nix index 8d3d963be..418f3180f 100644 --- a/flake.nix +++ b/flake.nix @@ -471,6 +471,27 @@ } ); + apps = forAllSystems ( + system: + let + pkgs = nixpkgsFor.${system}.native; + opener = if pkgs.stdenv.isDarwin then "open" else "xdg-open"; + in + { + open-manual = { + type = "app"; + program = "${pkgs.writeShellScript "open-nix-manual" '' + manual_path="${self.packages.${system}.nix-manual}/share/doc/nix/manual/index.html" + if ! ${opener} "$manual_path"; then + echo "Failed to open manual with ${opener}. 
Manual is located at:" + echo "$manual_path" + fi + ''}"; + meta.description = "Open the Nix manual in your browser"; + }; + } + ); + devShells = let makeShell = import ./packaging/dev-shell.nix { inherit lib devFlake; }; From a38c7eb64e325f7d099d941e52fd7ceec4b93618 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 22 Oct 2025 00:56:37 +0200 Subject: [PATCH 012/213] Print failing attribute paths in nix flake check --- src/nix/flake.cc | 37 ++++++++++++++++++++++++++++---- tests/functional/flakes/check.sh | 21 ++++++++++++++++++ 2 files changed, 54 insertions(+), 4 deletions(-) diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 04d4ec8eb..998a36bcc 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -423,7 +423,7 @@ struct CmdFlakeCheck : FlakeCommand return std::nullopt; }; - std::vector drvPaths; + std::map> attrPathsByDrv; auto checkApp = [&](const std::string & attrPath, Value & v, const PosIdx pos) { try { @@ -621,7 +621,13 @@ struct CmdFlakeCheck : FlakeCommand .drvPath = makeConstantStorePathRef(*drvPath), .outputs = OutputsSpec::All{}, }; - drvPaths.push_back(std::move(path)); + + // Build and store the attribute path for error reporting + AttrPath attrPath; + attrPath.push_back(AttrName(state->symbols.create(name))); + attrPath.push_back(AttrName(attr.name)); + attrPath.push_back(AttrName(attr2.name)); + attrPathsByDrv[path].push_back(std::move(attrPath)); } } } @@ -785,7 +791,9 @@ struct CmdFlakeCheck : FlakeCommand }); } - if (build && !drvPaths.empty()) { + if (build && !attrPathsByDrv.empty()) { + auto keys = std::views::keys(attrPathsByDrv); + std::vector drvPaths(keys.begin(), keys.end()); // TODO: This filtering of substitutable paths is a temporary workaround until // https://github.com/NixOS/nix/issues/5025 (union stores) is implemented. 
// @@ -811,7 +819,28 @@ struct CmdFlakeCheck : FlakeCommand } Activity act(*logger, lvlInfo, actUnknown, fmt("running %d flake checks", toBuild.size())); - store->buildPaths(toBuild); + auto results = store->buildPathsWithResults(toBuild); + + // Report build failures with attribute paths + for (auto & result : results) { + if (auto * failure = result.tryGetFailure()) { + auto it = attrPathsByDrv.find(result.path); + if (it != attrPathsByDrv.end() && !it->second.empty()) { + for (auto & attrPath : it->second) { + auto attrPathStr = showAttrPath(state->symbols, attrPath); + reportError(Error( + "failed to build attribute '%s', build of '%s' failed: %s", + attrPathStr, + result.path.to_string(*store), + failure->errorMsg)); + } + } else { + // Derivation has no attribute path (e.g., a build dependency) + reportError( + Error("build of '%s' failed: %s", result.path.to_string(*store), failure->errorMsg)); + } + } + } } if (hasErrors) throw Error("some errors were encountered during the evaluation"); diff --git a/tests/functional/flakes/check.sh b/tests/functional/flakes/check.sh index 55cd3805f..cb4e3eeba 100755 --- a/tests/functional/flakes/check.sh +++ b/tests/functional/flakes/check.sh @@ -192,3 +192,24 @@ EOF # shellcheck disable=SC2015 checkRes=$(nix flake check "$flakeDir" 2>&1 && fail "nix flake check should have failed" || true) echo "$checkRes" | grepQuiet -E "builder( for .*)? 
failed with exit code 1" + +# Test that attribute paths are shown in error messages +cat > "$flakeDir"/flake.nix <&1 && fail "nix flake check should have failed" || true) +echo "$checkRes" | grepQuiet "checks.${system}.failingCheck" +echo "$checkRes" | grepQuiet "checks.${system}.anotherFailingCheck" From d4fd5c222d294dcdc3f04d8384176bed07b61857 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 22 Oct 2025 01:03:31 +0200 Subject: [PATCH 013/213] Remove "(ignored)" from errors in nix flake check --keep-going --- src/nix/flake.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 998a36bcc..01f5ce120 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -367,7 +367,7 @@ struct CmdFlakeCheck : FlakeCommand throw; } catch (Error & e) { if (settings.keepGoing) { - ignoreExceptionExceptInterrupt(); + logError(e.info()); hasErrors = true; } else throw; From 32b286e5d6cadadd97b0f8112644671fbb7b54df Mon Sep 17 00:00:00 2001 From: Taeer Bar-Yam Date: Mon, 20 Oct 2025 21:47:25 +0200 Subject: [PATCH 014/213] libexpr: parser.y: api.value.type variant --- src/libexpr/eval.cc | 6 +- src/libexpr/include/nix/expr/nixexpr.hh | 6 +- src/libexpr/include/nix/expr/parser-state.hh | 16 +- src/libexpr/lexer.l | 34 ++-- src/libexpr/nixexpr.cc | 4 +- src/libexpr/parser.y | 158 ++++++++----------- 6 files changed, 99 insertions(+), 125 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 800c29fad..7a00f4ddf 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -2050,10 +2050,10 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) }; // List of returned strings. References to these Values must NOT be persisted. 
- SmallTemporaryValueVector values(es->size()); + SmallTemporaryValueVector values(es.size()); Value * vTmpP = values.data(); - for (auto & [i_pos, i] : *es) { + for (auto & [i_pos, i] : es) { Value & vTmp = *vTmpP++; i->eval(state, env, vTmp); @@ -2097,7 +2097,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) .debugThrow(); } else { if (s.empty()) - s.reserve(es->size()); + s.reserve(es.size()); /* skip canonization of first path, which would only be not canonized in the first place if it's coming from a ./${foo} type path */ diff --git a/src/libexpr/include/nix/expr/nixexpr.hh b/src/libexpr/include/nix/expr/nixexpr.hh index 863a1369d..86ad01504 100644 --- a/src/libexpr/include/nix/expr/nixexpr.hh +++ b/src/libexpr/include/nix/expr/nixexpr.hh @@ -695,11 +695,11 @@ struct ExprConcatStrings : Expr { PosIdx pos; bool forceString; - std::vector> * es; - ExprConcatStrings(const PosIdx & pos, bool forceString, std::vector> * es) + std::vector> es; + ExprConcatStrings(const PosIdx & pos, bool forceString, std::vector> && es) : pos(pos) , forceString(forceString) - , es(es) {}; + , es(std::move(es)) {}; PosIdx getPos() const override { diff --git a/src/libexpr/include/nix/expr/parser-state.hh b/src/libexpr/include/nix/expr/parser-state.hh index 55dce3047..18d2051d0 100644 --- a/src/libexpr/include/nix/expr/parser-state.hh +++ b/src/libexpr/include/nix/expr/parser-state.hh @@ -282,7 +282,7 @@ ParserState::stripIndentation(const PosIdx pos, std::vector>; + std::vector> es2{}; atStartOfLine = true; size_t curDropped = 0; size_t n = es.size(); @@ -290,7 +290,7 @@ ParserState::stripIndentation(const PosIdx pos, std::vectoremplace_back(i->first, e); + es2.emplace_back(i->first, e); }; const auto trimString = [&](const StringToken & t) { std::string s2; @@ -324,7 +324,7 @@ ParserState::stripIndentation(const PosIdx pos, std::vectoremplace_back(i->first, new ExprString(alloc, s2)); + es2.emplace_back(i->first, new ExprString(alloc, s2)); } }; for (; i != 
es.end(); ++i, --n) { @@ -333,19 +333,17 @@ ParserState::stripIndentation(const PosIdx pos, std::vectorsize() == 0) { + if (es2.size() == 0) { auto * const result = new ExprString(""); - delete es2; return result; } /* If this is a single string, then don't do a concatenation. */ - if (es2->size() == 1 && dynamic_cast((*es2)[0].second)) { - auto * const result = (*es2)[0].second; - delete es2; + if (es2.size() == 1 && dynamic_cast((es2)[0].second)) { + auto * const result = (es2)[0].second; return result; } - return new ExprConcatStrings(pos, true, es2); + return new ExprConcatStrings(pos, true, std::move(es2)); } inline PosIdx LexerState::at(const ParserLocation & loc) diff --git a/src/libexpr/lexer.l b/src/libexpr/lexer.l index f420fc13f..74a9065a4 100644 --- a/src/libexpr/lexer.l +++ b/src/libexpr/lexer.l @@ -142,11 +142,11 @@ or { return OR_KW; } return PIPE_INTO; } -{ID} { yylval->id = {yytext, (size_t) yyleng}; return ID; } +{ID} { yylval->emplace(yytext, (size_t) yyleng); return ID; } {INT} { errno = 0; std::optional numMay = string2Int(yytext); if (numMay.has_value()) { - yylval->n = NixInt{*numMay}; + yylval->emplace(*numMay); } else { throw ParseError(ErrorInfo{ .msg = HintFmt("invalid integer '%1%'", yytext), @@ -156,7 +156,7 @@ or { return OR_KW; } return INT_LIT; } {FLOAT} { errno = 0; - yylval->nf = strtod(yytext, 0); + yylval->emplace(strtod(yytext, 0)); if (errno != 0) throw ParseError(ErrorInfo{ .msg = HintFmt("invalid float '%1%'", yytext), @@ -183,7 +183,7 @@ or { return OR_KW; } /* It is impossible to match strings ending with '$' with one regex because trailing contexts are only valid at the end of a rule. (A sane but undocumented limitation.) */ - yylval->str = unescapeStr(yytext, yyleng, [&]() { return state->positions[CUR_POS]; }); + yylval->emplace(unescapeStr(yytext, yyleng, [&]() { return state->positions[CUR_POS]; })); return STR; } \$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; } @@ -198,27 +198,27 @@ or { return OR_KW; } \'\'(\ *\n)? 
{ PUSH_STATE(IND_STRING); return IND_STRING_OPEN; } ([^\$\']|\$[^\{\']|\'[^\'\$])+ { - yylval->str = {yytext, (size_t) yyleng, true}; - forceNoNullByte(yylval->str, [&]() { return state->positions[CUR_POS]; }); + yylval->emplace(yytext, (size_t) yyleng, true); + forceNoNullByte(yylval->as(), [&]() { return state->positions[CUR_POS]; }); return IND_STR; } \'\'\$ | \$ { - yylval->str = {"$", 1}; + yylval->emplace("$", 1); return IND_STR; } \'\'\' { - yylval->str = {"''", 2}; + yylval->emplace("''", 2); return IND_STR; } \'\'\\{ANY} { - yylval->str = unescapeStr(yytext + 2, yyleng - 2, [&]() { return state->positions[CUR_POS]; }); + yylval->emplace(unescapeStr(yytext + 2, yyleng - 2, [&]() { return state->positions[CUR_POS]; })); return IND_STR; } \$\{ { PUSH_STATE(DEFAULT); return DOLLAR_CURLY; } \'\' { POP_STATE(); return IND_STRING_CLOSE; } \' { - yylval->str = {"'", 1}; + yylval->emplace("'", 1); return IND_STR; } @@ -232,14 +232,14 @@ or { return OR_KW; } {PATH_SEG} { POP_STATE(); PUSH_STATE(INPATH_SLASH); - yylval->path = {yytext, (size_t) yyleng}; + yylval->emplace(yytext, (size_t) yyleng); return PATH; } {HPATH_START} { POP_STATE(); PUSH_STATE(INPATH_SLASH); - yylval->path = {yytext, (size_t) yyleng}; + yylval->emplace(yytext, (size_t) yyleng); return HPATH; } @@ -248,7 +248,7 @@ or { return OR_KW; } PUSH_STATE(INPATH_SLASH); else PUSH_STATE(INPATH); - yylval->path = {yytext, (size_t) yyleng}; + yylval->emplace(yytext, (size_t) yyleng); return PATH; } {HPATH} { @@ -256,7 +256,7 @@ or { return OR_KW; } PUSH_STATE(INPATH_SLASH); else PUSH_STATE(INPATH); - yylval->path = {yytext, (size_t) yyleng}; + yylval->emplace(yytext, (size_t) yyleng); return HPATH; } @@ -272,7 +272,7 @@ or { return OR_KW; } PUSH_STATE(INPATH_SLASH); else PUSH_STATE(INPATH); - yylval->str = {yytext, (size_t) yyleng}; + yylval->emplace(yytext, (size_t) yyleng); return STR; } {ANY} | @@ -294,8 +294,8 @@ or { return OR_KW; } }); } -{SPATH} { yylval->path = {yytext, (size_t) yyleng}; return 
SPATH; } -{URI} { yylval->uri = {yytext, (size_t) yyleng}; return URI; } +{SPATH} { yylval->emplace(yytext, (size_t) yyleng); return SPATH; } +{URI} { yylval->emplace(yytext, (size_t) yyleng); return URI; } %{ // Doc comment rule diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index 5b9d17d49..a77e42356 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -246,7 +246,7 @@ void ExprConcatStrings::show(const SymbolTable & symbols, std::ostream & str) co { bool first = true; str << "("; - for (auto & i : *es) { + for (auto & i : es) { if (first) first = false; else @@ -564,7 +564,7 @@ void ExprConcatStrings::bindVars(EvalState & es, const std::shared_ptres) + for (auto & i : this->es) i.second->bindVars(es, env); } diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 9186fcf4b..93c944dcf 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -14,6 +14,10 @@ %code requires { +// bison adds a bunch of switch statements with default: +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wswitch-enum" + #ifndef BISON_HEADER #define BISON_HEADER @@ -120,46 +124,28 @@ static Expr * makeCall(PosIdx pos, Expr * fn, Expr * arg) { %} -%union { - // !!! We're probably leaking stuff here. - nix::Expr * e; - nix::ExprList * list; - nix::ExprAttrs * attrs; - nix::Formals * formals; - nix::Formal * formal; - nix::NixInt n; - nix::NixFloat nf; - nix::StringToken id; // !!! 
-> Symbol - nix::StringToken path; - nix::StringToken uri; - nix::StringToken str; - std::vector * attrNames; - std::vector> * inheritAttrs; - std::vector> * string_parts; - std::variant * to_be_string; - std::vector>> * ind_string_parts; -} +%define api.value.type variant -%type start expr expr_function expr_if expr_op -%type expr_select expr_simple expr_app -%type expr_pipe_from expr_pipe_into -%type expr_list -%type binds binds1 -%type formals formal_set -%type formal -%type attrpath -%type attrs -%type string_parts_interpolated -%type ind_string_parts -%type path_start -%type string_parts string_attr -%type attr -%token ID -%token STR IND_STR -%token INT_LIT -%token FLOAT_LIT -%token PATH HPATH SPATH PATH_END -%token URI +%type start expr expr_function expr_if expr_op +%type expr_select expr_simple expr_app +%type expr_pipe_from expr_pipe_into +%type expr_list +%type binds binds1 +%type formals formal_set +%type formal +%type > attrpath +%type >> attrs +%type >> string_parts_interpolated +%type >>> ind_string_parts +%type path_start +%type > string_parts string_attr +%type attr +%token ID +%token STR IND_STR +%token INT_LIT +%token FLOAT_LIT +%token PATH HPATH SPATH PATH_END +%token URI %token IF THEN ELSE ASSERT WITH LET IN_KW REC INHERIT EQ NEQ AND OR IMPL OR_KW %token PIPE_FROM PIPE_INTO /* <| and |> */ %token DOLLAR_CURLY /* == ${ */ @@ -261,9 +247,9 @@ expr_op | expr_op OR expr_op { $$ = new ExprOpOr(state->at(@2), $1, $3); } | expr_op IMPL expr_op { $$ = new ExprOpImpl(state->at(@2), $1, $3); } | expr_op UPDATE expr_op { $$ = new ExprOpUpdate(state->at(@2), $1, $3); } - | expr_op '?' attrpath { $$ = new ExprOpHasAttr(state->alloc, $1, std::move(*$3)); delete $3; } + | expr_op '?' 
attrpath { $$ = new ExprOpHasAttr(state->alloc, $1, std::move($3)); } | expr_op '+' expr_op - { $$ = new ExprConcatStrings(state->at(@2), false, new std::vector >({{state->at(@1), $1}, {state->at(@3), $3}})); } + { $$ = new ExprConcatStrings(state->at(@2), false, {{state->at(@1), $1}, {state->at(@3), $3}}); } | expr_op '-' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.sub), {$1, $3}); } | expr_op '*' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.mul), {$1, $3}); } | expr_op '/' expr_op { $$ = new ExprCall(state->at(@2), new ExprVar(state->s.div), {$1, $3}); } @@ -282,9 +268,9 @@ expr_app expr_select : expr_simple '.' attrpath - { $$ = new ExprSelect(state->alloc, CUR_POS, $1, std::move(*$3), nullptr); delete $3; } + { $$ = new ExprSelect(state->alloc, CUR_POS, $1, std::move($3), nullptr); } | expr_simple '.' attrpath OR_KW expr_select - { $$ = new ExprSelect(state->alloc, CUR_POS, $1, std::move(*$3), $5); delete $3; $5->warnIfCursedOr(state->symbols, state->positions); } + { $$ = new ExprSelect(state->alloc, CUR_POS, $1, std::move($3), $5); $5->warnIfCursedOr(state->symbols, state->positions); } | /* Backwards compatibility: because Nixpkgs has a function named ‘or’, allow stuff like ‘map or [...]’. 
This production is problematic (see https://github.com/NixOS/nix/issues/11118) and will be refactored in the @@ -311,17 +297,15 @@ expr_simple std::visit(overloaded{ [&](std::string_view str) { $$ = new ExprString(state->alloc, str); }, [&](Expr * expr) { $$ = expr; }}, - *$2); - delete $2; + $2); } | IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE { - $$ = state->stripIndentation(CUR_POS, std::move(*$2)); - delete $2; + $$ = state->stripIndentation(CUR_POS, std::move($2)); } | path_start PATH_END | path_start string_parts_interpolated PATH_END { - $2->insert($2->begin(), {state->at(@1), $1}); - $$ = new ExprConcatStrings(CUR_POS, false, $2); + $2.insert($2.begin(), {state->at(@1), $1}); + $$ = new ExprConcatStrings(CUR_POS, false, std::move($2)); } | SPATH { std::string_view path($1.p + 1, $1.l - 2); @@ -354,20 +338,19 @@ expr_simple ; string_parts - : STR { $$ = new std::variant($1); } - | string_parts_interpolated { $$ = new std::variant(new ExprConcatStrings(CUR_POS, true, $1)); } - | { $$ = new std::variant(std::string_view()); } + : STR { $$ = $1; } + | string_parts_interpolated { $$ = new ExprConcatStrings(CUR_POS, true, std::move($1)); } + | { $$ = std::string_view(); } ; string_parts_interpolated : string_parts_interpolated STR - { $$ = $1; $1->emplace_back(state->at(@2), new ExprString(state->alloc, $2)); } - | string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->at(@2), $3); } - | DOLLAR_CURLY expr '}' { $$ = new std::vector>; $$->emplace_back(state->at(@1), $2); } + { $$ = $1; $$.emplace_back(state->at(@2), new ExprString(state->alloc, $2)); } + | string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $$.emplace_back(state->at(@2), $3); } + | DOLLAR_CURLY expr '}' { $$.emplace_back(state->at(@1), $2); } | STR DOLLAR_CURLY expr '}' { - $$ = new std::vector>; - $$->emplace_back(state->at(@1), new ExprString(state->alloc, $1)); - $$->emplace_back(state->at(@2), $3); + $$.emplace_back(state->at(@1), new 
ExprString(state->alloc, $1)); + $$.emplace_back(state->at(@2), $3); } ; @@ -408,9 +391,9 @@ path_start ; ind_string_parts - : ind_string_parts IND_STR { $$ = $1; $1->emplace_back(state->at(@2), $2); } - | ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $1->emplace_back(state->at(@2), $3); } - | { $$ = new std::vector>>; } + : ind_string_parts IND_STR { $$ = $1; $$.emplace_back(state->at(@2), $2); } + | ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $$.emplace_back(state->at(@2), $3); } + | { } ; binds @@ -421,19 +404,17 @@ binds binds1 : binds1[accum] attrpath '=' expr ';' { $$ = $accum; - state->addAttr($$, std::move(*$attrpath), @attrpath, $expr, @expr); - delete $attrpath; + state->addAttr($$, std::move($attrpath), @attrpath, $expr, @expr); } | binds[accum] INHERIT attrs ';' { $$ = $accum; - for (auto & [i, iPos] : *$attrs) { + for (auto & [i, iPos] : $attrs) { if ($accum->attrs.find(i.symbol) != $accum->attrs.end()) state->dupAttr(i.symbol, iPos, $accum->attrs[i.symbol].pos); $accum->attrs.emplace( i.symbol, ExprAttrs::AttrDef(new ExprVar(iPos, i.symbol), iPos, ExprAttrs::AttrDef::Kind::Inherited)); } - delete $attrs; } | binds[accum] INHERIT '(' expr ')' attrs ';' { $$ = $accum; @@ -441,7 +422,7 @@ binds1 $accum->inheritFromExprs = std::make_unique>(); $accum->inheritFromExprs->push_back($expr); auto from = new nix::ExprInheritFrom(state->at(@expr), $accum->inheritFromExprs->size() - 1); - for (auto & [i, iPos] : *$attrs) { + for (auto & [i, iPos] : $attrs) { if ($accum->attrs.find(i.symbol) != $accum->attrs.end()) state->dupAttr(i.symbol, iPos, $accum->attrs[i.symbol].pos); $accum->attrs.emplace( @@ -451,51 +432,45 @@ binds1 iPos, ExprAttrs::AttrDef::Kind::InheritedFrom)); } - delete $attrs; } | attrpath '=' expr ';' { $$ = new ExprAttrs; - state->addAttr($$, std::move(*$attrpath), @attrpath, $expr, @expr); - delete $attrpath; + state->addAttr($$, std::move($attrpath), @attrpath, $expr, @expr); } ; attrs - : attrs attr { $$ = $1; 
$1->emplace_back(AttrName(state->symbols.create($2)), state->at(@2)); } + : attrs attr { $$ = $1; $$.emplace_back(AttrName(state->symbols.create($2)), state->at(@2)); } | attrs string_attr { $$ = $1; std::visit(overloaded { - [&](std::string_view str) { $$->emplace_back(AttrName(state->symbols.create(str)), state->at(@2)); }, + [&](std::string_view str) { $$.emplace_back(AttrName(state->symbols.create(str)), state->at(@2)); }, [&](Expr * expr) { throw ParseError({ .msg = HintFmt("dynamic attributes not allowed in inherit"), .pos = state->positions[state->at(@2)] }); } - }, *$2); - delete $2; + }, $2); } - | { $$ = new std::vector>; } + | { } ; attrpath - : attrpath '.' attr { $$ = $1; $1->push_back(AttrName(state->symbols.create($3))); } + : attrpath '.' attr { $$ = $1; $$.push_back(AttrName(state->symbols.create($3))); } | attrpath '.' string_attr { $$ = $1; std::visit(overloaded { - [&](std::string_view str) { $$->push_back(AttrName(state->symbols.create(str))); }, - [&](Expr * expr) { $$->push_back(AttrName(expr)); } - }, *$3); - delete $3; + [&](std::string_view str) { $$.push_back(AttrName(state->symbols.create(str))); }, + [&](Expr * expr) { $$.push_back(AttrName(expr)); } + }, $3); } - | attr { $$ = new std::vector; $$->push_back(AttrName(state->symbols.create($1))); } + | attr { $$.push_back(AttrName(state->symbols.create($1))); } | string_attr - { $$ = new std::vector; - std::visit(overloaded { - [&](std::string_view str) { $$->push_back(AttrName(state->symbols.create(str))); }, - [&](Expr * expr) { $$->push_back(AttrName(expr)); } - }, *$1); - delete $1; + { std::visit(overloaded { + [&](std::string_view str) { $$.push_back(AttrName(state->symbols.create(str))); }, + [&](Expr * expr) { $$.push_back(AttrName(expr)); } + }, $1); } ; @@ -506,7 +481,7 @@ attr string_attr : '"' string_parts '"' { $$ = $2; } - | DOLLAR_CURLY expr '}' { $$ = new std::variant($2); } + | DOLLAR_CURLY expr '}' { $$ = $2; } ; expr_list @@ -524,14 +499,14 @@ formal_set formals : 
formals[accum] ',' formal - { $$ = $accum; $$->formals.emplace_back(*$formal); delete $formal; } + { $$ = $accum; $$->formals.emplace_back(std::move($formal)); } | formal - { $$ = new Formals; $$->formals.emplace_back(*$formal); delete $formal; } + { $$ = new Formals; $$->formals.emplace_back(std::move($formal)); } ; formal - : ID { $$ = new Formal{CUR_POS, state->symbols.create($1), 0}; } - | ID '?' expr { $$ = new Formal{CUR_POS, state->symbols.create($1), $3}; } + : ID { $$ = Formal{CUR_POS, state->symbols.create($1), 0}; } + | ID '?' expr { $$ = Formal{CUR_POS, state->symbols.create($1), $3}; } ; %% @@ -582,3 +557,4 @@ Expr * parseExprFromBuf( } +#pragma GCC diagnostic pop // end ignored "-Wswitch-enum" From 96c8cc550f23be83719e141b5f63f91964e0c824 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Wed, 22 Oct 2025 02:53:51 +0300 Subject: [PATCH 015/213] libexpr/meson: Rice the compiler inlining heuristics to improve perf of the bison generated parser MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Turns out both GCC and Clang need a bit of hand-holding to optimize the bison generated code well, otherwise parser performance tanks. 
(Comparisons against baseline in 7e8db2eb59d8798047e8cc025a3eb18613a8918c): For GCC: Benchmark 1 (15 runs): result/bin/nix-instantiate --parse ../nixpkgs/pkgs/development/haskell-modules/hackage-packages.nix measurement mean ± σ min … max outliers delta wall_time 335ms ± 2.89ms 332ms … 342ms 0 ( 0%) 0% Benchmark 2 (16 runs): result-old/bin/nix-instantiate --parse ../nixpkgs/pkgs/development/haskell-modules/hackage-packages.nix measurement mean ± σ min … max outliers delta wall_time 330ms ± 2.87ms 326ms … 337ms 0 ( 0%) - 1.4% ± 0.6% For Clang: Benchmark 1 (15 runs): result-clang/bin/nix-instantiate --parse ../nixpkgs/pkgs/development/haskell-modules/hackage-packages.nix measurement mean ± σ min … max outliers delta wall_time 340ms ± 1.43ms 338ms … 343ms 0 ( 0%) 0% Benchmark 2 (15 runs): result-old-clang/bin/nix-instantiate --parse ../nixpkgs/pkgs/development/haskell-modules/hackage-packages.nix measurement mean ± σ min … max outliers delta wall_time 334ms ± 1.61ms 332ms … 338ms 0 ( 0%) ⚡- 1.7% ± 0.3% --- src/libexpr/meson.build | 49 +++++++++++++++++++++++++++++++++++++++-- 1 file changed, 47 insertions(+), 2 deletions(-) diff --git a/src/libexpr/meson.build b/src/libexpr/meson.build index 1314ab65b..e40d27722 100644 --- a/src/libexpr/meson.build +++ b/src/libexpr/meson.build @@ -183,17 +183,62 @@ subdir('primops') subdir('nix-meson-build-support/export-all-symbols') subdir('nix-meson-build-support/windows-version') +# Turns out that Bison/Flex are particularly sensitive to compilers +# failing to inline functions. For that reason we crank up the inlining +# threshold manually for optimized builds. Yes, this can be considered 'ricing' +# the compiler, but it does pay off. +# +# NOTE: missed inlining can be spotted (for Clang) using -Rpass-missed=inline +# and -fdump-ipa-inline-missed (for GCC). 
+parser_library_cpp_args = [] + +if not get_option('debug') + if cxx.get_id() == 'clang' + # The default as of LLVM 21 is 225: + # llc --help-hidden | grep inline-threshold + parser_library_cpp_args += [ + '-mllvm', + '-inline-threshold=5000', + ] + elif cxx.get_id() == 'gcc' + parser_library_cpp_args += [ + '--param=max-inline-insns-single=1000', + '--param=max-inline-insns-auto=1000', + '--param=inline-unit-growth=400', + ] + endif +endif + +# Working around https://github.com/mesonbuild/meson/issues/1367. +parser_library = static_library( + 'nixexpr-parser', + parser_tab, + lexer_tab, + cpp_args : parser_library_cpp_args, + dependencies : deps_public + deps_private + deps_other, + include_directories : include_dirs, + # 1. Stdlib and regular assertions regress parser performance significantly, so build without + # them for this one library when building in a release configuration. + # 2. Disable LTO for GCC because then inlining flags won't apply, since LTO in GCC is done + # by plonking down GIMPLE in the archive. + override_options : [ + 'b_ndebug=@0@'.format(not get_option('debug')), + 'b_lto=@0@'.format(cxx.get_id() != 'gcc'), + ], +) + this_library = library( 'nixexpr', sources, config_priv_h, - parser_tab, - lexer_tab, + parser_tab[1], + lexer_tab[1], generated_headers, soversion : nix_soversion, dependencies : deps_public + deps_private + deps_other, include_directories : include_dirs, link_args : linker_export_flags, + link_whole : [ parser_library ], prelink : true, # For C++ static initializers install : true, cpp_pch : do_pch ? [ 'pch/precompiled-headers.hh' ] : [], From 387eceff45c4cb5a58fe6b4b20a65613d82a290d Mon Sep 17 00:00:00 2001 From: adeci Date: Wed, 22 Oct 2025 13:53:31 -0400 Subject: [PATCH 016/213] fetchers: Add helpful hint for file+git URL scheme error At least one user has probably used `file+git://` when they mean `git+file://`, maybe thinking of it as "a file-based git repository". 
This adds a specific error message to hint at the correct URL scheme format and may save some users from resorting to `path:///` and copying an entire repo. --- src/libfetchers/fetchers.cc | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index 7c741a7a3..324e8884c 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -5,6 +5,7 @@ #include "nix/util/json-utils.hh" #include "nix/fetchers/fetch-settings.hh" #include "nix/fetchers/fetch-to-store.hh" +#include "nix/util/url.hh" #include @@ -65,6 +66,12 @@ Input Input::fromURL(const Settings & settings, const ParsedURL & url, bool requ } } + // Provide a helpful hint when user tries file+git instead of git+file + auto parsedScheme = parseUrlScheme(url.scheme); + if (parsedScheme.application == "file" && parsedScheme.transport == "git") { + throw Error("input '%s' is unsupported; did you mean 'git+file' instead of 'file+git'?", url); + } + throw Error("input '%s' is unsupported", url); } From 459f9e01851cadb7f642cbce473ed7dff215f314 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Domen=20Ko=C5=BEar?= Date: Wed, 22 Oct 2025 13:38:05 -0500 Subject: [PATCH 017/213] Fix misleading error messages for missing NARs due to stale cache MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When Nix's SQLite narinfo cache indicates a NAR exists, but the NAR has been garbage collected from the binary cache, Nix displays error messages even though the operation succeeds via fallback. This is misleading because the cached narinfo is simply outdated. This changes SubstituteGone exceptions to produce warnings instead of errors, accurately reflecting that this is an expected cache coherency issue, not an actual failure. 
Fixes #11411 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude --- src/libstore/build/substitution-goal.cc | 8 +++++--- tests/functional/binary-cache.sh | 8 +++++++- 2 files changed, 12 insertions(+), 4 deletions(-) diff --git a/src/libstore/build/substitution-goal.cc b/src/libstore/build/substitution-goal.cc index d16e530a4..ac18de304 100644 --- a/src/libstore/build/substitution-goal.cc +++ b/src/libstore/build/substitution-goal.cc @@ -268,16 +268,18 @@ Goal::Co PathSubstitutionGoal::tryToRun( try { promise.get_future().get(); } catch (std::exception & e) { - printError(e.what()); - /* Cause the parent build to fail unless --fallback is given, or the substitute has disappeared. The latter case behaves the same as the substitute never having existed in the first place. */ try { throw; - } catch (SubstituteGone &) { + } catch (SubstituteGone & sg) { + /* Missing NARs are expected when they've been garbage collected. + This is not a failure, so log as a warning instead of an error. */ + logWarning({.msg = sg.info().msg}); } catch (...) 
{ + printError(e.what()); substituterFailed = true; } diff --git a/tests/functional/binary-cache.sh b/tests/functional/binary-cache.sh index 2c102df07..d801ac6aa 100755 --- a/tests/functional/binary-cache.sh +++ b/tests/functional/binary-cache.sh @@ -111,7 +111,13 @@ clearStore mv "$cacheDir/nar" "$cacheDir/nar2" -nix-build --substituters "file://$cacheDir" --no-require-sigs dependencies.nix -o "$TEST_ROOT/result" +nix-build --substituters "file://$cacheDir" --no-require-sigs dependencies.nix -o "$TEST_ROOT/result" 2>&1 | tee "$TEST_ROOT/log" + +# Verify that missing NARs produce warnings, not errors +# The build should succeed despite the warnings +grepQuiet "does not exist in binary cache" "$TEST_ROOT/log" +# Ensure the message is not at error level by checking that the command succeeded +[ -e "$TEST_ROOT/result" ] mv "$cacheDir/nar2" "$cacheDir/nar" From 350d60283281a0dba5c3aa4753983754de74ecc9 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Thu, 23 Oct 2025 01:49:31 +0300 Subject: [PATCH 018/213] meson: Only enable b_lto for nixexpr-parser when b_lto is enabled globally --- src/libexpr/meson.build | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/meson.build b/src/libexpr/meson.build index e40d27722..18c4c7fa3 100644 --- a/src/libexpr/meson.build +++ b/src/libexpr/meson.build @@ -223,7 +223,7 @@ parser_library = static_library( # by plonking down GIMPLE in the archive. override_options : [ 'b_ndebug=@0@'.format(not get_option('debug')), - 'b_lto=@0@'.format(cxx.get_id() != 'gcc'), + 'b_lto=@0@'.format(get_option('b_lto') and cxx.get_id() != 'gcc'), ], ) From ad5c6a53b91b6d9b0165e0cf09d9397dfd4657d7 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Thu, 23 Oct 2025 02:01:14 +0300 Subject: [PATCH 019/213] ci: Move magic-nix-cache-action into install-nix-action composite This reduces duplication and pins the underlying version of magic-nix-cache, as we already do with other actions. 
--- .github/actions/install-nix-action/action.yaml | 11 +++++++++++ .github/workflows/ci.yml | 6 +----- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/.github/actions/install-nix-action/action.yaml b/.github/actions/install-nix-action/action.yaml index 46abea179..d694b8eae 100644 --- a/.github/actions/install-nix-action/action.yaml +++ b/.github/actions/install-nix-action/action.yaml @@ -23,6 +23,10 @@ inputs: github_token: description: "Github token" required: true + use_cache: + description: "Whether to setup magic-nix-cache" + default: true + required: false runs: using: "composite" steps: @@ -118,3 +122,10 @@ runs: source-url: ${{ inputs.experimental-installer-version != 'latest' && 'https://artifacts.nixos.org/experimental-installer/tag/${{ inputs.experimental-installer-version }}/${{ env.EXPERIMENTAL_INSTALLER_ARTIFACT }}' || '' }} nix-package-url: ${{ inputs.dogfood == 'true' && steps.download-nix-installer.outputs.tarball-path || (inputs.tarball_url || '') }} extra-conf: ${{ inputs.extra_nix_config }} + - uses: DeterminateSystems/magic-nix-cache-action@565684385bcd71bad329742eefe8d12f2e765b39 # v13 + if: ${{ inputs.use_cache == 'true' }} + with: + diagnostic-endpoint: '' + use-flakehub: false + use-gha-cache: true + source-revision: c2f46a0afa5f95fd4c184a533afd280c68cf63ff # v0.1.6 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1edfcf167..5766ba3c1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,6 +29,7 @@ jobs: extra_nix_config: experimental-features = nix-command flakes github_token: ${{ secrets.GITHUB_TOKEN }} + use_cache: false - run: nix flake show --all-systems --json pre-commit-checks: @@ -41,7 +42,6 @@ jobs: dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }} extra_nix_config: experimental-features = nix-command flakes github_token: ${{ secrets.GITHUB_TOKEN }} - - uses: DeterminateSystems/magic-nix-cache-action@main 
- run: ./ci/gha/tests/pre-commit-checks basic-checks: @@ -92,7 +92,6 @@ jobs: dogfood: ${{ github.event_name == 'workflow_dispatch' && inputs.dogfood || github.event_name != 'workflow_dispatch' }} # The sandbox would otherwise be disabled by default on Darwin extra_nix_config: "sandbox = true" - - uses: DeterminateSystems/magic-nix-cache-action@main # Since ubuntu 22.30, unprivileged usernamespaces are no longer allowed to map to the root user: # https://ubuntu.com/blog/ubuntu-23-10-restricted-unprivileged-user-namespaces - run: sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0 @@ -230,7 +229,6 @@ jobs: - uses: cachix/install-nix-action@v31 with: install_url: https://releases.nixos.org/nix/nix-2.20.3/install - - uses: DeterminateSystems/magic-nix-cache-action@main - run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#nix.version | tr -d \")" >> $GITHUB_ENV - run: nix --experimental-features 'nix-command flakes' build .#dockerImage -L - run: docker load -i ./result/image.tar.gz @@ -289,7 +287,6 @@ jobs: extra_nix_config: experimental-features = nix-command flakes github_token: ${{ secrets.GITHUB_TOKEN }} - - uses: DeterminateSystems/magic-nix-cache-action@main - run: nix build -L --out-link ./new-nix && PATH=$(pwd)/new-nix/bin:$PATH MAX_FLAKES=25 flake-regressions/eval-all.sh profile_build: @@ -310,7 +307,6 @@ jobs: extra_nix_config: | experimental-features = flakes nix-command ca-derivations impure-derivations max-jobs = 1 - - uses: DeterminateSystems/magic-nix-cache-action@main - run: | nix build -L --file ./ci/gha/profile-build buildTimeReport --out-link build-time-report.md cat build-time-report.md >> $GITHUB_STEP_SUMMARY From c8a15bf70d8ec73b11440a6272e20bc23ce10bb3 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Thu, 23 Oct 2025 02:03:23 +0300 Subject: [PATCH 020/213] ci: Pin cachix action --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml 
b/.github/workflows/ci.yml index 5766ba3c1..48fdf09f1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -170,7 +170,7 @@ jobs: echo "installer-url=file://$GITHUB_WORKSPACE/out" >> "$GITHUB_OUTPUT" TARBALL_PATH="$(find "$GITHUB_WORKSPACE/out" -name 'nix*.tar.xz' -print | head -n 1)" echo "tarball-path=file://$TARBALL_PATH" >> "$GITHUB_OUTPUT" - - uses: cachix/install-nix-action@v31 + - uses: cachix/install-nix-action@c134e4c9e34bac6cab09cf239815f9339aaaf84e # v31.5.1 if: ${{ !matrix.experimental-installer }} with: install_url: ${{ format('{0}/install', steps.installer-tarball-url.outputs.installer-url) }} From f3d8d1f719156ad27a03d2b8211e4295d6f778bf Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Thu, 23 Oct 2025 02:06:34 +0300 Subject: [PATCH 021/213] ci: Reuse composite install-nix-action for docker_push_image job --- .github/workflows/ci.yml | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 48fdf09f1..8a0820903 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -226,11 +226,13 @@ jobs: - uses: actions/checkout@v5 with: fetch-depth: 0 - - uses: cachix/install-nix-action@v31 + - uses: ./.github/actions/install-nix-action with: - install_url: https://releases.nixos.org/nix/nix-2.20.3/install - - run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#nix.version | tr -d \")" >> $GITHUB_ENV - - run: nix --experimental-features 'nix-command flakes' build .#dockerImage -L + dogfood: false + extra_nix_config: | + experimental-features = flakes nix-command + - run: echo NIX_VERSION="$(nix eval .\#nix.version | tr -d \")" >> $GITHUB_ENV + - run: nix build .#dockerImage -L - run: docker load -i ./result/image.tar.gz - run: docker tag nix:$NIX_VERSION ${{ secrets.DOCKERHUB_USERNAME }}/nix:$NIX_VERSION - run: docker tag nix:$NIX_VERSION ${{ secrets.DOCKERHUB_USERNAME }}/nix:master From 
3c83856494e482e31959039fb328557d601fca77 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Thu, 23 Oct 2025 02:08:54 +0300 Subject: [PATCH 022/213] ci: Update pinned install_url 2.30.2 -> 2.32.1 --- .github/actions/install-nix-action/action.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/install-nix-action/action.yaml b/.github/actions/install-nix-action/action.yaml index d694b8eae..3f668864b 100644 --- a/.github/actions/install-nix-action/action.yaml +++ b/.github/actions/install-nix-action/action.yaml @@ -16,7 +16,7 @@ inputs: install_url: description: "URL of the Nix installer" required: false - default: "https://releases.nixos.org/nix/nix-2.30.2/install" + default: "https://releases.nixos.org/nix/nix-2.32.1/install" tarball_url: description: "URL of the Nix tarball to use with the experimental installer" required: false From 953929f8990b6ca639b65d9f105c823c770bf7c1 Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Wed, 22 Oct 2025 20:08:33 +0000 Subject: [PATCH 023/213] fix(libstore): use CURLOPT_POSTFIELDSIZE_LARGE for POST requests Fix POST requests with data to use the correct curl option for specifying body size. Previously used CURLOPT_INFILESIZE_LARGE for both POST and PUT, but POST requires CURLOPT_POSTFIELDSIZE_LARGE. This caused POST request bodies to not be sent correctly, manifesting as S3 multipart CompleteMultipartUpload requests failing with "You must specify at least one part" even though the XML body contained valid parts. 
--- src/libstore/filetransfer.cc | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index 201f2984e..68c8a9e3a 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -388,13 +388,15 @@ struct curlFileTransfer : public FileTransfer curl_easy_setopt(req, CURLOPT_NOBODY, 1); if (request.data) { - if (request.post) + if (request.post) { curl_easy_setopt(req, CURLOPT_POST, 1L); - else + curl_easy_setopt(req, CURLOPT_POSTFIELDSIZE_LARGE, (curl_off_t) request.data->length()); + } else { curl_easy_setopt(req, CURLOPT_UPLOAD, 1L); + curl_easy_setopt(req, CURLOPT_INFILESIZE_LARGE, (curl_off_t) request.data->length()); + } curl_easy_setopt(req, CURLOPT_READFUNCTION, readCallbackWrapper); curl_easy_setopt(req, CURLOPT_READDATA, this); - curl_easy_setopt(req, CURLOPT_INFILESIZE_LARGE, (curl_off_t) request.data->length()); curl_easy_setopt(req, CURLOPT_SEEKFUNCTION, seekCallbackWrapper); curl_easy_setopt(req, CURLOPT_SEEKDATA, this); } From b047cecf5c9d1cd7b9ad6c1b9e69c66f34f3c603 Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Tue, 21 Oct 2025 08:32:32 +0000 Subject: [PATCH 024/213] refactor(libstore): extract getCompressionMethod() in HttpBinaryCacheStore Extract the path-based compression method determination logic into a protected method that returns std::optional. This allows subclasses to reuse the logic and makes the semantics clearer (nullopt means no compression, not empty string). This prepares for S3BinaryCacheStore to apply the same compression rules when implementing multipart uploads. 
--- src/libstore/http-binary-cache-store.cc | 28 +++++++++++++------------ 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 8d5f427af..9567aec2f 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -97,6 +97,18 @@ public: protected: + std::optional getCompressionMethod(const std::string & path) + { + if (hasSuffix(path, ".narinfo") && !config->narinfoCompression.get().empty()) + return config->narinfoCompression; + else if (hasSuffix(path, ".ls") && !config->lsCompression.get().empty()) + return config->lsCompression; + else if (hasPrefix(path, "log/") && !config->logCompression.get().empty()) + return config->logCompression; + else + return std::nullopt; + } + void maybeDisable() { auto state(_state.lock()); @@ -149,19 +161,9 @@ protected: auto data = StreamToSourceAdapter(istream).drain(); - // Determine compression method based on file type - std::string compressionMethod; - if (hasSuffix(path, ".narinfo")) - compressionMethod = config->narinfoCompression; - else if (hasSuffix(path, ".ls")) - compressionMethod = config->lsCompression; - else if (hasPrefix(path, "log/")) - compressionMethod = config->logCompression; - - // Apply compression if configured - if (!compressionMethod.empty()) { - data = compress(compressionMethod, data); - req.headers.emplace_back("Content-Encoding", compressionMethod); + if (auto compressionMethod = getCompressionMethod(path)) { + data = compress(*compressionMethod, data); + req.headers.emplace_back("Content-Encoding", *compressionMethod); } req.data = std::move(data); From 78888ec8a8cf8194c41adc334a0dee4db4f8d999 Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Thu, 23 Oct 2025 06:02:58 +0000 Subject: [PATCH 025/213] docs: add s3:ListBucket to S3 read permissions The s3:ListBucket permission is required for read operations on S3 binary caches, not just for writes. 
Without this permission, users get "Access Denied" errors when running nix-build. --- src/libstore/s3-binary-cache-store.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/libstore/s3-binary-cache-store.md b/src/libstore/s3-binary-cache-store.md index daa41defd..0b0c26919 100644 --- a/src/libstore/s3-binary-cache-store.md +++ b/src/libstore/s3-binary-cache-store.md @@ -27,7 +27,8 @@ like the following to be accessible: "Sid": "AllowDirectReads", "Action": [ "s3:GetObject", - "s3:GetBucketLocation" + "s3:GetBucketLocation", + "s3:ListBucket" ], "Effect": "Allow", "Resource": [ @@ -51,7 +52,7 @@ Consult the documentation linked above for further details. ### Authenticated reads to your S3 binary cache -Your bucket will need a bucket policy allowing the desired users to perform the `s3:GetObject` and `s3:GetBucketLocation` action on all objects in the bucket. +Your bucket will need a bucket policy allowing the desired users to perform the `s3:GetObject`, `s3:GetBucketLocation`, and `s3:ListBucket` actions on all objects in the bucket. The [anonymous policy given above](#anonymous-reads-to-your-s3-compatible-binary-cache) can be updated to have a restricted `Principal` to support this. 
### Authenticated writes to your S3-compatible binary cache From f594a8e11e06b9ed6b2d62efbf74e964e9c1848c Mon Sep 17 00:00:00 2001 From: Jens Petersen Date: Fri, 24 Oct 2025 01:24:04 +0800 Subject: [PATCH 026/213] libexpr needs boost-1.87+ for try_emplace_and_cvisit MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Since 2.32, nix now needs boost 1.87 or later to build, due to using unordered::concurrent_flat_map try_emplace_and_cvisit ../src/libexpr/eval.cc: In member function ‘void nix::EvalState::evalFile(const nix::SourcePath&, nix::Value&, bool)’: ../src/libexpr/eval.cc:1096:20: error: ‘class boost::unordered::concurrent_flat_map, std::equal_to, traceable_allocator > >’ has no member named ‘try_emplace_and_cvisit’; did you mean ‘try_emplace_or_cvisit’? 1096 | fileEvalCache->try_emplace_and_cvisit( | ^~~~~~~~~~~~~~~~~~~~~~ | try_emplace_or_cvisit See https://github.com/boostorg/unordered/commit/834580b53948eec553c232dda40beefc68b3e8f9 --- src/libutil/meson.build | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libutil/meson.build b/src/libutil/meson.build index acba0b81b..8b7a5d977 100644 --- a/src/libutil/meson.build +++ b/src/libutil/meson.build @@ -64,7 +64,7 @@ boost = dependency( 'url', ], include_type : 'system', - version : '>=1.82.0', + version : '>=1.87.0', ) # boost is a public dependency, but not a pkg-config dependency unfortunately, so we # put in `deps_other`. From c87f29a0b6c575f914b9a687e04f2727ccb5b9b2 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 23 Oct 2025 14:03:21 -0400 Subject: [PATCH 027/213] Fix some characterization tests A few changes had cropped up with `_NIX_TEST_ACCEPT=1`: 1. Blake hashing test JSON had a different indentation 2. Store URI had improper non-quoted spaces (1) was just fixed, as we trust nlohmann JSON to parse JSON correctly, regardless of whitespace. 
For (2), the existing URL was made a read-only test, since we very much wish to continue parsing such invalid URLs directly. And then the original read/write test was updated to properly percent-encode the space, as the normal form should be. --- src/libstore-tests/data/store-reference/local_3.txt | 2 +- .../data/store-reference/local_3_no_percent.txt | 1 + src/libstore-tests/store-reference.cc | 5 ++++- src/libutil-tests/data/hash/blake3-base64.json | 6 +++--- src/libutil-tests/data/hash/sha256-base64.json | 6 +++--- 5 files changed, 12 insertions(+), 8 deletions(-) create mode 100644 src/libstore-tests/data/store-reference/local_3_no_percent.txt diff --git a/src/libstore-tests/data/store-reference/local_3.txt b/src/libstore-tests/data/store-reference/local_3.txt index 2a67a3426..cd015d74f 100644 --- a/src/libstore-tests/data/store-reference/local_3.txt +++ b/src/libstore-tests/data/store-reference/local_3.txt @@ -1 +1 @@ -local://?root=/foo bar/baz \ No newline at end of file +local://?root=/foo%20bar/baz \ No newline at end of file diff --git a/src/libstore-tests/data/store-reference/local_3_no_percent.txt b/src/libstore-tests/data/store-reference/local_3_no_percent.txt new file mode 100644 index 000000000..2a67a3426 --- /dev/null +++ b/src/libstore-tests/data/store-reference/local_3_no_percent.txt @@ -0,0 +1 @@ +local://?root=/foo bar/baz \ No newline at end of file diff --git a/src/libstore-tests/store-reference.cc b/src/libstore-tests/store-reference.cc index 7b42b45a2..7ae944348 100644 --- a/src/libstore-tests/store-reference.cc +++ b/src/libstore-tests/store-reference.cc @@ -100,9 +100,12 @@ URI_TEST(local_1, localExample_1) URI_TEST(local_2, localExample_2) -/* Test path with spaces */ +/* Test path with encoded spaces */ URI_TEST(local_3, localExample_3) +/* Test path with spaces that are improperly not encoded */ +URI_TEST_READ(local_3_no_percent, localExample_3) + URI_TEST_READ(local_shorthand_1, localExample_1) URI_TEST_READ(local_shorthand_2, 
localExample_2) diff --git a/src/libutil-tests/data/hash/blake3-base64.json b/src/libutil-tests/data/hash/blake3-base64.json index d668c2d9c..b9a20cdb4 100644 --- a/src/libutil-tests/data/hash/blake3-base64.json +++ b/src/libutil-tests/data/hash/blake3-base64.json @@ -1,5 +1,5 @@ { - "algorithm": "blake3", - "format": "base64", - "hash": "nnDuFEmWX7YtBJBAoe0G7Dd0MNpuwTFz58T//NKL6YA=" + "algorithm": "blake3", + "format": "base64", + "hash": "nnDuFEmWX7YtBJBAoe0G7Dd0MNpuwTFz58T//NKL6YA=" } diff --git a/src/libutil-tests/data/hash/sha256-base64.json b/src/libutil-tests/data/hash/sha256-base64.json index 239764dd1..838af80a7 100644 --- a/src/libutil-tests/data/hash/sha256-base64.json +++ b/src/libutil-tests/data/hash/sha256-base64.json @@ -1,5 +1,5 @@ { - "algorithm": "sha256", - "format": "base64", - "hash": "8OTC92xYkW7CWPJGhRvqCR0U1CR6L8PhhpRGGxgW4Ts=" + "algorithm": "sha256", + "format": "base64", + "hash": "8OTC92xYkW7CWPJGhRvqCR0U1CR6L8PhhpRGGxgW4Ts=" } From d6f1e2de21b090ada8f658751ec1c528f13316bc Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 23 Oct 2025 21:17:09 +0200 Subject: [PATCH 028/213] Merge pull request #14323 from NixOS/skip-nar-parse addToStore(): Don't parse the NAR * StringSource: Implement skip() This is slightly faster than doing a read() into a buffer just to discard the data. 
* LocalStore::addToStore(): Skip unnecessary NARs rather than parsing them Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com> --- src/libstore/local-store.cc | 6 ++---- src/libutil/include/nix/util/serialise.hh | 2 ++ src/libutil/serialise.cc | 10 ++++++++++ 3 files changed, 14 insertions(+), 4 deletions(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 59d5cc24f..3f108f97e 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1048,15 +1048,13 @@ void LocalStore::addToStore(const ValidPathInfo & info, Source & source, RepairF /* In case we are not interested in reading the NAR: discard it. */ bool narRead = false; Finally cleanup = [&]() { - if (!narRead) { - NullFileSystemObjectSink sink; + if (!narRead) try { - parseDump(sink, source); + source.skip(info.narSize); } catch (...) { // TODO: should Interrupted be handled here? ignoreExceptionInDestructor(); } - } }; addTempRoot(info.path); diff --git a/src/libutil/include/nix/util/serialise.hh b/src/libutil/include/nix/util/serialise.hh index 8799e128f..d6845a494 100644 --- a/src/libutil/include/nix/util/serialise.hh +++ b/src/libutil/include/nix/util/serialise.hh @@ -255,6 +255,8 @@ struct StringSource : Source } size_t read(char * data, size_t len) override; + + void skip(size_t len) override; }; /** diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc index 47a00c8d6..ba153625e 100644 --- a/src/libutil/serialise.cc +++ b/src/libutil/serialise.cc @@ -242,6 +242,16 @@ size_t StringSource::read(char * data, size_t len) return n; } +void StringSource::skip(size_t len) +{ + const size_t remain = s.size() - pos; + if (len > remain) { + pos = s.size(); + throw EndOfFile("end of string reached"); + } + pos += len; +} + std::unique_ptr sourceToSink(std::function fun) { struct SourceToSink : FinishSink From 4f5af471fbfc0f551ace552b3f8f184641814313 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Thu, 23 Oct 2025 
23:49:41 +0300 Subject: [PATCH 029/213] Revert "libmain: Catch logger exceptions in `handleExceptions`" This reverts commit 90d1ff480590b56db202a20c3927df4bf05e4eac. The initial issue with EPIPE was solved in 9f680874c5aa15304c3ab3b942170a743287f87b. Now this patch does more harm than good by eating up boost::io::format_error exceptions that are bugs. --- src/libmain/shared.cc | 49 +++++++++++++++++++------------------------ 1 file changed, 22 insertions(+), 27 deletions(-) diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 7187e9720..4b36ec98e 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -320,34 +320,29 @@ int handleExceptions(const std::string & programName, std::function fun) std::string error = ANSI_RED "error:" ANSI_NORMAL " "; try { try { - try { - fun(); - } catch (...) { - /* Subtle: we have to make sure that any `interrupted' - condition is discharged before we reach printMsg() - below, since otherwise it will throw an (uncaught) - exception. */ - setInterruptThrown(); - throw; - } - } catch (Exit & e) { - return e.status; - } catch (UsageError & e) { - logError(e.info()); - printError("Try '%1% --help' for more information.", programName); - return 1; - } catch (BaseError & e) { - logError(e.info()); - return e.info().status; - } catch (std::bad_alloc & e) { - printError(error + "out of memory"); - return 1; - } catch (std::exception & e) { - printError(error + e.what()); - return 1; + fun(); + } catch (...) { + /* Subtle: we have to make sure that any `interrupted' + condition is discharged before we reach printMsg() + below, since otherwise it will throw an (uncaught) + exception. */ + setInterruptThrown(); + throw; } - } catch (...) { - /* In case logger also throws just give up. 
*/ + } catch (Exit & e) { + return e.status; + } catch (UsageError & e) { + logError(e.info()); + printError("Try '%1% --help' for more information.", programName); + return 1; + } catch (BaseError & e) { + logError(e.info()); + return e.info().status; + } catch (std::bad_alloc & e) { + printError(error + "out of memory"); + return 1; + } catch (std::exception & e) { + printError(error + e.what()); return 1; } From b5ae3e10c27a1fd0b3de453dcf02c83e3f2c4e10 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 24 Oct 2025 00:29:08 +0300 Subject: [PATCH 030/213] libstore/filetransfer: Remove verifyTLS from FileTransferRequest, since it's always true This variable is always true, so there's no use-case for it anymore. --- src/libstore/filetransfer.cc | 9 ++------- src/libstore/include/nix/store/filetransfer.hh | 1 - 2 files changed, 2 insertions(+), 8 deletions(-) diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index 201f2984e..1c97cf400 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -399,13 +399,8 @@ struct curlFileTransfer : public FileTransfer curl_easy_setopt(req, CURLOPT_SEEKDATA, this); } - if (request.verifyTLS) { - if (settings.caFile != "") - curl_easy_setopt(req, CURLOPT_CAINFO, settings.caFile.get().c_str()); - } else { - curl_easy_setopt(req, CURLOPT_SSL_VERIFYPEER, 0); - curl_easy_setopt(req, CURLOPT_SSL_VERIFYHOST, 0); - } + if (settings.caFile != "") + curl_easy_setopt(req, CURLOPT_CAINFO, settings.caFile.get().c_str()); #if !defined(_WIN32) && LIBCURL_VERSION_NUM >= 0x071000 curl_easy_setopt(req, CURLOPT_SOCKOPTFUNCTION, cloexec_callback); diff --git a/src/libstore/include/nix/store/filetransfer.hh b/src/libstore/include/nix/store/filetransfer.hh index 34ec316ef..2b86f6ac9 100644 --- a/src/libstore/include/nix/store/filetransfer.hh +++ b/src/libstore/include/nix/store/filetransfer.hh @@ -99,7 +99,6 @@ struct FileTransferRequest VerbatimURL uri; Headers headers; std::string expectedETag; - 
bool verifyTLS = true; bool head = false; bool post = false; size_t tries = fileTransferSettings.tries; From 4c4eb5d07fc59513fb44ca348b42a33015a65e50 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 24 Oct 2025 01:34:06 +0300 Subject: [PATCH 031/213] ci: Bump magic-nix-cache with post-build-hook fix No tagged release with the fix for [^]. [^]: https://github.com/DeterminateSystems/magic-nix-cache/commit/578f01e1473129cc289d579ba6dee9cdac40aeab --- .github/actions/install-nix-action/action.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/actions/install-nix-action/action.yaml b/.github/actions/install-nix-action/action.yaml index 3f668864b..00d02d6a2 100644 --- a/.github/actions/install-nix-action/action.yaml +++ b/.github/actions/install-nix-action/action.yaml @@ -128,4 +128,4 @@ runs: diagnostic-endpoint: '' use-flakehub: false use-gha-cache: true - source-revision: c2f46a0afa5f95fd4c184a533afd280c68cf63ff # v0.1.6 + source-revision: 92d9581367be2233c2d5714a2640e1339f4087d8 # main From 7308fde0bc0ee9a902891385453ef6313cb91194 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Thu, 23 Oct 2025 19:18:57 +0200 Subject: [PATCH 032/213] Allow access to the result of fetchClosure --- src/libexpr/primops/fetchClosure.cc | 6 ++++++ tests/functional/dependencies.builder0.sh | 2 ++ tests/functional/fetchClosure.sh | 8 ++++++++ 3 files changed, 16 insertions(+) diff --git a/src/libexpr/primops/fetchClosure.cc b/src/libexpr/primops/fetchClosure.cc index 63da53aa9..6e1389814 100644 --- a/src/libexpr/primops/fetchClosure.cc +++ b/src/libexpr/primops/fetchClosure.cc @@ -64,6 +64,8 @@ static void runFetchClosureWithRewrite( .pos = state.positions[pos]}); } + state.allowClosure(toPath); + state.mkStorePathString(toPath, v); } @@ -91,6 +93,8 @@ static void runFetchClosureWithContentAddressedPath( .pos = state.positions[pos]}); } + state.allowClosure(fromPath); + state.mkStorePathString(fromPath, v); } @@ -115,6 +119,8 @@ static void 
runFetchClosureWithInputAddressedPath( .pos = state.positions[pos]}); } + state.allowClosure(fromPath); + state.mkStorePathString(fromPath, v); } diff --git a/tests/functional/dependencies.builder0.sh b/tests/functional/dependencies.builder0.sh index 6fbe4a07a..f680cf7f2 100644 --- a/tests/functional/dependencies.builder0.sh +++ b/tests/functional/dependencies.builder0.sh @@ -17,4 +17,6 @@ ln -s "$out" "$out"/self echo program > "$out"/program chmod +x "$out"/program +echo '1 + 2' > "$out"/foo.nix + echo FOO diff --git a/tests/functional/fetchClosure.sh b/tests/functional/fetchClosure.sh index 9b79ab396..85a83d192 100755 --- a/tests/functional/fetchClosure.sh +++ b/tests/functional/fetchClosure.sh @@ -99,6 +99,14 @@ clearStore [ -e "$caPath" ] +# Test import-from-derivation on the result of fetchClosure. +[[ $(nix eval -v --expr " + import \"\${builtins.fetchClosure { + fromStore = \"file://$cacheDir\"; + fromPath = $caPath; + }}/foo.nix\" +") = 3 ]] + # Check that URL query parameters aren't allowed. clearStore narCache=$TEST_ROOT/nar-cache From 8d338c9234b8e7758eb8683174741be5d350d1aa Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 23 Oct 2025 17:11:37 -0400 Subject: [PATCH 033/213] JSON Schema for `DerivedPath` Note that this is "deriving path" in the manual -- the great sed of the code base to bring it in sync has yet to happen yet. 
--- doc/manual/package.nix | 1 + doc/manual/source/SUMMARY.md.in | 1 + .../source/protocols/json/deriving-path.md | 21 +++++++++++++++ doc/manual/source/protocols/json/meson.build | 1 + .../protocols/json/schema/deriving-path-v1 | 1 + .../json/schema/deriving-path-v1.yaml | 27 +++++++++++++++++++ src/json-schema-checks/deriving-path | 1 + src/json-schema-checks/meson.build | 9 +++++++ src/json-schema-checks/package.nix | 1 + 9 files changed, 63 insertions(+) create mode 100644 doc/manual/source/protocols/json/deriving-path.md create mode 120000 doc/manual/source/protocols/json/schema/deriving-path-v1 create mode 100644 doc/manual/source/protocols/json/schema/deriving-path-v1.yaml create mode 120000 src/json-schema-checks/deriving-path diff --git a/doc/manual/package.nix b/doc/manual/package.nix index 30486869e..eb20f8714 100644 --- a/doc/manual/package.nix +++ b/doc/manual/package.nix @@ -35,6 +35,7 @@ mkMesonDerivation (finalAttrs: { ../../.version # For example JSON ../../src/libutil-tests/data/hash + ../../src/libstore-tests/data/derived-path # Too many different types of files to filter for now ../../doc/manual ./. 
diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index f74ed7043..b4796f652 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -120,6 +120,7 @@ - [Hash](protocols/json/hash.md) - [Store Object Info](protocols/json/store-object-info.md) - [Derivation](protocols/json/derivation.md) + - [Deriving Path](protocols/json/deriving-path.md) - [Serving Tarball Flakes](protocols/tarball-fetcher.md) - [Store Path Specification](protocols/store-path.md) - [Nix Archive (NAR) Format](protocols/nix-archive.md) diff --git a/doc/manual/source/protocols/json/deriving-path.md b/doc/manual/source/protocols/json/deriving-path.md new file mode 100644 index 000000000..9851b371d --- /dev/null +++ b/doc/manual/source/protocols/json/deriving-path.md @@ -0,0 +1,21 @@ +{{#include deriving-path-v1-fixed.md}} + +## Examples + +### Constant + +```json +{{#include schema/deriving-path-v1/single_opaque.json}} +``` + +### Output of static derivation + +```json +{{#include schema/deriving-path-v1/single_built.json}} +``` + +### Output of dynamic derivation + +```json +{{#include schema/deriving-path-v1/single_built_built.json}} +``` diff --git a/doc/manual/source/protocols/json/meson.build b/doc/manual/source/protocols/json/meson.build index 44795599c..191ec6dbe 100644 --- a/doc/manual/source/protocols/json/meson.build +++ b/doc/manual/source/protocols/json/meson.build @@ -11,6 +11,7 @@ json_schema_config = files('json-schema-for-humans-config.yaml') schemas = [ 'hash-v1', 'derivation-v3', + 'deriving-path-v1', ] schema_files = files() diff --git a/doc/manual/source/protocols/json/schema/deriving-path-v1 b/doc/manual/source/protocols/json/schema/deriving-path-v1 new file mode 120000 index 000000000..92ec6d01a --- /dev/null +++ b/doc/manual/source/protocols/json/schema/deriving-path-v1 @@ -0,0 +1 @@ +../../../../../../src/libstore-tests/data/derived-path \ No newline at end of file diff --git 
a/doc/manual/source/protocols/json/schema/deriving-path-v1.yaml b/doc/manual/source/protocols/json/schema/deriving-path-v1.yaml new file mode 100644 index 000000000..9c0350d3d --- /dev/null +++ b/doc/manual/source/protocols/json/schema/deriving-path-v1.yaml @@ -0,0 +1,27 @@ +"$schema": http://json-schema.org/draft-04/schema# +"$id": https://nix.dev/manual/nix/latest/protocols/json/schema/deriving-path-v1.json +title: Deriving Path +description: | + This schema describes the JSON representation of Nix's [Deriving Path](@docroot@/store/derivation/index.md#deriving-path). +oneOf: + - title: Constant + description: | + See [Constant](@docroot@/store/derivation/index.md#deriving-path-constant) deriving path. + type: string + - title: Output + description: | + See [Output](@docroot@/store/derivation/index.md#deriving-path-output) deriving path. + type: object + properties: + drvPath: + "$ref": "#" + description: | + A deriving path to a [Derivation](@docroot@/store/derivation/index.md#store-derivation), whose output is being referred to. + output: + type: string + description: | + The name of an output produced by that derivation (e.g. "out", "doc", etc.). 
+ required: + - drvPath + - output + additionalProperties: false diff --git a/src/json-schema-checks/deriving-path b/src/json-schema-checks/deriving-path new file mode 120000 index 000000000..4f50b2ee9 --- /dev/null +++ b/src/json-schema-checks/deriving-path @@ -0,0 +1 @@ +../../src/libstore-tests/data/derived-path \ No newline at end of file diff --git a/src/json-schema-checks/meson.build b/src/json-schema-checks/meson.build index ebd6f6b2b..09da8770b 100644 --- a/src/json-schema-checks/meson.build +++ b/src/json-schema-checks/meson.build @@ -52,6 +52,15 @@ schemas = [ # 'output-inputAddressed.json', # ], # }, + { + 'stem' : 'deriving-path', + 'schema' : schema_dir / 'deriving-path-v1.yaml', + 'files' : [ + 'single_opaque.json', + 'single_built.json', + 'single_built_built.json', + ], + }, ] # Validate each example against the schema diff --git a/src/json-schema-checks/package.nix b/src/json-schema-checks/package.nix index 41458adb8..cf4e4cb19 100644 --- a/src/json-schema-checks/package.nix +++ b/src/json-schema-checks/package.nix @@ -22,6 +22,7 @@ mkMesonDerivation (finalAttrs: { ../../doc/manual/source/protocols/json/schema ../../src/libutil-tests/data/hash ../../src/libstore-tests/data/derivation + ../../src/libstore-tests/data/derived-path ./. ]; From f1968ea38e51201b37962a9cfd80775989a56d46 Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Thu, 23 Oct 2025 05:03:16 +0000 Subject: [PATCH 034/213] refactor(libstore): replace HTTP method boolean flags with enum Replace the individual boolean flags (head, post) with a unified HttpMethod enum struct in FileTransferRequest. 
--- src/libfetchers/git-lfs-fetch.cc | 2 +- src/libstore/filetransfer.cc | 4 ++-- src/libstore/http-binary-cache-store.cc | 2 +- .../include/nix/store/filetransfer.hh | 21 ++++++++++++++++--- 4 files changed, 22 insertions(+), 7 deletions(-) diff --git a/src/libfetchers/git-lfs-fetch.cc b/src/libfetchers/git-lfs-fetch.cc index 9688daa4a..aee1163bb 100644 --- a/src/libfetchers/git-lfs-fetch.cc +++ b/src/libfetchers/git-lfs-fetch.cc @@ -209,7 +209,7 @@ std::vector Fetch::fetchUrls(const std::vector & pointe auto url = api.endpoint + "/objects/batch"; const auto & authHeader = api.authHeader; FileTransferRequest request(parseURL(url)); - request.post = true; + request.method = HttpMethod::POST; Headers headers; if (authHeader.has_value()) headers.push_back({"Authorization", *authHeader}); diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index b9e52e7d5..c2c2a86c4 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -384,11 +384,11 @@ struct curlFileTransfer : public FileTransfer if (settings.downloadSpeed.get() > 0) curl_easy_setopt(req, CURLOPT_MAX_RECV_SPEED_LARGE, (curl_off_t) (settings.downloadSpeed.get() * 1024)); - if (request.head) + if (request.method == HttpMethod::HEAD) curl_easy_setopt(req, CURLOPT_NOBODY, 1); if (request.data) { - if (request.post) { + if (request.method == HttpMethod::POST) { curl_easy_setopt(req, CURLOPT_POST, 1L); curl_easy_setopt(req, CURLOPT_POSTFIELDSIZE_LARGE, (curl_off_t) request.data->length()); } else { diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 9567aec2f..5c455dd04 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -139,7 +139,7 @@ protected: try { FileTransferRequest request(makeRequest(path)); - request.head = true; + request.method = HttpMethod::HEAD; getFileTransfer()->download(request); return true; } catch (FileTransferError & e) { diff --git 
a/src/libstore/include/nix/store/filetransfer.hh b/src/libstore/include/nix/store/filetransfer.hh index 2b86f6ac9..286014517 100644 --- a/src/libstore/include/nix/store/filetransfer.hh +++ b/src/libstore/include/nix/store/filetransfer.hh @@ -83,6 +83,15 @@ extern FileTransferSettings fileTransferSettings; extern const unsigned int RETRY_TIME_MS_DEFAULT; +/** + * HTTP methods supported by FileTransfer. + */ +enum struct HttpMethod { + GET, + HEAD, + POST, +}; + /** * Username and optional password for HTTP basic authentication. * These are used with curl's CURLOPT_USERNAME and CURLOPT_PASSWORD options @@ -99,8 +108,7 @@ struct FileTransferRequest VerbatimURL uri; Headers headers; std::string expectedETag; - bool head = false; - bool post = false; + HttpMethod method = HttpMethod::GET; size_t tries = fileTransferSettings.tries; unsigned int baseRetryTimeMs = RETRY_TIME_MS_DEFAULT; ActivityId parentAct; @@ -129,7 +137,14 @@ struct FileTransferRequest std::string verb() const { - return data ? "upload" : "download"; + switch (method) { + case HttpMethod::HEAD: + case HttpMethod::GET: + return "download"; + case HttpMethod::POST: + return "upload"; + } + unreachable(); } private: From d924374bf22176263409c3ad49982aad4e906b2f Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Thu, 23 Oct 2025 20:48:21 +0000 Subject: [PATCH 035/213] docs(libstore): document verb() method returns verb root for gerund form Add documentation to FileTransferRequest::verb() explaining that it returns a verb root intended to be concatenated with "ing" to form the gerund. 
--- src/libstore/include/nix/store/filetransfer.hh | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/libstore/include/nix/store/filetransfer.hh b/src/libstore/include/nix/store/filetransfer.hh index 286014517..a9ed05dc8 100644 --- a/src/libstore/include/nix/store/filetransfer.hh +++ b/src/libstore/include/nix/store/filetransfer.hh @@ -135,6 +135,11 @@ struct FileTransferRequest { } + /** + * Returns the verb root for logging purposes. + * The returned string is intended to be concatenated with "ing" to form the gerund, + * e.g., "download" + "ing" -> "downloading", "upload" + "ing" -> "uploading". + */ std::string verb() const { switch (method) { From afe5ed879f7015e62bcf431f0bdf70093af63ca5 Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Tue, 21 Oct 2025 09:02:36 +0000 Subject: [PATCH 036/213] feat(libstore): add DELETE method support to FileTransfer Add support for HTTP DELETE requests to FileTransfer infrastructure: This enables S3 multipart upload abort functionality via DELETE requests to S3 endpoints. 
--- src/libstore/filetransfer.cc | 8 ++++++++ src/libstore/include/nix/store/filetransfer.hh | 8 ++++++++ 2 files changed, 16 insertions(+) diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index c2c2a86c4..9fd7a967b 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -387,6 +387,9 @@ struct curlFileTransfer : public FileTransfer if (request.method == HttpMethod::HEAD) curl_easy_setopt(req, CURLOPT_NOBODY, 1); + if (request.method == HttpMethod::DELETE) + curl_easy_setopt(req, CURLOPT_CUSTOMREQUEST, "DELETE"); + if (request.data) { if (request.method == HttpMethod::POST) { curl_easy_setopt(req, CURLOPT_POST, 1L); @@ -919,6 +922,11 @@ FileTransferResult FileTransfer::upload(const FileTransferRequest & request) return enqueueFileTransfer(request).get(); } +FileTransferResult FileTransfer::deleteResource(const FileTransferRequest & request) +{ + return enqueueFileTransfer(request).get(); +} + void FileTransfer::download( FileTransferRequest && request, Sink & sink, std::function resultCallback) { diff --git a/src/libstore/include/nix/store/filetransfer.hh b/src/libstore/include/nix/store/filetransfer.hh index a9ed05dc8..402ee4900 100644 --- a/src/libstore/include/nix/store/filetransfer.hh +++ b/src/libstore/include/nix/store/filetransfer.hh @@ -90,6 +90,7 @@ enum struct HttpMethod { GET, HEAD, POST, + DELETE, }; /** @@ -148,6 +149,8 @@ struct FileTransferRequest return "download"; case HttpMethod::POST: return "upload"; + case HttpMethod::DELETE: + return "delet"; } unreachable(); } @@ -221,6 +224,11 @@ struct FileTransfer */ FileTransferResult upload(const FileTransferRequest & request); + /** + * Synchronously delete a resource. + */ + FileTransferResult deleteResource(const FileTransferRequest & request); + /** * Download a file, writing its data to a sink. The sink will be * invoked on the thread of the caller. 
From 6b7223b6b77c1e0c719f4492496fed22d4cc6830 Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Tue, 21 Oct 2025 06:11:23 +0000 Subject: [PATCH 037/213] refactor(libstore): add sizeHint parameter to upsertFile() Add a sizeHint parameter to BinaryCacheStore::upsertFile() to enable size-based upload decisions in implementations. This lays the groundwork for reintroducing S3 multipart upload support. --- src/libstore/binary-cache-store.cc | 8 +++++--- src/libstore/http-binary-cache-store.cc | 3 ++- .../include/nix/store/binary-cache-store.hh | 18 ++++++++++++++++-- src/libstore/local-binary-cache-store.cc | 3 ++- 4 files changed, 25 insertions(+), 7 deletions(-) diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 3705f3d4d..9bb81add7 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -76,9 +76,10 @@ std::optional BinaryCacheStore::getNixCacheInfo() return getFile(cacheInfoFile); } -void BinaryCacheStore::upsertFile(const std::string & path, std::string && data, const std::string & mimeType) +void BinaryCacheStore::upsertFile( + const std::string & path, std::string && data, const std::string & mimeType, uint64_t sizeHint) { - upsertFile(path, std::make_shared(std::move(data)), mimeType); + upsertFile(path, std::make_shared(std::move(data)), mimeType, sizeHint); } void BinaryCacheStore::getFile(const std::string & path, Callback> callback) noexcept @@ -274,7 +275,8 @@ ref BinaryCacheStore::addToStoreCommon( upsertFile( narInfo->url, std::make_shared(fnTemp, std::ios_base::in | std::ios_base::binary), - "application/x-nix-nar"); + "application/x-nix-nar", + narInfo->fileSize); } else stats.narWriteAverted++; diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 8d5f427af..c4dcf0124 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -143,7 +143,8 @@ protected: void upsertFile( const 
std::string & path, std::shared_ptr> istream, - const std::string & mimeType) override + const std::string & mimeType, + uint64_t sizeHint) override { auto req = makeRequest(path); diff --git a/src/libstore/include/nix/store/binary-cache-store.hh b/src/libstore/include/nix/store/binary-cache-store.hh index 3f4de2bd4..0bed09aec 100644 --- a/src/libstore/include/nix/store/binary-cache-store.hh +++ b/src/libstore/include/nix/store/binary-cache-store.hh @@ -101,13 +101,27 @@ public: virtual bool fileExists(const std::string & path) = 0; virtual void upsertFile( - const std::string & path, std::shared_ptr> istream, const std::string & mimeType) = 0; + const std::string & path, + std::shared_ptr> istream, + const std::string & mimeType, + uint64_t sizeHint) = 0; void upsertFile( const std::string & path, // FIXME: use std::string_view std::string && data, - const std::string & mimeType); + const std::string & mimeType, + uint64_t sizeHint); + + void upsertFile( + const std::string & path, + // FIXME: use std::string_view + std::string && data, + const std::string & mimeType) + { + auto size = data.size(); + upsertFile(path, std::move(data), mimeType, size); + } /** * Dump the contents of the specified file to a sink. 
diff --git a/src/libstore/local-binary-cache-store.cc b/src/libstore/local-binary-cache-store.cc index b5e43de68..c1811bf17 100644 --- a/src/libstore/local-binary-cache-store.cc +++ b/src/libstore/local-binary-cache-store.cc @@ -56,7 +56,8 @@ protected: void upsertFile( const std::string & path, std::shared_ptr> istream, - const std::string & mimeType) override + const std::string & mimeType, + uint64_t sizeHint) override { auto path2 = config->binaryCacheDir + "/" + path; static std::atomic counter{0}; From 476c21d5ef711397b7c234554bc292c6162d3764 Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Tue, 21 Oct 2025 08:45:10 +0000 Subject: [PATCH 038/213] refactor(libstore): expose HttpBinaryCacheStore and add S3BinaryCacheStore Move HttpBinaryCacheStore class from .cc file to header to enable inheritance by S3BinaryCacheStore. Create S3BinaryCacheStore class that overrides upsertFile() to implement multipart upload logic. --- src/libstore/http-binary-cache-store.cc | 406 +++++++++--------- .../nix/store/http-binary-cache-store.hh | 51 +++ .../nix/store/s3-binary-cache-store.hh | 2 + src/libstore/s3-binary-cache-store.cc | 37 ++ 4 files changed, 284 insertions(+), 212 deletions(-) diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 7883161d5..945fe1834 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -51,227 +51,209 @@ std::string HttpBinaryCacheStoreConfig::doc() ; } -class HttpBinaryCacheStore : public virtual BinaryCacheStore +HttpBinaryCacheStore::HttpBinaryCacheStore(ref config) + : Store{*config} // TODO it will actually mutate the configuration + , BinaryCacheStore{*config} + , config{config} { - struct State - { - bool enabled = true; - std::chrono::steady_clock::time_point disabledUntil; - }; + diskCache = getNarInfoDiskCache(); +} - Sync _state; - -public: - - using Config = HttpBinaryCacheStoreConfig; - - ref config; - - HttpBinaryCacheStore(ref 
config) - : Store{*config} // TODO it will actually mutate the configuration - , BinaryCacheStore{*config} - , config{config} - { - diskCache = getNarInfoDiskCache(); - } - - void init() override - { - // FIXME: do this lazily? - // For consistent cache key handling, use the reference without parameters - // This matches what's used in Store::queryPathInfo() lookups - auto cacheKey = config->getReference().render(/*withParams=*/false); - - if (auto cacheInfo = diskCache->upToDateCacheExists(cacheKey)) { - config->wantMassQuery.setDefault(cacheInfo->wantMassQuery); - config->priority.setDefault(cacheInfo->priority); - } else { - try { - BinaryCacheStore::init(); - } catch (UploadToHTTP &) { - throw Error("'%s' does not appear to be a binary cache", config->cacheUri.to_string()); - } - diskCache->createCache(cacheKey, config->storeDir, config->wantMassQuery, config->priority); - } - } - -protected: - - std::optional getCompressionMethod(const std::string & path) - { - if (hasSuffix(path, ".narinfo") && !config->narinfoCompression.get().empty()) - return config->narinfoCompression; - else if (hasSuffix(path, ".ls") && !config->lsCompression.get().empty()) - return config->lsCompression; - else if (hasPrefix(path, "log/") && !config->logCompression.get().empty()) - return config->logCompression; - else - return std::nullopt; - } - - void maybeDisable() - { - auto state(_state.lock()); - if (state->enabled && settings.tryFallback) { - int t = 60; - printError("disabling binary cache '%s' for %s seconds", config->getHumanReadableURI(), t); - state->enabled = false; - state->disabledUntil = std::chrono::steady_clock::now() + std::chrono::seconds(t); - } - } - - void checkEnabled() - { - auto state(_state.lock()); - if (state->enabled) - return; - if (std::chrono::steady_clock::now() > state->disabledUntil) { - state->enabled = true; - debug("re-enabling binary cache '%s'", config->getHumanReadableURI()); - return; - } - throw SubstituterDisabled("substituter '%s' is 
disabled", config->getHumanReadableURI()); - } - - bool fileExists(const std::string & path) override - { - checkEnabled(); +void HttpBinaryCacheStore::init() +{ + // FIXME: do this lazily? + // For consistent cache key handling, use the reference without parameters + // This matches what's used in Store::queryPathInfo() lookups + auto cacheKey = config->getReference().render(/*withParams=*/false); + if (auto cacheInfo = diskCache->upToDateCacheExists(cacheKey)) { + config->wantMassQuery.setDefault(cacheInfo->wantMassQuery); + config->priority.setDefault(cacheInfo->priority); + } else { try { - FileTransferRequest request(makeRequest(path)); - request.method = HttpMethod::HEAD; - getFileTransfer()->download(request); - return true; - } catch (FileTransferError & e) { - /* S3 buckets return 403 if a file doesn't exist and the - bucket is unlistable, so treat 403 as 404. */ - if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden) - return false; - maybeDisable(); - throw; + BinaryCacheStore::init(); + } catch (UploadToHTTP &) { + throw Error("'%s' does not appear to be a binary cache", config->cacheUri.to_string()); } + diskCache->createCache(cacheKey, config->storeDir, config->wantMassQuery, config->priority); } +} - void upsertFile( - const std::string & path, - std::shared_ptr> istream, - const std::string & mimeType, - uint64_t sizeHint) override - { - auto req = makeRequest(path); - - auto data = StreamToSourceAdapter(istream).drain(); - - if (auto compressionMethod = getCompressionMethod(path)) { - data = compress(*compressionMethod, data); - req.headers.emplace_back("Content-Encoding", *compressionMethod); - } - - req.data = std::move(data); - req.mimeType = mimeType; - - try { - getFileTransfer()->upload(req); - } catch (FileTransferError & e) { - throw UploadToHTTP( - "while uploading to HTTP binary cache at '%s': %s", config->cacheUri.to_string(), e.msg()); - } - } - - FileTransferRequest makeRequest(const std::string & path) - { - /* 
Otherwise the last path fragment will get discarded. */ - auto cacheUriWithTrailingSlash = config->cacheUri; - if (!cacheUriWithTrailingSlash.path.empty()) - cacheUriWithTrailingSlash.path.push_back(""); - - /* path is not a path, but a full relative or absolute - URL, e.g. we've seen in the wild NARINFO files have a URL - field which is - `nar/15f99rdaf26k39knmzry4xd0d97wp6yfpnfk1z9avakis7ipb9yg.nar?hash=zphkqn2wg8mnvbkixnl2aadkbn0rcnfj` - (note the query param) and that gets passed here. */ - auto result = parseURLRelative(path, cacheUriWithTrailingSlash); - - /* For S3 URLs, preserve query parameters from the base URL when the - relative path doesn't have its own query parameters. This is needed - to preserve S3-specific parameters like endpoint and region. */ - if (config->cacheUri.scheme == "s3" && result.query.empty()) { - result.query = config->cacheUri.query; - } - - return FileTransferRequest(result); - } - - void getFile(const std::string & path, Sink & sink) override - { - checkEnabled(); - auto request(makeRequest(path)); - try { - getFileTransfer()->download(std::move(request), sink); - } catch (FileTransferError & e) { - if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden) - throw NoSuchBinaryCacheFile( - "file '%s' does not exist in binary cache '%s'", path, config->getHumanReadableURI()); - maybeDisable(); - throw; - } - } - - void getFile(const std::string & path, Callback> callback) noexcept override - { - auto callbackPtr = std::make_shared(std::move(callback)); - - try { - checkEnabled(); - - auto request(makeRequest(path)); - - getFileTransfer()->enqueueFileTransfer( - request, {[callbackPtr, this](std::future result) { - try { - (*callbackPtr)(std::move(result.get().data)); - } catch (FileTransferError & e) { - if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden) - return (*callbackPtr)({}); - maybeDisable(); - callbackPtr->rethrow(); - } catch (...) 
{ - callbackPtr->rethrow(); - } - }}); - - } catch (...) { - callbackPtr->rethrow(); - return; - } - } - - std::optional getNixCacheInfo() override - { - try { - auto result = getFileTransfer()->download(makeRequest(cacheInfoFile)); - return result.data; - } catch (FileTransferError & e) { - if (e.error == FileTransfer::NotFound) - return std::nullopt; - maybeDisable(); - throw; - } - } - - /** - * This isn't actually necessary read only. We support "upsert" now, so we - * have a notion of authentication via HTTP POST/PUT. - * - * For now, we conservatively say we don't know. - * - * \todo try to expose our HTTP authentication status. - */ - std::optional isTrustedClient() override - { +std::optional HttpBinaryCacheStore::getCompressionMethod(const std::string & path) +{ + if (hasSuffix(path, ".narinfo") && !config->narinfoCompression.get().empty()) + return config->narinfoCompression; + else if (hasSuffix(path, ".ls") && !config->lsCompression.get().empty()) + return config->lsCompression; + else if (hasPrefix(path, "log/") && !config->logCompression.get().empty()) + return config->logCompression; + else return std::nullopt; +} + +void HttpBinaryCacheStore::maybeDisable() +{ + auto state(_state.lock()); + if (state->enabled && settings.tryFallback) { + int t = 60; + printError("disabling binary cache '%s' for %s seconds", config->getHumanReadableURI(), t); + state->enabled = false; + state->disabledUntil = std::chrono::steady_clock::now() + std::chrono::seconds(t); } -}; +} + +void HttpBinaryCacheStore::checkEnabled() +{ + auto state(_state.lock()); + if (state->enabled) + return; + if (std::chrono::steady_clock::now() > state->disabledUntil) { + state->enabled = true; + debug("re-enabling binary cache '%s'", config->getHumanReadableURI()); + return; + } + throw SubstituterDisabled("substituter '%s' is disabled", config->getHumanReadableURI()); +} + +bool HttpBinaryCacheStore::fileExists(const std::string & path) +{ + checkEnabled(); + + try { + 
FileTransferRequest request(makeRequest(path)); + request.method = HttpMethod::HEAD; + getFileTransfer()->download(request); + return true; + } catch (FileTransferError & e) { + /* S3 buckets return 403 if a file doesn't exist and the + bucket is unlistable, so treat 403 as 404. */ + if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden) + return false; + maybeDisable(); + throw; + } +} + +void HttpBinaryCacheStore::upsertFile( + const std::string & path, + std::shared_ptr> istream, + const std::string & mimeType, + uint64_t sizeHint) +{ + auto req = makeRequest(path); + + auto data = StreamToSourceAdapter(istream).drain(); + + auto compressionMethod = getCompressionMethod(path); + + if (compressionMethod) { + data = compress(*compressionMethod, data); + req.headers.emplace_back("Content-Encoding", *compressionMethod); + } + + req.data = std::move(data); + req.mimeType = mimeType; + + try { + getFileTransfer()->upload(req); + } catch (FileTransferError & e) { + throw UploadToHTTP("while uploading to HTTP binary cache at '%s': %s", config->cacheUri.to_string(), e.msg()); + } +} + +FileTransferRequest HttpBinaryCacheStore::makeRequest(const std::string & path) +{ + /* Otherwise the last path fragment will get discarded. */ + auto cacheUriWithTrailingSlash = config->cacheUri; + if (!cacheUriWithTrailingSlash.path.empty()) + cacheUriWithTrailingSlash.path.push_back(""); + + /* path is not a path, but a full relative or absolute + URL, e.g. we've seen in the wild NARINFO files have a URL + field which is + `nar/15f99rdaf26k39knmzry4xd0d97wp6yfpnfk1z9avakis7ipb9yg.nar?hash=zphkqn2wg8mnvbkixnl2aadkbn0rcnfj` + (note the query param) and that gets passed here. */ + auto result = parseURLRelative(path, cacheUriWithTrailingSlash); + + /* For S3 URLs, preserve query parameters from the base URL when the + relative path doesn't have its own query parameters. This is needed + to preserve S3-specific parameters like endpoint and region. 
*/ + if (config->cacheUri.scheme == "s3" && result.query.empty()) { + result.query = config->cacheUri.query; + } + + return FileTransferRequest(result); +} + +void HttpBinaryCacheStore::getFile(const std::string & path, Sink & sink) +{ + checkEnabled(); + auto request(makeRequest(path)); + try { + getFileTransfer()->download(std::move(request), sink); + } catch (FileTransferError & e) { + if (e.error == FileTransfer::NotFound || e.error == FileTransfer::Forbidden) + throw NoSuchBinaryCacheFile( + "file '%s' does not exist in binary cache '%s'", path, config->getHumanReadableURI()); + maybeDisable(); + throw; + } +} + +void HttpBinaryCacheStore::getFile(const std::string & path, Callback> callback) noexcept +{ + auto callbackPtr = std::make_shared(std::move(callback)); + + try { + checkEnabled(); + + auto request(makeRequest(path)); + + getFileTransfer()->enqueueFileTransfer(request, {[callbackPtr, this](std::future result) { + try { + (*callbackPtr)(std::move(result.get().data)); + } catch (FileTransferError & e) { + if (e.error == FileTransfer::NotFound + || e.error == FileTransfer::Forbidden) + return (*callbackPtr)({}); + maybeDisable(); + callbackPtr->rethrow(); + } catch (...) { + callbackPtr->rethrow(); + } + }}); + + } catch (...) { + callbackPtr->rethrow(); + return; + } +} + +std::optional HttpBinaryCacheStore::getNixCacheInfo() +{ + try { + auto result = getFileTransfer()->download(makeRequest(cacheInfoFile)); + return result.data; + } catch (FileTransferError & e) { + if (e.error == FileTransfer::NotFound) + return std::nullopt; + maybeDisable(); + throw; + } +} + +/** + * This isn't actually necessary read only. We support "upsert" now, so we + * have a notion of authentication via HTTP POST/PUT. + * + * For now, we conservatively say we don't know. + * + * \todo try to expose our HTTP authentication status. 
+ */ +std::optional HttpBinaryCacheStore::isTrustedClient() +{ + return std::nullopt; +} ref HttpBinaryCacheStore::Config::openStore() const { diff --git a/src/libstore/include/nix/store/http-binary-cache-store.hh b/src/libstore/include/nix/store/http-binary-cache-store.hh index e0b7ac1ea..d8ba72390 100644 --- a/src/libstore/include/nix/store/http-binary-cache-store.hh +++ b/src/libstore/include/nix/store/http-binary-cache-store.hh @@ -3,6 +3,10 @@ #include "nix/util/url.hh" #include "nix/store/binary-cache-store.hh" +#include "nix/store/filetransfer.hh" +#include "nix/util/sync.hh" + +#include namespace nix { @@ -46,4 +50,51 @@ struct HttpBinaryCacheStoreConfig : std::enable_shared_from_this _state; + +public: + + using Config = HttpBinaryCacheStoreConfig; + + ref config; + + HttpBinaryCacheStore(ref config); + + void init() override; + +protected: + + std::optional getCompressionMethod(const std::string & path); + + void maybeDisable(); + + void checkEnabled(); + + bool fileExists(const std::string & path) override; + + void upsertFile( + const std::string & path, + std::shared_ptr> istream, + const std::string & mimeType, + uint64_t sizeHint) override; + + FileTransferRequest makeRequest(const std::string & path); + + void getFile(const std::string & path, Sink & sink) override; + + void getFile(const std::string & path, Callback> callback) noexcept override; + + std::optional getNixCacheInfo() override; + + std::optional isTrustedClient() override; +}; + } // namespace nix diff --git a/src/libstore/include/nix/store/s3-binary-cache-store.hh b/src/libstore/include/nix/store/s3-binary-cache-store.hh index 288ca41a0..81a2d3f3f 100644 --- a/src/libstore/include/nix/store/s3-binary-cache-store.hh +++ b/src/libstore/include/nix/store/s3-binary-cache-store.hh @@ -77,6 +77,8 @@ struct S3BinaryCacheStoreConfig : HttpBinaryCacheStoreConfig static std::string doc(); std::string getHumanReadableURI() const override; + + ref openStore() const override; }; } // namespace nix 
diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 0b37ac5d7..5d97fb0fd 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -7,6 +7,36 @@ namespace nix { +class S3BinaryCacheStore : public virtual HttpBinaryCacheStore +{ +public: + S3BinaryCacheStore(ref config) + : Store{*config} + , BinaryCacheStore{*config} + , HttpBinaryCacheStore{config} + , s3Config{config} + { + } + + void upsertFile( + const std::string & path, + std::shared_ptr> istream, + const std::string & mimeType, + uint64_t sizeHint) override; + +private: + ref s3Config; +}; + +void S3BinaryCacheStore::upsertFile( + const std::string & path, + std::shared_ptr> istream, + const std::string & mimeType, + uint64_t sizeHint) +{ + HttpBinaryCacheStore::upsertFile(path, istream, mimeType, sizeHint); +} + StringSet S3BinaryCacheStoreConfig::uriSchemes() { return {"s3"}; @@ -51,6 +81,13 @@ std::string S3BinaryCacheStoreConfig::doc() )"; } +ref S3BinaryCacheStoreConfig::openStore() const +{ + auto sharedThis = std::const_pointer_cast( + std::static_pointer_cast(shared_from_this())); + return make_ref(ref{sharedThis}); +} + static RegisterStoreImplementation registerS3BinaryCacheStore; } // namespace nix From 1cd8458c28bb06c90486023a52db3cee706a7a70 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Sat, 25 Oct 2025 02:09:06 +0300 Subject: [PATCH 039/213] tests/functional: Add source-paths tests This has already been implemented in 1e709554d565be51ab8d5a7e4941b0cc1da70807 as a side-effect of mounting the accessors in storeFS. Let's test this so it doesn't regress. 
(cherry-picked from https://github.com/NixOS/nix/pull/12915) --- tests/functional/flakes/source-paths.sh | 34 +++++++++++++++++++++++++ 1 file changed, 34 insertions(+) diff --git a/tests/functional/flakes/source-paths.sh b/tests/functional/flakes/source-paths.sh index 4709bf2fc..3aa3683c2 100644 --- a/tests/functional/flakes/source-paths.sh +++ b/tests/functional/flakes/source-paths.sh @@ -12,6 +12,10 @@ cat > "$repo/flake.nix" < "$repo/foo" + +expectStderr 1 nix eval "$repo#z" | grepQuiet "error: Path 'foo' in the repository \"$repo\" is not tracked by Git." +expectStderr 1 nix eval "$repo#a" | grepQuiet "error: Path 'foo' in the repository \"$repo\" is not tracked by Git." + +git -C "$repo" add "$repo/foo" + +[[ $(nix eval --raw "$repo#z") = 123 ]] + +expectStderr 1 nix eval "$repo#b" | grepQuiet "error: Path 'dir' does not exist in Git repository \"$repo\"." + +mkdir -p "$repo/dir" +echo 456 > "$repo/dir/default.nix" + +expectStderr 1 nix eval "$repo#b" | grepQuiet "error: Path 'dir' in the repository \"$repo\" is not tracked by Git." 
+ +git -C "$repo" add "$repo/dir/default.nix" + +[[ $(nix eval "$repo#b") = 456 ]] From 78e98691d64d50baa2c6e6146250c4d87b177751 Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Fri, 24 Oct 2025 23:52:32 +0000 Subject: [PATCH 040/213] refactor(libstore/filetransfer): make setupForS3 public --- src/libstore/include/nix/store/filetransfer.hh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/libstore/include/nix/store/filetransfer.hh b/src/libstore/include/nix/store/filetransfer.hh index 402ee4900..305c33af1 100644 --- a/src/libstore/include/nix/store/filetransfer.hh +++ b/src/libstore/include/nix/store/filetransfer.hh @@ -155,9 +155,10 @@ struct FileTransferRequest unreachable(); } + void setupForS3(); + private: friend struct curlFileTransfer; - void setupForS3(); #if NIX_WITH_AWS_AUTH std::optional awsSigV4Provider; #endif From e38128b90d35d31a9ea69802f62219e27f82c12a Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Thu, 16 Oct 2025 18:38:42 +0000 Subject: [PATCH 041/213] feat(libstore): support S3 object versioning via versionId parameter S3 buckets support object versioning to prevent unexpected changes, but Nix previously lacked the ability to fetch specific versions of S3 objects. 
This adds support for a `versionId` query parameter in S3 URLs, enabling users to pin to specific object versions: ``` s3://bucket/key?region=us-east-1&versionId=abc123 ``` --- doc/manual/rl-next/s3-object-versioning.md | 14 ++++++ src/libstore-tests/s3-url.cc | 50 ++++++++++++++++++++ src/libstore/include/nix/store/s3-url.hh | 1 + src/libstore/s3-url.cc | 10 ++++ tests/nixos/s3-binary-cache-store.nix | 53 ++++++++++++++++++++-- 5 files changed, 123 insertions(+), 5 deletions(-) create mode 100644 doc/manual/rl-next/s3-object-versioning.md diff --git a/doc/manual/rl-next/s3-object-versioning.md b/doc/manual/rl-next/s3-object-versioning.md new file mode 100644 index 000000000..3b85e0926 --- /dev/null +++ b/doc/manual/rl-next/s3-object-versioning.md @@ -0,0 +1,14 @@ +--- +synopsis: "S3 URLs now support object versioning via versionId parameter" +prs: [14274] +issues: [13955] +--- + +S3 URLs now support a `versionId` query parameter to fetch specific versions +of objects from S3 buckets with versioning enabled. 
This allows pinning to +exact object versions for reproducibility and protection against unexpected +changes: + +``` +s3://bucket/key?region=us-east-1&versionId=abc123def456 +``` diff --git a/src/libstore-tests/s3-url.cc b/src/libstore-tests/s3-url.cc index 2c384c255..9fa625fd6 100644 --- a/src/libstore-tests/s3-url.cc +++ b/src/libstore-tests/s3-url.cc @@ -70,6 +70,25 @@ INSTANTIATE_TEST_SUITE_P( }, "with_profile_and_region", }, + ParsedS3URLTestCase{ + "s3://my-bucket/my-key.txt?versionId=abc123xyz", + { + .bucket = "my-bucket", + .key = {"my-key.txt"}, + .versionId = "abc123xyz", + }, + "with_versionId", + }, + ParsedS3URLTestCase{ + "s3://bucket/path/to/object?region=eu-west-1&versionId=version456", + { + .bucket = "bucket", + .key = {"path", "to", "object"}, + .region = "eu-west-1", + .versionId = "version456", + }, + "with_region_and_versionId", + }, ParsedS3URLTestCase{ "s3://bucket/key?endpoint=https://minio.local&scheme=http", { @@ -222,6 +241,37 @@ INSTANTIATE_TEST_SUITE_P( }, "https://s3.ap-southeast-2.amazonaws.com/bucket/path/to/file.txt", "complex_path_and_region", + }, + S3ToHttpsConversionTestCase{ + ParsedS3URL{ + .bucket = "my-bucket", + .key = {"my-key.txt"}, + .versionId = "abc123xyz", + }, + ParsedURL{ + .scheme = "https", + .authority = ParsedURL::Authority{.host = "s3.us-east-1.amazonaws.com"}, + .path = {"", "my-bucket", "my-key.txt"}, + .query = {{"versionId", "abc123xyz"}}, + }, + "https://s3.us-east-1.amazonaws.com/my-bucket/my-key.txt?versionId=abc123xyz", + "with_versionId", + }, + S3ToHttpsConversionTestCase{ + ParsedS3URL{ + .bucket = "versioned-bucket", + .key = {"path", "to", "object"}, + .region = "eu-west-1", + .versionId = "version456", + }, + ParsedURL{ + .scheme = "https", + .authority = ParsedURL::Authority{.host = "s3.eu-west-1.amazonaws.com"}, + .path = {"", "versioned-bucket", "path", "to", "object"}, + .query = {{"versionId", "version456"}}, + }, + 
"https://s3.eu-west-1.amazonaws.com/versioned-bucket/path/to/object?versionId=version456", + "with_region_and_versionId", }), [](const ::testing::TestParamInfo & info) { return info.param.description; }); diff --git a/src/libstore/include/nix/store/s3-url.hh b/src/libstore/include/nix/store/s3-url.hh index 4ee0c87f9..cf59dbea8 100644 --- a/src/libstore/include/nix/store/s3-url.hh +++ b/src/libstore/include/nix/store/s3-url.hh @@ -26,6 +26,7 @@ struct ParsedS3URL std::optional profile; std::optional region; std::optional scheme; + std::optional versionId; /** * The endpoint can be either missing, be an absolute URI (with a scheme like `http:`) * or an authority (so an IP address or a registered name). diff --git a/src/libstore/s3-url.cc b/src/libstore/s3-url.cc index e8fbba8f7..503c0cd91 100644 --- a/src/libstore/s3-url.cc +++ b/src/libstore/s3-url.cc @@ -48,6 +48,7 @@ try { .profile = getOptionalParam("profile"), .region = getOptionalParam("region"), .scheme = getOptionalParam("scheme"), + .versionId = getOptionalParam("versionId"), .endpoint = [&]() -> decltype(ParsedS3URL::endpoint) { if (!endpoint) return std::monostate(); @@ -73,6 +74,12 @@ ParsedURL ParsedS3URL::toHttpsUrl() const auto regionStr = region.transform(toView).value_or("us-east-1"); auto schemeStr = scheme.transform(toView).value_or("https"); + // Build query parameters (e.g., versionId if present) + StringMap queryParams; + if (versionId) { + queryParams["versionId"] = *versionId; + } + // Handle endpoint configuration using std::visit return std::visit( overloaded{ @@ -85,6 +92,7 @@ ParsedURL ParsedS3URL::toHttpsUrl() const .scheme = std::string{schemeStr}, .authority = ParsedURL::Authority{.host = "s3." 
+ regionStr + ".amazonaws.com"}, .path = std::move(path), + .query = std::move(queryParams), }; }, [&](const ParsedURL::Authority & auth) { @@ -96,6 +104,7 @@ ParsedURL ParsedS3URL::toHttpsUrl() const .scheme = std::string{schemeStr}, .authority = auth, .path = std::move(path), + .query = std::move(queryParams), }; }, [&](const ParsedURL & endpointUrl) { @@ -107,6 +116,7 @@ ParsedURL ParsedS3URL::toHttpsUrl() const .scheme = endpointUrl.scheme, .authority = endpointUrl.authority, .path = std::move(path), + .query = std::move(queryParams), }; }, }, diff --git a/tests/nixos/s3-binary-cache-store.nix b/tests/nixos/s3-binary-cache-store.nix index 981fab868..a2ede4572 100644 --- a/tests/nixos/s3-binary-cache-store.nix +++ b/tests/nixos/s3-binary-cache-store.nix @@ -1,7 +1,5 @@ { - lib, config, - nixpkgs, ... }: @@ -147,7 +145,7 @@ in else: machine.fail(f"nix path-info {pkg}") - def setup_s3(populate_bucket=[], public=False): + def setup_s3(populate_bucket=[], public=False, versioned=False): """ Decorator that creates/destroys a unique bucket for each test. Optionally pre-populates bucket with specified packages. 
@@ -156,14 +154,17 @@ in Args: populate_bucket: List of packages to upload before test runs public: If True, make the bucket publicly accessible + versioned: If True, enable versioning on the bucket before populating """ def decorator(test_func): def wrapper(): bucket = str(uuid.uuid4()) server.succeed(f"mc mb minio/{bucket}") - if public: - server.succeed(f"mc anonymous set download minio/{bucket}") try: + if public: + server.succeed(f"mc anonymous set download minio/{bucket}") + if versioned: + server.succeed(f"mc version enable minio/{bucket}") if populate_bucket: store_url = make_s3_url(bucket) for pkg in populate_bucket: @@ -597,6 +598,47 @@ in print(" ✓ File content verified correct (hash matches)") + @setup_s3(populate_bucket=[PKGS['A']], versioned=True) + def test_versioned_urls(bucket): + """Test that versionId parameter is accepted in S3 URLs""" + print("\n=== Testing Versioned URLs ===") + + # Get the nix-cache-info file + cache_info_url = make_s3_url(bucket, path="/nix-cache-info") + + # Fetch without versionId should work + client.succeed( + f"{ENV_WITH_CREDS} nix eval --impure --expr " + f"'builtins.fetchurl {{ name = \"cache-info\"; url = \"{cache_info_url}\"; }}'" + ) + print(" ✓ Fetch without versionId works") + + # List versions to get a version ID + # MinIO output format: [timestamp] size tier versionId versionNumber method filename + versions_output = server.succeed(f"mc ls --versions minio/{bucket}/nix-cache-info") + + # Extract version ID from output (4th field after STANDARD) + import re + version_match = re.search(r'STANDARD\s+(\S+)\s+v\d+', versions_output) + if not version_match: + print(f"Debug: versions output: {versions_output}") + raise Exception("Could not extract version ID from MinIO output") + + version_id = version_match.group(1) + print(f" ✓ Found version ID: {version_id}") + + # Version ID should not be "null" since versioning was enabled before upload + if version_id == "null": + raise Exception("Version ID is 'null' - 
versioning may not be working correctly") + + # Fetch with versionId parameter + versioned_url = f"{cache_info_url}&versionId={version_id}" + client.succeed( + f"{ENV_WITH_CREDS} nix eval --impure --expr " + f"'builtins.fetchurl {{ name = \"cache-info-versioned\"; url = \"{versioned_url}\"; }}'" + ) + print(" ✓ Fetch with versionId parameter works") + # ============================================================================ # Main Test Execution # ============================================================================ @@ -626,6 +668,7 @@ in test_compression_mixed() test_compression_disabled() test_nix_prefetch_url() + test_versioned_urls() print("\n" + "="*80) print("✓ All S3 Binary Cache Store Tests Passed!") From 0f0d9255c62054b606cb56594526d03b431fed51 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 24 Oct 2025 18:00:05 -0400 Subject: [PATCH 042/213] Clean up JSON utils in a few ways In particular - Remove `get`, it is redundant with `valueAt` and the `get` in `util.hh`. - Remove `nullableValueAt`. It is morally just the function composition `getNullable . valueAt`, not an orthogonal combinator like the others. - `optionalValueAt` return a pointer, not `std::optional`. This also expresses optionality, but without creating a needless copy. This brings it in line with the other combinators which also return references. - Delete `valueAt` and `optionalValueAt` taking the map by value, as we did for `get` in 408c09a1207e1f6bb7367322ceb25d187334673f, which prevents bugs / unnecessary copies. `adl_serializer::from_json` was the one use of `getNullable`. I give it a little static function for the ultimate creation of a `std::optional` it does need to do (after switching it to using `getNullable . valueAt`. That could go in `json-utils.hh` eventually, but I didn't bother for now since only one things needs it. 
Co-authored-by: Sergei Zimmerman --- src/libfetchers/fetchers.cc | 5 +- src/libstore/derivation-options.cc | 49 ++++++++++++----- src/libstore/nar-info.cc | 10 ++-- src/libutil-tests/json-utils.cc | 62 +++++++++++++--------- src/libutil/include/nix/util/json-utils.hh | 20 ++++--- src/libutil/json-utils.cc | 45 +++------------- 6 files changed, 99 insertions(+), 92 deletions(-) diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index 324e8884c..c9c0fffa2 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -519,10 +519,11 @@ using namespace nix; fetchers::PublicKey adl_serializer::from_json(const json & json) { fetchers::PublicKey res = {}; - if (auto type = optionalValueAt(json, "type")) + auto & obj = getObject(json); + if (auto * type = optionalValueAt(obj, "type")) res.type = getString(*type); - res.key = getString(valueAt(json, "key")); + res.key = getString(valueAt(obj, "key")); return res; } diff --git a/src/libstore/derivation-options.cc b/src/libstore/derivation-options.cc index 698485c0d..6afaf0348 100644 --- a/src/libstore/derivation-options.cc +++ b/src/libstore/derivation-options.cc @@ -116,27 +116,29 @@ DerivationOptions::fromStructuredAttrs(const StringMap & env, const StructuredAt DerivationOptions defaults = {}; if (shouldWarn && parsed) { - if (get(parsed->structuredAttrs, "allowedReferences")) { + auto & structuredAttrs = getObject(parsed->structuredAttrs); + + if (get(structuredAttrs, "allowedReferences")) { warn( "'structuredAttrs' disables the effect of the top-level attribute 'allowedReferences'; use 'outputChecks' instead"); } - if (get(parsed->structuredAttrs, "allowedRequisites")) { + if (get(structuredAttrs, "allowedRequisites")) { warn( "'structuredAttrs' disables the effect of the top-level attribute 'allowedRequisites'; use 'outputChecks' instead"); } - if (get(parsed->structuredAttrs, "disallowedRequisites")) { + if (get(structuredAttrs, "disallowedRequisites")) { warn( "'structuredAttrs' 
disables the effect of the top-level attribute 'disallowedRequisites'; use 'outputChecks' instead"); } - if (get(parsed->structuredAttrs, "disallowedReferences")) { + if (get(structuredAttrs, "disallowedReferences")) { warn( "'structuredAttrs' disables the effect of the top-level attribute 'disallowedReferences'; use 'outputChecks' instead"); } - if (get(parsed->structuredAttrs, "maxSize")) { + if (get(structuredAttrs, "maxSize")) { warn( "'structuredAttrs' disables the effect of the top-level attribute 'maxSize'; use 'outputChecks' instead"); } - if (get(parsed->structuredAttrs, "maxClosureSize")) { + if (get(structuredAttrs, "maxClosureSize")) { warn( "'structuredAttrs' disables the effect of the top-level attribute 'maxClosureSize'; use 'outputChecks' instead"); } @@ -145,11 +147,15 @@ DerivationOptions::fromStructuredAttrs(const StringMap & env, const StructuredAt return { .outputChecks = [&]() -> OutputChecksVariant { if (parsed) { + auto & structuredAttrs = getObject(parsed->structuredAttrs); + std::map res; - if (auto outputChecks = get(parsed->structuredAttrs, "outputChecks")) { - for (auto & [outputName, output] : getObject(*outputChecks)) { + if (auto * outputChecks = get(structuredAttrs, "outputChecks")) { + for (auto & [outputName, output_] : getObject(*outputChecks)) { OutputChecks checks; + auto & output = getObject(output_); + if (auto maxSize = get(output, "maxSize")) checks.maxSize = maxSize->get(); @@ -195,7 +201,9 @@ DerivationOptions::fromStructuredAttrs(const StringMap & env, const StructuredAt std::map res; if (parsed) { - if (auto udr = get(parsed->structuredAttrs, "unsafeDiscardReferences")) { + auto & structuredAttrs = getObject(parsed->structuredAttrs); + + if (auto * udr = get(structuredAttrs, "unsafeDiscardReferences")) { for (auto & [outputName, output] : getObject(*udr)) { if (!output.is_boolean()) throw Error("attribute 'unsafeDiscardReferences.\"%s\"' must be a Boolean", outputName); @@ -226,7 +234,7 @@ 
DerivationOptions::fromStructuredAttrs(const StringMap & env, const StructuredAt std::map ret; if (parsed) { - auto e = optionalValueAt(parsed->structuredAttrs, "exportReferencesGraph"); + auto e = optionalValueAt(getObject(parsed->structuredAttrs), "exportReferencesGraph"); if (!e || !e->is_object()) return ret; for (auto & [key, value] : getObject(*e)) { @@ -333,8 +341,10 @@ namespace nlohmann { using namespace nix; -DerivationOptions adl_serializer::from_json(const json & json) +DerivationOptions adl_serializer::from_json(const json & json_) { + auto & json = getObject(json_); + return { .outputChecks = [&]() -> OutputChecksVariant { auto outputChecks = getObject(valueAt(json, "outputChecks")); @@ -397,13 +407,24 @@ void adl_serializer::to_json(json & json, const DerivationOpt json["allowSubstitutes"] = o.allowSubstitutes; } -DerivationOptions::OutputChecks adl_serializer::from_json(const json & json) +template +static inline std::optional ptrToOwned(const json * ptr) { + if (ptr) + return std::optional{*ptr}; + else + return std::nullopt; +} + +DerivationOptions::OutputChecks adl_serializer::from_json(const json & json_) +{ + auto & json = getObject(json_); + return { .ignoreSelfRefs = getBoolean(valueAt(json, "ignoreSelfRefs")), - .allowedReferences = nullableValueAt(json, "allowedReferences"), + .allowedReferences = ptrToOwned(getNullable(valueAt(json, "allowedReferences"))), .disallowedReferences = getStringSet(valueAt(json, "disallowedReferences")), - .allowedRequisites = nullableValueAt(json, "allowedRequisites"), + .allowedRequisites = ptrToOwned(getNullable(valueAt(json, "allowedRequisites"))), .disallowedRequisites = getStringSet(valueAt(json, "disallowedRequisites")), }; } diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc index 1e7c48287..6f1abb273 100644 --- a/src/libstore/nar-info.cc +++ b/src/libstore/nar-info.cc @@ -159,17 +159,19 @@ NarInfo NarInfo::fromJSON(const StoreDirConfig & store, const StorePath & path, 
UnkeyedValidPathInfo::fromJSON(store, json), }}; + auto & obj = getObject(json); + if (json.contains("url")) - res.url = getString(valueAt(json, "url")); + res.url = getString(valueAt(obj, "url")); if (json.contains("compression")) - res.compression = getString(valueAt(json, "compression")); + res.compression = getString(valueAt(obj, "compression")); if (json.contains("downloadHash")) - res.fileHash = Hash::parseAny(getString(valueAt(json, "downloadHash")), std::nullopt); + res.fileHash = Hash::parseAny(getString(valueAt(obj, "downloadHash")), std::nullopt); if (json.contains("downloadSize")) - res.fileSize = getUnsigned(valueAt(json, "downloadSize")); + res.fileSize = getUnsigned(valueAt(obj, "downloadSize")); return res; } diff --git a/src/libutil-tests/json-utils.cc b/src/libutil-tests/json-utils.cc index 7d02894c6..b5c011355 100644 --- a/src/libutil-tests/json-utils.cc +++ b/src/libutil-tests/json-utils.cc @@ -70,7 +70,7 @@ TEST(valueAt, simpleObject) auto nested = R"({ "hello": { "world": "" } })"_json; - ASSERT_EQ(valueAt(valueAt(getObject(nested), "hello"), "world"), ""); + ASSERT_EQ(valueAt(getObject(valueAt(getObject(nested), "hello")), "world"), ""); } TEST(valueAt, missingKey) @@ -119,10 +119,12 @@ TEST(getArray, wrongAssertions) { auto json = R"({ "object": {}, "array": [], "string": "", "int": 0, "boolean": false })"_json; - ASSERT_THROW(getArray(valueAt(json, "object")), Error); - ASSERT_THROW(getArray(valueAt(json, "string")), Error); - ASSERT_THROW(getArray(valueAt(json, "int")), Error); - ASSERT_THROW(getArray(valueAt(json, "boolean")), Error); + auto & obj = getObject(json); + + ASSERT_THROW(getArray(valueAt(obj, "object")), Error); + ASSERT_THROW(getArray(valueAt(obj, "string")), Error); + ASSERT_THROW(getArray(valueAt(obj, "int")), Error); + ASSERT_THROW(getArray(valueAt(obj, "boolean")), Error); } TEST(getString, rightAssertions) @@ -136,10 +138,12 @@ TEST(getString, wrongAssertions) { auto json = R"({ "object": {}, "array": [], "string": "", 
"int": 0, "boolean": false })"_json; - ASSERT_THROW(getString(valueAt(json, "object")), Error); - ASSERT_THROW(getString(valueAt(json, "array")), Error); - ASSERT_THROW(getString(valueAt(json, "int")), Error); - ASSERT_THROW(getString(valueAt(json, "boolean")), Error); + auto & obj = getObject(json); + + ASSERT_THROW(getString(valueAt(obj, "object")), Error); + ASSERT_THROW(getString(valueAt(obj, "array")), Error); + ASSERT_THROW(getString(valueAt(obj, "int")), Error); + ASSERT_THROW(getString(valueAt(obj, "boolean")), Error); } TEST(getIntegralNumber, rightAssertions) @@ -156,18 +160,20 @@ TEST(getIntegralNumber, wrongAssertions) auto json = R"({ "object": {}, "array": [], "string": "", "int": 0, "signed": -256, "large": 128, "boolean": false })"_json; - ASSERT_THROW(getUnsigned(valueAt(json, "object")), Error); - ASSERT_THROW(getUnsigned(valueAt(json, "array")), Error); - ASSERT_THROW(getUnsigned(valueAt(json, "string")), Error); - ASSERT_THROW(getUnsigned(valueAt(json, "boolean")), Error); - ASSERT_THROW(getUnsigned(valueAt(json, "signed")), Error); + auto & obj = getObject(json); - ASSERT_THROW(getInteger(valueAt(json, "object")), Error); - ASSERT_THROW(getInteger(valueAt(json, "array")), Error); - ASSERT_THROW(getInteger(valueAt(json, "string")), Error); - ASSERT_THROW(getInteger(valueAt(json, "boolean")), Error); - ASSERT_THROW(getInteger(valueAt(json, "large")), Error); - ASSERT_THROW(getInteger(valueAt(json, "signed")), Error); + ASSERT_THROW(getUnsigned(valueAt(obj, "object")), Error); + ASSERT_THROW(getUnsigned(valueAt(obj, "array")), Error); + ASSERT_THROW(getUnsigned(valueAt(obj, "string")), Error); + ASSERT_THROW(getUnsigned(valueAt(obj, "boolean")), Error); + ASSERT_THROW(getUnsigned(valueAt(obj, "signed")), Error); + + ASSERT_THROW(getInteger(valueAt(obj, "object")), Error); + ASSERT_THROW(getInteger(valueAt(obj, "array")), Error); + ASSERT_THROW(getInteger(valueAt(obj, "string")), Error); + ASSERT_THROW(getInteger(valueAt(obj, "boolean")), Error); + 
ASSERT_THROW(getInteger(valueAt(obj, "large")), Error); + ASSERT_THROW(getInteger(valueAt(obj, "signed")), Error); } TEST(getBoolean, rightAssertions) @@ -181,24 +187,28 @@ TEST(getBoolean, wrongAssertions) { auto json = R"({ "object": {}, "array": [], "string": "", "int": 0, "boolean": false })"_json; - ASSERT_THROW(getBoolean(valueAt(json, "object")), Error); - ASSERT_THROW(getBoolean(valueAt(json, "array")), Error); - ASSERT_THROW(getBoolean(valueAt(json, "string")), Error); - ASSERT_THROW(getBoolean(valueAt(json, "int")), Error); + auto & obj = getObject(json); + + ASSERT_THROW(getBoolean(valueAt(obj, "object")), Error); + ASSERT_THROW(getBoolean(valueAt(obj, "array")), Error); + ASSERT_THROW(getBoolean(valueAt(obj, "string")), Error); + ASSERT_THROW(getBoolean(valueAt(obj, "int")), Error); } TEST(optionalValueAt, existing) { auto json = R"({ "string": "ssh-rsa" })"_json; - ASSERT_EQ(optionalValueAt(json, "string"), std::optional{"ssh-rsa"}); + auto * ptr = optionalValueAt(getObject(json), "string"); + ASSERT_TRUE(ptr); + ASSERT_EQ(*ptr, R"("ssh-rsa")"_json); } TEST(optionalValueAt, empty) { auto json = R"({})"_json; - ASSERT_EQ(optionalValueAt(json, "string"), std::nullopt); + ASSERT_EQ(optionalValueAt(getObject(json), "string"), nullptr); } TEST(getNullable, null) diff --git a/src/libutil/include/nix/util/json-utils.hh b/src/libutil/include/nix/util/json-utils.hh index 4b5fb4b21..51ebb2b6c 100644 --- a/src/libutil/include/nix/util/json-utils.hh +++ b/src/libutil/include/nix/util/json-utils.hh @@ -2,7 +2,6 @@ ///@file #include -#include #include "nix/util/error.hh" #include "nix/util/types.hh" @@ -12,20 +11,25 @@ namespace nix { enum struct ExperimentalFeature; -const nlohmann::json * get(const nlohmann::json & map, const std::string & key); - -nlohmann::json * get(nlohmann::json & map, const std::string & key); - /** * Get the value of a json object at a key safely, failing with a nice * error if the key does not exist. 
* * Use instead of nlohmann::json::at() to avoid ugly exceptions. */ -const nlohmann::json & valueAt(const nlohmann::json::object_t & map, const std::string & key); +const nlohmann::json & valueAt(const nlohmann::json::object_t & map, std::string_view key); -std::optional optionalValueAt(const nlohmann::json::object_t & value, const std::string & key); -std::optional nullableValueAt(const nlohmann::json::object_t & value, const std::string & key); +/** + * @return A pointer to the value associated with `key` if `value` + * contains `key`, otherwise return `nullptr` (not JSON `null`!). + */ +const nlohmann::json * optionalValueAt(const nlohmann::json::object_t & value, std::string_view key); + +/** + * Prevents bugs; see `get` for the same trick. + */ +const nlohmann::json & valueAt(nlohmann::json::object_t && map, std::string_view key) = delete; +const nlohmann::json * optionalValueAt(nlohmann::json::object_t && value, std::string_view key) = delete; /** * Downcast the json object, failing with a nice error if the conversion fails. 
diff --git a/src/libutil/json-utils.cc b/src/libutil/json-utils.cc index 74b3b27cc..1502384e9 100644 --- a/src/libutil/json-utils.cc +++ b/src/libutil/json-utils.cc @@ -1,52 +1,21 @@ #include "nix/util/json-utils.hh" #include "nix/util/error.hh" #include "nix/util/types.hh" -#include -#include -#include +#include "nix/util/util.hh" namespace nix { -const nlohmann::json * get(const nlohmann::json & map, const std::string & key) +const nlohmann::json & valueAt(const nlohmann::json::object_t & map, std::string_view key) { - auto i = map.find(key); - if (i == map.end()) - return nullptr; - return &*i; -} - -nlohmann::json * get(nlohmann::json & map, const std::string & key) -{ - auto i = map.find(key); - if (i == map.end()) - return nullptr; - return &*i; -} - -const nlohmann::json & valueAt(const nlohmann::json::object_t & map, const std::string & key) -{ - if (!map.contains(key)) + if (auto * p = optionalValueAt(map, key)) + return *p; + else throw Error("Expected JSON object to contain key '%s' but it doesn't: %s", key, nlohmann::json(map).dump()); - - return map.at(key); } -std::optional optionalValueAt(const nlohmann::json::object_t & map, const std::string & key) +const nlohmann::json * optionalValueAt(const nlohmann::json::object_t & map, std::string_view key) { - if (!map.contains(key)) - return std::nullopt; - - return std::optional{map.at(key)}; -} - -std::optional nullableValueAt(const nlohmann::json::object_t & map, const std::string & key) -{ - auto value = valueAt(map, key); - - if (value.is_null()) - return std::nullopt; - - return std::optional{std::move(value)}; + return get(map, key); } const nlohmann::json * getNullable(const nlohmann::json & value) From 7e53afd8b94bf4a42a2959c7db2a0ce491865d59 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 25 Oct 2025 13:22:59 -0400 Subject: [PATCH 043/213] Use types to show that structured attrs are always JSON objects Before we just had partial code accessing it. 
Now, we use `nlohmann::json::object_t`, which is a `std::map`, to enforce this by construction. --- src/libexpr/primops.cc | 2 +- src/libstore/build/derivation-env-desugar.cc | 2 +- src/libstore/derivation-options.cc | 21 ++++++++++--------- .../include/nix/store/parsed-derivations.hh | 6 +++--- src/libstore/parsed-derivations.cc | 9 ++++---- src/nix/nix-build/nix-build.cc | 2 +- 6 files changed, 22 insertions(+), 20 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 5f06bf009..94dd5cb19 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1374,7 +1374,7 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName pos, "while evaluating the `__structuredAttrs` " "attribute passed to builtins.derivationStrict")) - jsonObject = StructuredAttrs{.structuredAttrs = json::object()}; + jsonObject = StructuredAttrs{}; /* Check whether null attributes should be ignored. */ bool ignoreNulls = false; diff --git a/src/libstore/build/derivation-env-desugar.cc b/src/libstore/build/derivation-env-desugar.cc index d6e002d91..8d552fc4d 100644 --- a/src/libstore/build/derivation-env-desugar.cc +++ b/src/libstore/build/derivation-env-desugar.cc @@ -25,7 +25,7 @@ DesugaredEnv DesugaredEnv::create( if (drv.structuredAttrs) { auto json = drv.structuredAttrs->prepareStructuredAttrs(store, drvOptions, inputPaths, drv.outputs); res.atFileEnvPair("NIX_ATTRS_SH_FILE", ".attrs.sh") = StructuredAttrs::writeShell(json); - res.atFileEnvPair("NIX_ATTRS_JSON_FILE", ".attrs.json") = json.dump(); + res.atFileEnvPair("NIX_ATTRS_JSON_FILE", ".attrs.json") = static_cast(std::move(json)).dump(); } else { /* In non-structured mode, set all bindings either directory in the environment or via a file, as specified by diff --git a/src/libstore/derivation-options.cc b/src/libstore/derivation-options.cc index 6afaf0348..bd9704b44 100644 --- a/src/libstore/derivation-options.cc +++ b/src/libstore/derivation-options.cc @@ -22,9 +22,9 @@ 
getStringAttr(const StringMap & env, const StructuredAttrs * parsed, const std:: if (i == parsed->structuredAttrs.end()) return {}; else { - if (!i->is_string()) + if (!i->second.is_string()) throw Error("attribute '%s' of must be a string", name); - return i->get(); + return i->second.get(); } } else { auto i = env.find(name); @@ -42,9 +42,9 @@ static bool getBoolAttr(const StringMap & env, const StructuredAttrs * parsed, c if (i == parsed->structuredAttrs.end()) return def; else { - if (!i->is_boolean()) + if (!i->second.is_boolean()) throw Error("attribute '%s' must be a Boolean", name); - return i->get(); + return i->second.get(); } } else { auto i = env.find(name); @@ -63,10 +63,11 @@ getStringsAttr(const StringMap & env, const StructuredAttrs * parsed, const std: if (i == parsed->structuredAttrs.end()) return {}; else { - if (!i->is_array()) + if (!i->second.is_array()) throw Error("attribute '%s' must be a list of strings", name); + auto & a = getArray(i->second); Strings res; - for (auto j = i->begin(); j != i->end(); ++j) { + for (auto j = a.begin(); j != a.end(); ++j) { if (!j->is_string()) throw Error("attribute '%s' must be a list of strings", name); res.push_back(j->get()); @@ -116,7 +117,7 @@ DerivationOptions::fromStructuredAttrs(const StringMap & env, const StructuredAt DerivationOptions defaults = {}; if (shouldWarn && parsed) { - auto & structuredAttrs = getObject(parsed->structuredAttrs); + auto & structuredAttrs = parsed->structuredAttrs; if (get(structuredAttrs, "allowedReferences")) { warn( @@ -147,7 +148,7 @@ DerivationOptions::fromStructuredAttrs(const StringMap & env, const StructuredAt return { .outputChecks = [&]() -> OutputChecksVariant { if (parsed) { - auto & structuredAttrs = getObject(parsed->structuredAttrs); + auto & structuredAttrs = parsed->structuredAttrs; std::map res; if (auto * outputChecks = get(structuredAttrs, "outputChecks")) { @@ -201,7 +202,7 @@ DerivationOptions::fromStructuredAttrs(const StringMap & env, const 
StructuredAt std::map res; if (parsed) { - auto & structuredAttrs = getObject(parsed->structuredAttrs); + auto & structuredAttrs = parsed->structuredAttrs; if (auto * udr = get(structuredAttrs, "unsafeDiscardReferences")) { for (auto & [outputName, output] : getObject(*udr)) { @@ -234,7 +235,7 @@ DerivationOptions::fromStructuredAttrs(const StringMap & env, const StructuredAt std::map ret; if (parsed) { - auto e = optionalValueAt(getObject(parsed->structuredAttrs), "exportReferencesGraph"); + auto * e = optionalValueAt(parsed->structuredAttrs, "exportReferencesGraph"); if (!e || !e->is_object()) return ret; for (auto & [key, value] : getObject(*e)) { diff --git a/src/libstore/include/nix/store/parsed-derivations.hh b/src/libstore/include/nix/store/parsed-derivations.hh index edef1b2d2..52e97b0e7 100644 --- a/src/libstore/include/nix/store/parsed-derivations.hh +++ b/src/libstore/include/nix/store/parsed-derivations.hh @@ -18,7 +18,7 @@ struct StructuredAttrs { static constexpr std::string_view envVarName{"__json"}; - nlohmann::json structuredAttrs; + nlohmann::json::object_t structuredAttrs; bool operator==(const StructuredAttrs &) const = default; @@ -45,7 +45,7 @@ struct StructuredAttrs */ static void checkKeyNotInUse(const StringPairs & env); - nlohmann::json prepareStructuredAttrs( + nlohmann::json::object_t prepareStructuredAttrs( Store & store, const DerivationOptions & drvOptions, const StorePathSet & inputPaths, @@ -62,7 +62,7 @@ struct StructuredAttrs * `prepareStructuredAttrs`, *not* the original `structuredAttrs` * field. 
*/ - static std::string writeShell(const nlohmann::json & prepared); + static std::string writeShell(const nlohmann::json::object_t & prepared); }; } // namespace nix diff --git a/src/libstore/parsed-derivations.cc b/src/libstore/parsed-derivations.cc index 9e8d44d6e..8d147f65f 100644 --- a/src/libstore/parsed-derivations.cc +++ b/src/libstore/parsed-derivations.cc @@ -33,7 +33,8 @@ std::optional StructuredAttrs::tryExtract(StringPairs & env) std::pair StructuredAttrs::unparse() const { - return {envVarName, structuredAttrs.dump()}; + // TODO don't copy the JSON object just to dump it. + return {envVarName, static_cast(structuredAttrs).dump()}; } void StructuredAttrs::checkKeyNotInUse(const StringPairs & env) @@ -97,7 +98,7 @@ static nlohmann::json pathInfoToJSON(Store & store, const StorePathSet & storePa return jsonList; } -nlohmann::json StructuredAttrs::prepareStructuredAttrs( +nlohmann::json::object_t StructuredAttrs::prepareStructuredAttrs( Store & store, const DerivationOptions & drvOptions, const StorePathSet & inputPaths, @@ -120,7 +121,7 @@ nlohmann::json StructuredAttrs::prepareStructuredAttrs( return json; } -std::string StructuredAttrs::writeShell(const nlohmann::json & json) +std::string StructuredAttrs::writeShell(const nlohmann::json::object_t & json) { auto handleSimpleType = [](const nlohmann::json & value) -> std::optional { @@ -144,7 +145,7 @@ std::string StructuredAttrs::writeShell(const nlohmann::json & json) std::string jsonSh; - for (auto & [key, value] : json.items()) { + for (auto & [key, value] : json) { if (!std::regex_match(key, shVarName)) continue; diff --git a/src/nix/nix-build/nix-build.cc b/src/nix/nix-build/nix-build.cc index eef97aa19..8aced503b 100644 --- a/src/nix/nix-build/nix-build.cc +++ b/src/nix/nix-build/nix-build.cc @@ -600,7 +600,7 @@ static void main_nix_build(int argc, char ** argv) structuredAttrsRC = StructuredAttrs::writeShell(json); auto attrsJSON = (tmpDir.path() / ".attrs.json").string(); - writeFile(attrsJSON, 
json.dump()); + writeFile(attrsJSON, static_cast(std::move(json)).dump()); auto attrsSH = (tmpDir.path() / ".attrs.sh").string(); writeFile(attrsSH, structuredAttrsRC); From 9d4d10954a06a9c37bb88fb702e084e4634ebd3c Mon Sep 17 00:00:00 2001 From: Marcel Date: Mon, 27 Oct 2025 00:05:39 +0100 Subject: [PATCH 044/213] diff-closures: print sizes with dynamic unit --- src/libutil-tests/util.cc | 1 + src/libutil/include/nix/util/util.hh | 2 +- src/libutil/util.cc | 9 +++++---- src/nix/diff-closures.cc | 3 +-- src/nix/path-info.cc | 2 +- 5 files changed, 9 insertions(+), 8 deletions(-) diff --git a/src/libutil-tests/util.cc b/src/libutil-tests/util.cc index c48b97e8e..32114d9da 100644 --- a/src/libutil-tests/util.cc +++ b/src/libutil-tests/util.cc @@ -158,6 +158,7 @@ TEST(renderSize, misc) ASSERT_EQ(renderSize(972, true), " 0.9 KiB"); ASSERT_EQ(renderSize(973, true), " 1.0 KiB"); // FIXME: should round down ASSERT_EQ(renderSize(1024, true), " 1.0 KiB"); + ASSERT_EQ(renderSize(-1024, true), " -1.0 KiB"); ASSERT_EQ(renderSize(1024 * 1024, true), "1024.0 KiB"); ASSERT_EQ(renderSize(1100 * 1024, true), " 1.1 MiB"); ASSERT_EQ(renderSize(2ULL * 1024 * 1024 * 1024, true), " 2.0 GiB"); diff --git a/src/libutil/include/nix/util/util.hh b/src/libutil/include/nix/util/util.hh index 26f03938a..cb1c9694d 100644 --- a/src/libutil/include/nix/util/util.hh +++ b/src/libutil/include/nix/util/util.hh @@ -104,7 +104,7 @@ N string2IntWithUnitPrefix(std::string_view s) * GiB`. If `align` is set, the number will be right-justified by * padding with spaces on the left. */ -std::string renderSize(uint64_t value, bool align = false); +std::string renderSize(int64_t value, bool align = false); /** * Parse a string into a float. 
diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 383a904ad..69826070c 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -132,15 +132,16 @@ std::optional string2Float(const std::string_view s) template std::optional string2Float(const std::string_view s); template std::optional string2Float(const std::string_view s); -std::string renderSize(uint64_t value, bool align) +std::string renderSize(int64_t value, bool align) { static const std::array prefixes{{'K', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y'}}; size_t power = 0; - double res = value; - while (res > 1024 && power < prefixes.size()) { + double abs_value = std::abs(value); + while (abs_value > 1024 && power < prefixes.size()) { ++power; - res /= 1024; + abs_value /= 1024; } + double res = (double) value / std::pow(1024.0, power); return fmt(align ? "%6.1f %ciB" : "%.1f %ciB", power == 0 ? res / 1024 : res, prefixes.at(power)); } diff --git a/src/nix/diff-closures.cc b/src/nix/diff-closures.cc index cbf842e5c..d36a21d74 100644 --- a/src/nix/diff-closures.cc +++ b/src/nix/diff-closures.cc @@ -107,8 +107,7 @@ void printClosureDiff( if (!removed.empty() || !added.empty()) items.push_back(fmt("%s → %s", showVersions(removed), showVersions(added))); if (showDelta) - items.push_back( - fmt("%s%+.1f KiB" ANSI_NORMAL, sizeDelta > 0 ? ANSI_RED : ANSI_GREEN, sizeDelta / 1024.0)); + items.push_back(fmt("%s%s" ANSI_NORMAL, sizeDelta > 0 ? 
ANSI_RED : ANSI_GREEN, renderSize(sizeDelta))); logger->cout("%s%s: %s", indent, name, concatStringsSep(", ", items)); } } diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc index fef3ae120..146b775e5 100644 --- a/src/nix/path-info.cc +++ b/src/nix/path-info.cc @@ -141,7 +141,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON void printSize(std::ostream & str, uint64_t value) { if (humanReadable) - str << fmt("\t%s", renderSize(value, true)); + str << fmt("\t%s", renderSize((int64_t) value, true)); else str << fmt("\t%11d", value); } From 1f6ac88efc33f896d632dd7fcb6244a43a822097 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 27 Oct 2025 14:10:34 +0100 Subject: [PATCH 045/213] Mark some fields in EvalState as const --- src/libexpr/eval.cc | 12 ++++++++---- src/libexpr/include/nix/expr/eval.hh | 19 ++++++++++--------- 2 files changed, 18 insertions(+), 13 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 7a00f4ddf..a6973f590 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -3067,7 +3067,7 @@ Expr * EvalState::parseExprFromFile(const SourcePath & path) return parseExprFromFile(path, staticBaseEnv); } -Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr & staticEnv) +Expr * EvalState::parseExprFromFile(const SourcePath & path, const std::shared_ptr & staticEnv) { auto buffer = path.resolveSymlinks().readFile(); // readFile hopefully have left some extra space for terminators @@ -3075,8 +3075,8 @@ Expr * EvalState::parseExprFromFile(const SourcePath & path, std::shared_ptr & staticEnv) +Expr * EvalState::parseExprFromString( + std::string s_, const SourcePath & basePath, const std::shared_ptr & staticEnv) { // NOTE this method (and parseStdin) must take care to *fully copy* their input // into their respective Pos::Origin until the parser stops overwriting its input @@ -3210,7 +3210,11 @@ std::optional EvalState::resolveLookupPathPath(const LookupPath::Pat } Expr * EvalState::parse( - char * 
text, size_t length, Pos::Origin origin, const SourcePath & basePath, std::shared_ptr & staticEnv) + char * text, + size_t length, + Pos::Origin origin, + const SourcePath & basePath, + const std::shared_ptr & staticEnv) { DocCommentMap tmpDocComments; // Only used when not origin is not a SourcePath DocCommentMap * docComments = &tmpDocComments; diff --git a/src/libexpr/include/nix/expr/eval.hh b/src/libexpr/include/nix/expr/eval.hh index 76ce62b87..2640dae39 100644 --- a/src/libexpr/include/nix/expr/eval.hh +++ b/src/libexpr/include/nix/expr/eval.hh @@ -418,7 +418,7 @@ public: RootValue vImportedDrvToDerivation = nullptr; - ref inputCache; + const ref inputCache; /** * Debugger @@ -471,18 +471,18 @@ private: /* Cache for calls to addToStore(); maps source paths to the store paths. */ - ref> srcToStore; + const ref> srcToStore; /** * A cache that maps paths to "resolved" paths for importing Nix * expressions, i.e. `/foo` to `/foo/default.nix`. */ - ref> importResolutionCache; + const ref> importResolutionCache; /** * A cache from resolved paths to values. */ - ref, @@ -592,12 +592,13 @@ public: * Parse a Nix expression from the specified file. */ Expr * parseExprFromFile(const SourcePath & path); - Expr * parseExprFromFile(const SourcePath & path, std::shared_ptr & staticEnv); + Expr * parseExprFromFile(const SourcePath & path, const std::shared_ptr & staticEnv); /** * Parse a Nix expression from the specified string. */ - Expr * parseExprFromString(std::string s, const SourcePath & basePath, std::shared_ptr & staticEnv); + Expr * + parseExprFromString(std::string s, const SourcePath & basePath, const std::shared_ptr & staticEnv); Expr * parseExprFromString(std::string s, const SourcePath & basePath); Expr * parseStdin(); @@ -766,7 +767,7 @@ public: #if NIX_USE_BOEHMGC /** A GC root for the baseEnv reference. 
*/ - std::shared_ptr baseEnvP; + const std::shared_ptr baseEnvP; #endif public: @@ -780,7 +781,7 @@ public: /** * The same, but used during parsing to resolve variables. */ - std::shared_ptr staticBaseEnv; // !!! should be private + const std::shared_ptr staticBaseEnv; // !!! should be private /** * Internal primops not exposed to the user. @@ -862,7 +863,7 @@ private: size_t length, Pos::Origin origin, const SourcePath & basePath, - std::shared_ptr & staticEnv); + const std::shared_ptr & staticEnv); /** * Current Nix call stack depth, used with `max-call-depth` setting to throw stack overflow hopefully before we run From fdc5600fa71eb13c547ed9909d71b44d41488fc0 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 27 Oct 2025 14:11:59 +0100 Subject: [PATCH 046/213] makeRegexCache(): Return a ref --- src/libexpr/include/nix/expr/eval.hh | 5 +++-- src/libexpr/primops.cc | 4 ++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/src/libexpr/include/nix/expr/eval.hh b/src/libexpr/include/nix/expr/eval.hh index 2640dae39..0c7f9cf09 100644 --- a/src/libexpr/include/nix/expr/eval.hh +++ b/src/libexpr/include/nix/expr/eval.hh @@ -191,7 +191,7 @@ std::ostream & operator<<(std::ostream & os, const ValueType t); struct RegexCache; -std::shared_ptr makeRegexCache(); +ref makeRegexCache(); struct DebugTrace { @@ -372,6 +372,7 @@ public: const fetchers::Settings & fetchSettings; const EvalSettings & settings; + SymbolTable symbols; PosTable positions; @@ -504,7 +505,7 @@ private: /** * Cache used by prim_match(). 
*/ - std::shared_ptr regexCache; + const ref regexCache; public: diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 5f06bf009..d87825a6d 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -4611,9 +4611,9 @@ struct RegexCache } }; -std::shared_ptr makeRegexCache() +ref makeRegexCache() { - return std::make_shared(); + return make_ref(); } void prim_match(EvalState & state, const PosIdx pos, Value ** args, Value & v) From ec2fd2dc23671463fa06222e60a383123380295b Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 27 Oct 2025 16:14:19 +0300 Subject: [PATCH 047/213] libexpr: Speed up BindingsBuilder::finishSizeIfNecessary Instead of iterating over the newly built bindings we can do a cheaper set_intersection to count duplicates or fall back to a per-element binary search over the "base" bindings. This speeds up `hello` evaluation by around 10ms (0.196s -> 0.187s) and `nixos.closures.ec2.x86_64-linux` by 140ms (2.744s -> 2.609s). This addresses a somewhat steep performance regression from 82315c3807f90be8f4728b32c343e6a2f31969e3 that reduced memory requirements of attribute set merges. With this patch we get back around to 2.31 level of eval performance while keeping the memory usage optimization. Also document the optimization a bit more. --- src/libexpr/include/nix/expr/attr-set.hh | 45 +++++++++++++++++++++--- 1 file changed, 41 insertions(+), 4 deletions(-) diff --git a/src/libexpr/include/nix/expr/attr-set.hh b/src/libexpr/include/nix/expr/attr-set.hh index 46eecd9bd..f57302c42 100644 --- a/src/libexpr/include/nix/expr/attr-set.hh +++ b/src/libexpr/include/nix/expr/attr-set.hh @@ -5,6 +5,7 @@ #include "nix/expr/symbol-table.hh" #include +#include #include #include @@ -463,12 +464,48 @@ private: return bindings->baseLayer; } + /** + * If the bindings gets "layered" on top of another we need to recalculate + * the number of unique attributes in the chain. 
+ * + * This is done by either iterating over the base "layer" and the newly added + * attributes and counting duplicates. If the base "layer" is big this approach + * is inefficient and we fall back to doing per-element binary search in the base + * "layer". + */ void finishSizeIfNecessary() { - if (hasBaseLayer()) - /* NOTE: Do not use std::ranges::distance, since Bindings is a sized - range, but we are calculating this size here. */ - bindings->numAttrsInChain = std::distance(bindings->begin(), bindings->end()); + if (!hasBaseLayer()) + return; + + auto & base = *bindings->baseLayer; + auto attrs = std::span(bindings->attrs, bindings->numAttrs); + + Bindings::size_type duplicates = 0; + + /* If the base bindings is smaller than the newly added attributes, + iterate using std::set_intersection to run in O(|base| + |attrs|) = + O(|attrs|). Otherwise use an O(|attrs| * log(|base|)) per-attr binary + search to check for duplicates. Note that if we are in this code path then + |attrs| <= bindingsUpdateLayerRhsSizeThreshold, which is 16 by default. We are + optimizing for the case when a small attribute set gets "layered" on top of + a much larger one. When attrsets are already small it's fine to do a linear + scan, but we should avoid expensive iterations over large "base" attrsets. */ + if (attrs.size() > base.size()) { + std::set_intersection( + base.begin(), + base.end(), + attrs.begin(), + attrs.end(), + boost::make_function_output_iterator([&]([[maybe_unused]] auto && _) { ++duplicates; })); + } else { + for (const auto & attr : attrs) { + if (base.get(attr.name)) + ++duplicates; + } + } + + bindings->numAttrsInChain = base.numAttrsInChain + attrs.size() - duplicates; } public: From bc6b9cef51414391b47633ec455c229627862c0c Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 27 Oct 2025 14:33:51 +0100 Subject: [PATCH 048/213] Move getTarballCache() into fetchers::Settings This keeps the tarball cache open across calls. 
--- src/libfetchers/git-utils.cc | 13 +++++++++---- src/libfetchers/github.cc | 7 ++++--- .../include/nix/fetchers/fetch-settings.hh | 10 ++++++++++ src/libfetchers/include/nix/fetchers/git-utils.hh | 2 -- src/libfetchers/tarball.cc | 10 ++++++---- 5 files changed, 29 insertions(+), 13 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 215418522..744790de2 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -1304,13 +1304,18 @@ std::vector> GitRepoImpl::getSubmodules return result; } -ref getTarballCache() -{ - static auto repoDir = std::filesystem::path(getCacheDir()) / "tarball-cache"; +namespace fetchers { - return GitRepo::openRepo(repoDir, true, true); +ref Settings::getTarballCache() const +{ + auto tarballCache(_tarballCache.lock()); + if (!*tarballCache) + *tarballCache = GitRepo::openRepo(std::filesystem::path(getCacheDir()) / "tarball-cache", true, true); + return ref(*tarballCache); } +} // namespace fetchers + GitRepo::WorkdirInfo GitRepo::getCachedWorkdirInfo(const std::filesystem::path & path) { static Sync> _cache; diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 2479a57d2..594f3e226 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -270,7 +270,7 @@ struct GitArchiveInputScheme : InputScheme if (auto lastModifiedAttrs = cache->lookup(lastModifiedKey)) { auto treeHash = getRevAttr(*treeHashAttrs, "treeHash"); auto lastModified = getIntAttr(*lastModifiedAttrs, "lastModified"); - if (getTarballCache()->hasObject(treeHash)) + if (input.settings->getTarballCache()->hasObject(treeHash)) return {std::move(input), TarballInfo{.treeHash = treeHash, .lastModified = (time_t) lastModified}}; else debug("Git tree with hash '%s' has disappeared from the cache, refetching...", treeHash.gitRev()); @@ -290,7 +290,7 @@ struct GitArchiveInputScheme : InputScheme *logger, lvlInfo, actUnknown, fmt("unpacking '%s' into the Git cache", input.to_string())); 
TarArchive archive{*source}; - auto tarballCache = getTarballCache(); + auto tarballCache = input.settings->getTarballCache(); auto parseSink = tarballCache->getFileSystemObjectSink(); auto lastModified = unpackTarfileToSink(archive, *parseSink); auto tree = parseSink->flush(); @@ -324,7 +324,8 @@ struct GitArchiveInputScheme : InputScheme #endif input.attrs.insert_or_assign("lastModified", uint64_t(tarballInfo.lastModified)); - auto accessor = getTarballCache()->getAccessor(tarballInfo.treeHash, false, "«" + input.to_string() + "»"); + auto accessor = + input.settings->getTarballCache()->getAccessor(tarballInfo.treeHash, false, "«" + input.to_string() + "»"); return {accessor, input}; } diff --git a/src/libfetchers/include/nix/fetchers/fetch-settings.hh b/src/libfetchers/include/nix/fetchers/fetch-settings.hh index 605b95e0d..bddca3f9e 100644 --- a/src/libfetchers/include/nix/fetchers/fetch-settings.hh +++ b/src/libfetchers/include/nix/fetchers/fetch-settings.hh @@ -11,6 +11,12 @@ #include +namespace nix { + +struct GitRepo; + +} + namespace nix::fetchers { struct Cache; @@ -125,8 +131,12 @@ struct Settings : public Config ref getCache() const; + ref getTarballCache() const; + private: mutable Sync> _cache; + + mutable Sync> _tarballCache; }; } // namespace nix::fetchers diff --git a/src/libfetchers/include/nix/fetchers/git-utils.hh b/src/libfetchers/include/nix/fetchers/git-utils.hh index 8357ce4cd..19b5f0f6b 100644 --- a/src/libfetchers/include/nix/fetchers/git-utils.hh +++ b/src/libfetchers/include/nix/fetchers/git-utils.hh @@ -120,8 +120,6 @@ struct GitRepo virtual Hash dereferenceSingletonDirectory(const Hash & oid) = 0; }; -ref getTarballCache(); - // A helper to ensure that the `git_*_free` functions get called. 
template struct Deleter diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index 863a0d680..76cf3fd32 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -136,11 +136,11 @@ static DownloadTarballResult downloadTarball_( .treeHash = treeHash, .lastModified = (time_t) getIntAttr(infoAttrs, "lastModified"), .immutableUrl = maybeGetStrAttr(infoAttrs, "immutableUrl"), - .accessor = getTarballCache()->getAccessor(treeHash, false, displayPrefix), + .accessor = settings.getTarballCache()->getAccessor(treeHash, false, displayPrefix), }; }; - if (cached && !getTarballCache()->hasObject(getRevAttr(cached->value, "treeHash"))) + if (cached && !settings.getTarballCache()->hasObject(getRevAttr(cached->value, "treeHash"))) cached.reset(); if (cached && !cached->expired) @@ -179,7 +179,7 @@ static DownloadTarballResult downloadTarball_( TarArchive{path}; }) : TarArchive{*source}; - auto tarballCache = getTarballCache(); + auto tarballCache = settings.getTarballCache(); auto parseSink = tarballCache->getFileSystemObjectSink(); auto lastModified = unpackTarfileToSink(archive, *parseSink); auto tree = parseSink->flush(); @@ -398,7 +398,9 @@ struct TarballInputScheme : CurlInputScheme input.attrs.insert_or_assign( "narHash", - getTarballCache()->treeHashToNarHash(*input.settings, result.treeHash).to_string(HashFormat::SRI, true)); + input.settings->getTarballCache() + ->treeHashToNarHash(*input.settings, result.treeHash) + .to_string(HashFormat::SRI, true)); return {result.accessor, input}; } From 3994e5627fed704fc0a0cd9717f5d6b6f0f455e5 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 2 Jun 2025 12:06:21 +0200 Subject: [PATCH 049/213] nix store copy-sigs: Add docs --- src/nix/sigs.cc | 7 +++++++ src/nix/store-copy-sigs.md | 30 ++++++++++++++++++++++++++++++ 2 files changed, 37 insertions(+) create mode 100644 src/nix/store-copy-sigs.md diff --git a/src/nix/sigs.cc b/src/nix/sigs.cc index 142421e9c..bb2cd4e6f 100644 --- 
a/src/nix/sigs.cc +++ b/src/nix/sigs.cc @@ -28,6 +28,13 @@ struct CmdCopySigs : StorePathsCommand return "copy store path signatures from substituters"; } + std::string doc() override + { + return +#include "store-copy-sigs.md" + ; + } + void run(ref store, StorePaths && storePaths) override { if (substituterUris.empty()) diff --git a/src/nix/store-copy-sigs.md b/src/nix/store-copy-sigs.md new file mode 100644 index 000000000..678756221 --- /dev/null +++ b/src/nix/store-copy-sigs.md @@ -0,0 +1,30 @@ +R""( + +# Examples + +* To copy signatures from a binary cache to the local store: + + ```console + # nix store copy-sigs --substituter https://cache.nixos.org \ + --recursive /nix/store/y1x7ng5bmc9s8lqrf98brcpk1a7lbcl5-hello-2.12.1 + ``` + +* To copy signatures from one binary cache to another: + + ```console + # nix store copy-sigs --substituter https://cache.nixos.org \ + --store file:///tmp/binary-cache \ + --recursive -v \ + /nix/store/y1x7ng5bmc9s8lqrf98brcpk1a7lbcl5-hello-2.12.1 + imported 2 signatures + ``` + +# Description + +`nix store copy-sigs` copies store path signatures from one store to another. + +It is not advised to copy signatures to binary cache stores. Binary cache signatures are stored in `.narinfo` files. Since these are cached aggressively, clients may not see the new signatures quickly. It is therefore better to set any required signatures when the paths are first uploaded to the binary cache. + +Store paths are processed in parallel. The amount of parallelism is controlled by the [`http-connections`](@docroot@/command-ref/conf-file.md#conf-http-connections) setting. 
+ +)"" From bbfaaf3a20db5c72367171455443d669c23a2a35 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 27 Oct 2025 14:52:18 +0100 Subject: [PATCH 050/213] showHelp(): Use one callFunction --- src/nix/main.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/nix/main.cc b/src/nix/main.cc index ed889a189..74d22e433 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -256,8 +256,8 @@ static void showHelp(std::vector subcommand, NixArgs & toplevel) vDump->mkString(toplevel.dumpCli()); auto vRes = state.allocValue(); - state.callFunction(*vGenerateManpage, state.getBuiltin("false"), *vRes, noPos); - state.callFunction(*vRes, *vDump, *vRes, noPos); + Value * args[]{&state.getBuiltin("false"), vDump}; + state.callFunction(*vGenerateManpage, args, *vRes, noPos); auto attr = vRes->attrs()->get(state.symbols.create(mdName + ".md")); if (!attr) From fb26285458d2c63e407b4ca78c6d96809963d237 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 27 Oct 2025 14:53:46 +0100 Subject: [PATCH 051/213] Fix #include --- src/nix/env.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/nix/env.cc b/src/nix/env.cc index 0a211399a..a80bcda67 100644 --- a/src/nix/env.cc +++ b/src/nix/env.cc @@ -1,6 +1,7 @@ -#include #include +#include + #include "nix/cmd/command.hh" #include "nix/expr/eval.hh" #include "run.hh" From 8c8b706f6b468153d7b8f8b2f0c88bfa75a1d363 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 27 Oct 2025 15:01:46 +0100 Subject: [PATCH 052/213] Fix an update to a finished value --- src/libexpr-tests/value/print.cc | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/libexpr-tests/value/print.cc b/src/libexpr-tests/value/print.cc index 1959fddf2..6cadbc70a 100644 --- a/src/libexpr-tests/value/print.cc +++ b/src/libexpr-tests/value/print.cc @@ -10,7 +10,7 @@ using namespace testing; struct ValuePrintingTests : LibExprTest { template - void test(Value v, std::string_view expected, A... 
args) + void test(Value & v, std::string_view expected, A... args) { std::stringstream out; v.print(state, out, args...); @@ -625,10 +625,11 @@ TEST_F(ValuePrintingTests, ansiColorsAttrsElided) vThree.mkInt(3); builder.insert(state.symbols.create("three"), &vThree); - vAttrs.mkAttrs(builder.finish()); + Value vAttrs2; + vAttrs2.mkAttrs(builder.finish()); test( - vAttrs, + vAttrs2, "{ one = " ANSI_CYAN "1" ANSI_NORMAL "; " ANSI_FAINT "«2 attributes elided»" ANSI_NORMAL " }", PrintOptions{.ansiColors = true, .maxAttrs = 1}); } From a91115bf220ad6e8cbac7d977e48c6983e2e91bb Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 27 Oct 2025 15:04:13 +0100 Subject: [PATCH 053/213] Remove unnecessary virtual --- src/libstore/dummy-store.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc index 509b7a0b1..6c8cb3480 100644 --- a/src/libstore/dummy-store.cc +++ b/src/libstore/dummy-store.cc @@ -148,7 +148,7 @@ struct DummyStoreImpl : DummyStore /** * The dummy store is incapable of *not* trusting! 
:) */ - virtual std::optional isTrustedClient() override + std::optional isTrustedClient() override { return Trusted; } From 3742ae061ef797e42596e1e2fc3b066aaf465373 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 27 Oct 2025 15:04:56 +0100 Subject: [PATCH 054/213] Typo --- src/libstore/filetransfer.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index 9fd7a967b..6b9c6602b 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -622,7 +622,7 @@ struct curlFileTransfer : public FileTransfer void quit() { quitting = true; - /* We wil not be processing any more incomming requests */ + /* We will not be processing any more incoming requests */ while (!incoming.empty()) incoming.pop(); } From 9321669353849228f5beeb8a337053cacaad00d3 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 27 Oct 2025 15:07:01 +0100 Subject: [PATCH 055/213] Make getDefaultCores() static --- src/libstore/globals.cc | 2 +- src/libstore/include/nix/store/globals.hh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc index 4fdb820a9..8c542b686 100644 --- a/src/libstore/globals.cc +++ b/src/libstore/globals.cc @@ -150,7 +150,7 @@ std::vector getUserConfigFiles() return files; } -unsigned int Settings::getDefaultCores() const +unsigned int Settings::getDefaultCores() { const unsigned int concurrency = std::max(1U, std::thread::hardware_concurrency()); const unsigned int maxCPU = getMaxCPU(); diff --git a/src/libstore/include/nix/store/globals.hh b/src/libstore/include/nix/store/globals.hh index 14647c05f..f9be1d482 100644 --- a/src/libstore/include/nix/store/globals.hh +++ b/src/libstore/include/nix/store/globals.hh @@ -77,7 +77,7 @@ public: Settings(); - unsigned int getDefaultCores() const; + static unsigned int getDefaultCores(); Path nixPrefix; From 17777e3b703af1c119b71a18a9e425f22e9accfe Mon Sep 17 00:00:00 2001 
From: Eelco Dolstra Date: Mon, 27 Oct 2025 15:07:56 +0100 Subject: [PATCH 056/213] Settings typos --- src/libstore/include/nix/store/globals.hh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/libstore/include/nix/store/globals.hh b/src/libstore/include/nix/store/globals.hh index f9be1d482..0d6f380a2 100644 --- a/src/libstore/include/nix/store/globals.hh +++ b/src/libstore/include/nix/store/globals.hh @@ -427,7 +427,7 @@ public: R"( If set to `true`, Nix instructs [remote build machines](#conf-builders) to use their own [`substituters`](#conf-substituters) if available. - It means that remote build hosts fetches as many dependencies as possible from their own substituters (e.g, from `cache.nixos.org`) instead of waiting for the local machine to upload them all. + It means that remote build hosts fetch as many dependencies as possible from their own substituters (e.g, from `cache.nixos.org`) instead of waiting for the local machine to upload them all. This can drastically reduce build times if the network connection between the local machine and the remote build host is slow. )"}; @@ -503,7 +503,7 @@ public: by the Nix account, its group should be the group specified here, and its mode should be `1775`. - If the build users group is empty, builds areperformed under + If the build users group is empty, builds are performed under the uid of the Nix process (that is, the uid of the caller if `NIX_REMOTE` is empty, the uid under which the Nix daemon runs if `NIX_REMOTE` is `daemon`). Obviously, this should not be used @@ -847,8 +847,8 @@ public: 4. The path to the build's scratch directory. This directory exists only if the build was run with `--keep-failed`. - The stderr and stdout output from the diff hook isn't - displayed to the user. Instead, it print to the nix-daemon's log. + The stderr and stdout output from the diff hook isn't displayed + to the user. Instead, it prints to the nix-daemon's log. 
When using the Nix daemon, `diff-hook` must be set in the `nix.conf` configuration file, and cannot be passed at the command line. From 1af5a98955fafea6bd32f4d53c30e5ebc2d85d5e Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 27 Oct 2025 15:09:03 +0100 Subject: [PATCH 057/213] Document removed WorkerProto ops --- src/libstore/include/nix/store/worker-protocol.hh | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/libstore/include/nix/store/worker-protocol.hh b/src/libstore/include/nix/store/worker-protocol.hh index 29d4828c2..aec3820d2 100644 --- a/src/libstore/include/nix/store/worker-protocol.hh +++ b/src/libstore/include/nix/store/worker-protocol.hh @@ -152,6 +152,7 @@ enum struct WorkerProto::Op : uint64_t { AddIndirectRoot = 12, SyncWithGC = 13, FindRoots = 14, + // ExportPath = 16, // removed QueryDeriver = 18, // obsolete SetOptions = 19, CollectGarbage = 20, @@ -161,6 +162,7 @@ enum struct WorkerProto::Op : uint64_t { QueryFailedPaths = 24, ClearFailedPaths = 25, QueryPathInfo = 26, + // ImportPaths = 27, // removed QueryDerivationOutputNames = 28, // obsolete QueryPathFromHashPart = 29, QuerySubstitutablePathInfos = 30, From 91cd42511e5bd591991485a3bb7c942acfc37476 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 27 Oct 2025 15:11:16 +0100 Subject: [PATCH 058/213] Introduce MINIMUM_PROTOCOL_VERSION constant --- src/libstore/daemon.cc | 2 +- src/libstore/include/nix/store/worker-protocol.hh | 1 + src/libstore/remote-store.cc | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/libstore/daemon.cc b/src/libstore/daemon.cc index d6d2a5781..e6efd6c09 100644 --- a/src/libstore/daemon.cc +++ b/src/libstore/daemon.cc @@ -1031,7 +1031,7 @@ void processConnection(ref store, FdSource && from, FdSink && to, Trusted auto [protoVersion, features] = WorkerProto::BasicServerConnection::handshake(to, from, PROTOCOL_VERSION, WorkerProto::allFeatures); - if (protoVersion < 256 + 18) + if (protoVersion < MINIMUM_PROTOCOL_VERSION) throw 
Error("the Nix client version is too old"); WorkerProto::BasicServerConnection conn; diff --git a/src/libstore/include/nix/store/worker-protocol.hh b/src/libstore/include/nix/store/worker-protocol.hh index aec3820d2..6ae5fdcbc 100644 --- a/src/libstore/include/nix/store/worker-protocol.hh +++ b/src/libstore/include/nix/store/worker-protocol.hh @@ -13,6 +13,7 @@ namespace nix { /* Note: you generally shouldn't change the protocol version. Define a new `WorkerProto::Feature` instead. */ #define PROTOCOL_VERSION (1 << 8 | 38) +#define MINIMUM_PROTOCOL_VERSION (1 << 8 | 18) #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00) #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff) diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc index 0d83aed4c..949a51f18 100644 --- a/src/libstore/remote-store.cc +++ b/src/libstore/remote-store.cc @@ -73,7 +73,7 @@ void RemoteStore::initConnection(Connection & conn) try { auto [protoVersion, features] = WorkerProto::BasicClientConnection::handshake(conn.to, tee, PROTOCOL_VERSION, WorkerProto::allFeatures); - if (protoVersion < 256 + 18) + if (protoVersion < MINIMUM_PROTOCOL_VERSION) throw Error("the Nix daemon version is too old"); conn.protoVersion = protoVersion; conn.features = features; From f234633e2707db752a25f5319d5b63389dd162bd Mon Sep 17 00:00:00 2001 From: Marcel Date: Mon, 27 Oct 2025 00:55:40 +0100 Subject: [PATCH 059/213] refactor(libutil): remove `showBytes()` in favor of `renderSize()` The `showBytes()` function was redundant with `renderSize()` as the latter automatically selects the appropriate unit (KiB, MiB, GiB, etc.) based on the value, whereas `showBytes()` always formatted as MiB regardless of size. 
Co-authored-by: Bernardo Meurer Costa --- src/libmain/include/nix/main/shared.hh | 2 -- src/libmain/shared.cc | 2 +- src/libstore/optimise-store.cc | 2 +- src/libutil/include/nix/util/util.hh | 2 -- src/libutil/util.cc | 5 ----- 5 files changed, 2 insertions(+), 11 deletions(-) diff --git a/src/libmain/include/nix/main/shared.hh b/src/libmain/include/nix/main/shared.hh index 47d08a050..43069ba82 100644 --- a/src/libmain/include/nix/main/shared.hh +++ b/src/libmain/include/nix/main/shared.hh @@ -89,8 +89,6 @@ extern volatile ::sig_atomic_t blockInt; /* GC helpers. */ -std::string showBytes(uint64_t bytes); - struct GCResults; struct PrintFreed diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 4b36ec98e..3b88ea0c9 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -406,7 +406,7 @@ RunPager::~RunPager() PrintFreed::~PrintFreed() { if (show) - std::cout << fmt("%d store paths deleted, %s freed\n", results.paths.size(), showBytes(results.bytesFreed)); + std::cout << fmt("%d store paths deleted, %s freed\n", results.paths.size(), renderSize(results.bytesFreed)); } } // namespace nix diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc index 8f2878136..3e02fa812 100644 --- a/src/libstore/optimise-store.cc +++ b/src/libstore/optimise-store.cc @@ -312,7 +312,7 @@ void LocalStore::optimiseStore() optimiseStore(stats); - printInfo("%s freed by hard-linking %d files", showBytes(stats.bytesFreed), stats.filesLinked); + printInfo("%s freed by hard-linking %d files", renderSize(stats.bytesFreed), stats.filesLinked); } void LocalStore::optimisePath(const Path & path, RepairFlag repair) diff --git a/src/libutil/include/nix/util/util.hh b/src/libutil/include/nix/util/util.hh index cb1c9694d..1234937b4 100644 --- a/src/libutil/include/nix/util/util.hh +++ b/src/libutil/include/nix/util/util.hh @@ -333,8 +333,6 @@ struct overloaded : Ts... template overloaded(Ts...) 
-> overloaded; -std::string showBytes(uint64_t bytes); - /** * Provide an addition operator between strings and string_views * inexplicably omitted from the standard library. diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 69826070c..f14bc63ac 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -257,9 +257,4 @@ std::pair getLine(std::string_view s) } } -std::string showBytes(uint64_t bytes) -{ - return fmt("%.2f MiB", bytes / (1024.0 * 1024.0)); -} - } // namespace nix From 584a8e8a0085e3597f79d1ac9cf9970f575de32a Mon Sep 17 00:00:00 2001 From: Marcel Date: Mon, 27 Oct 2025 01:21:02 +0100 Subject: [PATCH 060/213] treewide: replace manual MiB calculations with renderSize --- src/libfetchers/git-utils.cc | 5 +++-- src/libmain/shared.cc | 14 ++++++++------ src/libstore/gc.cc | 5 ++--- 3 files changed, 13 insertions(+), 11 deletions(-) diff --git a/src/libfetchers/git-utils.cc b/src/libfetchers/git-utils.cc index 215418522..65587b43a 100644 --- a/src/libfetchers/git-utils.cc +++ b/src/libfetchers/git-utils.cc @@ -9,6 +9,7 @@ #include "nix/util/users.hh" #include "nix/util/fs-sink.hh" #include "nix/util/sync.hh" +#include "nix/util/util.hh" #include #include @@ -530,12 +531,12 @@ struct GitRepoImpl : GitRepo, std::enable_shared_from_this auto act = (Activity *) payload; act->result( resFetchStatus, - fmt("%d/%d objects received, %d/%d deltas indexed, %.1f MiB", + fmt("%d/%d objects received, %d/%d deltas indexed, %s", stats->received_objects, stats->total_objects, stats->indexed_deltas, stats->total_deltas, - stats->received_bytes / (1024.0 * 1024.0))); + renderSize(stats->received_bytes))); return getInterrupted() ? 
-1 : 0; } diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 3b88ea0c9..19733fb3e 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -6,6 +6,7 @@ #include "nix/main/loggers.hh" #include "nix/main/progress-bar.hh" #include "nix/util/signals.hh" +#include "nix/util/util.hh" #include #include @@ -64,18 +65,19 @@ void printMissing(ref store, const MissingPaths & missing, Verbosity lvl) } if (!missing.willSubstitute.empty()) { - const float downloadSizeMiB = missing.downloadSize / (1024.f * 1024.f); - const float narSizeMiB = missing.narSize / (1024.f * 1024.f); if (missing.willSubstitute.size() == 1) { printMsg( - lvl, "this path will be fetched (%.2f MiB download, %.2f MiB unpacked):", downloadSizeMiB, narSizeMiB); + lvl, + "this path will be fetched (%s download, %s unpacked):", + renderSize(missing.downloadSize), + renderSize(missing.narSize)); } else { printMsg( lvl, - "these %d paths will be fetched (%.2f MiB download, %.2f MiB unpacked):", + "these %d paths will be fetched (%s download, %s unpacked):", missing.willSubstitute.size(), - downloadSizeMiB, - narSizeMiB); + renderSize(missing.downloadSize), + renderSize(missing.narSize)); } std::vector willSubstituteSorted = {}; std::for_each(missing.willSubstitute.begin(), missing.willSubstitute.end(), [&](const StorePath & p) { diff --git a/src/libstore/gc.cc b/src/libstore/gc.cc index 47f40ab8e..193247aa2 100644 --- a/src/libstore/gc.cc +++ b/src/libstore/gc.cc @@ -5,6 +5,7 @@ #include "nix/util/finally.hh" #include "nix/util/unix-domain-socket.hh" #include "nix/util/signals.hh" +#include "nix/util/util.hh" #include "nix/store/posix-fs-canonicalise.hh" #include "store-config-private.hh" @@ -906,9 +907,7 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results) #endif ; - printInfo( - "note: currently hard linking saves %.2f MiB", - ((unsharedSize - actualSize - overhead) / (1024.0 * 1024.0))); + printInfo("note: currently hard linking saves %s", 
renderSize(unsharedSize - actualSize - overhead)); } /* While we're at it, vacuum the database. */ From 6b6ceddf72979d8cd29aa79863f212818ff46385 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 2 Jun 2025 11:41:02 +0200 Subject: [PATCH 061/213] nix store copy-sigs: Use http-connections setting to control parallelism Previously it used the `ThreadPool` default, i.e. `std::thread::hardware_concurrency()`. But copying signatures is not primarily CPU-bound so it makes more sense to use the `http-connections` setting (since we're typically copying from/to a binary cache). --- src/nix/sigs.cc | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/nix/sigs.cc b/src/nix/sigs.cc index 142421e9c..b5ad0c133 100644 --- a/src/nix/sigs.cc +++ b/src/nix/sigs.cc @@ -3,6 +3,7 @@ #include "nix/main/shared.hh" #include "nix/store/store-open.hh" #include "nix/util/thread-pool.hh" +#include "nix/store/filetransfer.hh" #include @@ -38,7 +39,7 @@ struct CmdCopySigs : StorePathsCommand for (auto & s : substituterUris) substituters.push_back(openStore(s)); - ThreadPool pool; + ThreadPool pool{fileTransferSettings.httpConnections}; std::atomic added{0}; From 5dcfa86910fa6777e33265647f51116e648f2f70 Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Sat, 25 Oct 2025 01:00:10 +0000 Subject: [PATCH 062/213] refactor(libstore): use string_view in HttpBinaryCacheStore::makeRequest --- src/libstore/http-binary-cache-store.cc | 2 +- src/libstore/include/nix/store/http-binary-cache-store.hh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 945fe1834..738db132d 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -161,7 +161,7 @@ void HttpBinaryCacheStore::upsertFile( } } -FileTransferRequest HttpBinaryCacheStore::makeRequest(const std::string & path) +FileTransferRequest 
HttpBinaryCacheStore::makeRequest(std::string_view path) { /* Otherwise the last path fragment will get discarded. */ auto cacheUriWithTrailingSlash = config->cacheUri; diff --git a/src/libstore/include/nix/store/http-binary-cache-store.hh b/src/libstore/include/nix/store/http-binary-cache-store.hh index d8ba72390..ecad09975 100644 --- a/src/libstore/include/nix/store/http-binary-cache-store.hh +++ b/src/libstore/include/nix/store/http-binary-cache-store.hh @@ -86,7 +86,7 @@ protected: const std::string & mimeType, uint64_t sizeHint) override; - FileTransferRequest makeRequest(const std::string & path); + FileTransferRequest makeRequest(std::string_view path); void getFile(const std::string & path, Sink & sink) override; From ef8dd58d9bb41d5c4dba1765bb4342f4082fba5a Mon Sep 17 00:00:00 2001 From: Taeer Bar-Yam Date: Mon, 27 Oct 2025 18:31:55 +0100 Subject: [PATCH 063/213] parser.y: use std::move() to avoid unnecessary copies With #14314, in some places in the parser we started using C++ objects directly rather than pointers. In those places lines like `$$ = $1` now imply a copy when we don't need one. This commit changes those to `$$ = std::move($1)` to avoid those copies. 
--- src/libexpr/parser.y | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 93c944dcf..2257e1b45 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -345,8 +345,8 @@ string_parts string_parts_interpolated : string_parts_interpolated STR - { $$ = $1; $$.emplace_back(state->at(@2), new ExprString(state->alloc, $2)); } - | string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = $1; $$.emplace_back(state->at(@2), $3); } + { $$ = std::move($1); $$.emplace_back(state->at(@2), new ExprString(state->alloc, $2)); } + | string_parts_interpolated DOLLAR_CURLY expr '}' { $$ = std::move($1); $$.emplace_back(state->at(@2), $3); } | DOLLAR_CURLY expr '}' { $$.emplace_back(state->at(@1), $2); } | STR DOLLAR_CURLY expr '}' { $$.emplace_back(state->at(@1), new ExprString(state->alloc, $1)); @@ -391,8 +391,8 @@ path_start ; ind_string_parts - : ind_string_parts IND_STR { $$ = $1; $$.emplace_back(state->at(@2), $2); } - | ind_string_parts DOLLAR_CURLY expr '}' { $$ = $1; $$.emplace_back(state->at(@2), $3); } + : ind_string_parts IND_STR { $$ = std::move($1); $$.emplace_back(state->at(@2), $2); } + | ind_string_parts DOLLAR_CURLY expr '}' { $$ = std::move($1); $$.emplace_back(state->at(@2), $3); } | { } ; @@ -440,9 +440,9 @@ binds1 ; attrs - : attrs attr { $$ = $1; $$.emplace_back(AttrName(state->symbols.create($2)), state->at(@2)); } + : attrs attr { $$ = std::move($1); $$.emplace_back(AttrName(state->symbols.create($2)), state->at(@2)); } | attrs string_attr - { $$ = $1; + { $$ = std::move($1); std::visit(overloaded { [&](std::string_view str) { $$.emplace_back(AttrName(state->symbols.create(str)), state->at(@2)); }, [&](Expr * expr) { @@ -457,20 +457,20 @@ attrs ; attrpath - : attrpath '.' attr { $$ = $1; $$.push_back(AttrName(state->symbols.create($3))); } + : attrpath '.' attr { $$ = std::move($1); $$.push_back(AttrName(state->symbols.create($3))); } | attrpath '.' 
string_attr - { $$ = $1; + { $$ = std::move($1); std::visit(overloaded { [&](std::string_view str) { $$.push_back(AttrName(state->symbols.create(str))); }, [&](Expr * expr) { $$.push_back(AttrName(expr)); } - }, $3); + }, std::move($3)); } | attr { $$.push_back(AttrName(state->symbols.create($1))); } | string_attr { std::visit(overloaded { [&](std::string_view str) { $$.push_back(AttrName(state->symbols.create(str))); }, [&](Expr * expr) { $$.push_back(AttrName(expr)); } - }, $1); + }, std::move($1)); } ; @@ -480,7 +480,7 @@ attr ; string_attr - : '"' string_parts '"' { $$ = $2; } + : '"' string_parts '"' { $$ = std::move($2); } | DOLLAR_CURLY expr '}' { $$ = $2; } ; From 50e8d17f3c08a3d929c2a663a9711dca7a656635 Mon Sep 17 00:00:00 2001 From: Taeer Bar-Yam Date: Mon, 27 Oct 2025 18:46:30 +0100 Subject: [PATCH 064/213] parser.y: use emplace_back() for vector --- src/libexpr/parser.y | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 2257e1b45..cf563e869 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -440,11 +440,11 @@ binds1 ; attrs - : attrs attr { $$ = std::move($1); $$.emplace_back(AttrName(state->symbols.create($2)), state->at(@2)); } + : attrs attr { $$ = std::move($1); $$.emplace_back(state->symbols.create($2), state->at(@2)); } | attrs string_attr { $$ = std::move($1); std::visit(overloaded { - [&](std::string_view str) { $$.emplace_back(AttrName(state->symbols.create(str)), state->at(@2)); }, + [&](std::string_view str) { $$.emplace_back(state->symbols.create(str), state->at(@2)); }, [&](Expr * expr) { throw ParseError({ .msg = HintFmt("dynamic attributes not allowed in inherit"), @@ -457,19 +457,19 @@ attrs ; attrpath - : attrpath '.' attr { $$ = std::move($1); $$.push_back(AttrName(state->symbols.create($3))); } + : attrpath '.' attr { $$ = std::move($1); $$.emplace_back(state->symbols.create($3)); } | attrpath '.' 
string_attr { $$ = std::move($1); std::visit(overloaded { - [&](std::string_view str) { $$.push_back(AttrName(state->symbols.create(str))); }, - [&](Expr * expr) { $$.push_back(AttrName(expr)); } + [&](std::string_view str) { $$.emplace_back(state->symbols.create(str)); }, + [&](Expr * expr) { $$.emplace_back(expr); } }, std::move($3)); } - | attr { $$.push_back(AttrName(state->symbols.create($1))); } + | attr { $$.emplace_back(state->symbols.create($1)); } | string_attr { std::visit(overloaded { - [&](std::string_view str) { $$.push_back(AttrName(state->symbols.create(str))); }, - [&](Expr * expr) { $$.push_back(AttrName(expr)); } + [&](std::string_view str) { $$.emplace_back(state->symbols.create(str)); }, + [&](Expr * expr) { $$.emplace_back(expr); } }, std::move($1)); } ; From 9e9dfe36df18ddcbae8172bbd31438d3758dece6 Mon Sep 17 00:00:00 2001 From: Taeer Bar-Yam Date: Mon, 27 Oct 2025 19:30:17 +0100 Subject: [PATCH 065/213] libexpr: store ExprList data in Exprs::alloc --- src/libexpr/include/nix/expr/nixexpr.hh | 10 ++++++++-- src/libexpr/parser.y | 10 +++++----- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/src/libexpr/include/nix/expr/nixexpr.hh b/src/libexpr/include/nix/expr/nixexpr.hh index 86ad01504..26d5addd5 100644 --- a/src/libexpr/include/nix/expr/nixexpr.hh +++ b/src/libexpr/include/nix/expr/nixexpr.hh @@ -442,8 +442,14 @@ struct ExprAttrs : Expr struct ExprList : Expr { - std::vector elems; - ExprList() {}; + std::span elems; + + ExprList(std::pmr::polymorphic_allocator & alloc, std::vector exprs) + : elems({alloc.allocate_object(exprs.size()), exprs.size()}) + { + std::ranges::copy(exprs, elems.begin()); + }; + COMMON_METHODS Value * maybeThunk(EvalState & state, Env & env) override; diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 93c944dcf..40d93a15b 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -129,7 +129,7 @@ static Expr * makeCall(PosIdx pos, Expr * fn, Expr * arg) { %type start expr 
expr_function expr_if expr_op %type expr_select expr_simple expr_app %type expr_pipe_from expr_pipe_into -%type expr_list +%type > list %type binds binds1 %type formals formal_set %type formal @@ -334,7 +334,7 @@ expr_simple { $2->pos = CUR_POS; $$ = $2; } | '{' '}' { $$ = new ExprAttrs(CUR_POS); } - | '[' expr_list ']' { $$ = $2; } + | '[' list ']' { $$ = new ExprList(state->alloc, std::move($2)); } ; string_parts @@ -484,9 +484,9 @@ string_attr | DOLLAR_CURLY expr '}' { $$ = $2; } ; -expr_list - : expr_list expr_select { $$ = $1; $1->elems.push_back($2); /* !!! dangerous */; $2->warnIfCursedOr(state->symbols, state->positions); } - | { $$ = new ExprList; } +list + : list expr_select { $$ = std::move($1); $$.push_back($2); /* !!! dangerous */; $2->warnIfCursedOr(state->symbols, state->positions); } + | { } ; formal_set From 91b69e9e70213d7bdb3bef314383a832b5c7aac8 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 13 Oct 2025 00:15:24 -0400 Subject: [PATCH 066/213] `nlohmann::json` instance and JSON Schema for `ContentAddress` Co-authored-by: Robert Hensing --- doc/manual/package.nix | 1 + doc/manual/source/SUMMARY.md.in | 1 + .../source/protocols/json/content-address.md | 21 ++++ .../json/fixup-json-schema-generated-doc.sed | 3 + doc/manual/source/protocols/json/meson.build | 1 + .../protocols/json/schema/content-address-v1 | 1 + .../json/schema/content-address-v1.yaml | 55 +++++++++++ .../protocols/json/schema/derivation-v3.yaml | 17 +--- .../json/schema/deriving-path-v1.yaml | 4 +- .../source/protocols/json/schema/hash-v1.yaml | 4 +- src/json-schema-checks/content-address | 1 + src/json-schema-checks/meson.build | 10 +- src/json-schema-checks/package.nix | 1 + src/libstore-tests/content-address.cc | 97 +++++++++++++++---- .../data/content-address/nar.json | 8 ++ .../data/content-address/text.json | 8 ++ src/libstore/content-address.cc | 34 +++++++ .../include/nix/store/content-address.hh | 12 +++ 18 files changed, 242 insertions(+), 37 deletions(-) create 
mode 100644 doc/manual/source/protocols/json/content-address.md create mode 120000 doc/manual/source/protocols/json/schema/content-address-v1 create mode 100644 doc/manual/source/protocols/json/schema/content-address-v1.yaml create mode 120000 src/json-schema-checks/content-address create mode 100644 src/libstore-tests/data/content-address/nar.json create mode 100644 src/libstore-tests/data/content-address/text.json diff --git a/doc/manual/package.nix b/doc/manual/package.nix index eb20f8714..140fa9849 100644 --- a/doc/manual/package.nix +++ b/doc/manual/package.nix @@ -35,6 +35,7 @@ mkMesonDerivation (finalAttrs: { ../../.version # For example JSON ../../src/libutil-tests/data/hash + ../../src/libstore-tests/data/content-address ../../src/libstore-tests/data/derived-path # Too many different types of files to filter for now ../../doc/manual diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index b4796f652..abd9422cd 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -118,6 +118,7 @@ - [Formats and Protocols](protocols/index.md) - [JSON Formats](protocols/json/index.md) - [Hash](protocols/json/hash.md) + - [Content Address](protocols/json/content-address.md) - [Store Object Info](protocols/json/store-object-info.md) - [Derivation](protocols/json/derivation.md) - [Deriving Path](protocols/json/deriving-path.md) diff --git a/doc/manual/source/protocols/json/content-address.md b/doc/manual/source/protocols/json/content-address.md new file mode 100644 index 000000000..2284e30aa --- /dev/null +++ b/doc/manual/source/protocols/json/content-address.md @@ -0,0 +1,21 @@ +{{#include content-address-v1-fixed.md}} + +## Examples + +### [Text](@docroot@/store/store-object/content-address.html#method-text) method + +```json +{{#include schema/content-address-v1/text.json}} +``` + +### [Nix Archive](@docroot@/store/store-object/content-address.html#method-nix-archive) method + +```json +{{#include 
schema/content-address-v1/nar.json}} +``` + + diff --git a/doc/manual/source/protocols/json/fixup-json-schema-generated-doc.sed b/doc/manual/source/protocols/json/fixup-json-schema-generated-doc.sed index 126e666e9..27895d42a 100644 --- a/doc/manual/source/protocols/json/fixup-json-schema-generated-doc.sed +++ b/doc/manual/source/protocols/json/fixup-json-schema-generated-doc.sed @@ -12,3 +12,6 @@ s/\\`/`/g # As we have more such relative links, more replacements of this nature # should appear below. s^\(./hash-v1.yaml\)\?#/$defs/algorithm^[JSON format for `Hash`](./hash.html#algorithm)^g +s^\(./hash-v1.yaml\)^[JSON format for `Hash`](./hash.html)^g +s^\(./content-address-v1.yaml\)\?#/$defs/method^[JSON format for `ContentAddress`](./content-address.html#method)^g +s^\(./content-address-v1.yaml\)^[JSON format for `ContentAddress`](./content-address.html)^g diff --git a/doc/manual/source/protocols/json/meson.build b/doc/manual/source/protocols/json/meson.build index 191ec6dbe..f79667961 100644 --- a/doc/manual/source/protocols/json/meson.build +++ b/doc/manual/source/protocols/json/meson.build @@ -10,6 +10,7 @@ json_schema_config = files('json-schema-for-humans-config.yaml') schemas = [ 'hash-v1', + 'content-address-v1', 'derivation-v3', 'deriving-path-v1', ] diff --git a/doc/manual/source/protocols/json/schema/content-address-v1 b/doc/manual/source/protocols/json/schema/content-address-v1 new file mode 120000 index 000000000..35a0dd865 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/content-address-v1 @@ -0,0 +1 @@ +../../../../../../src/libstore-tests/data/content-address \ No newline at end of file diff --git a/doc/manual/source/protocols/json/schema/content-address-v1.yaml b/doc/manual/source/protocols/json/schema/content-address-v1.yaml new file mode 100644 index 000000000..d0f759201 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/content-address-v1.yaml @@ -0,0 +1,55 @@ +"$schema": "http://json-schema.org/draft-04/schema" +"$id": 
"https://nix.dev/manual/nix/latest/protocols/json/schema/content-address-v1.json" +title: Content Address +description: | + This schema describes the JSON representation of Nix's `ContentAddress` type, which conveys information about [content-addressing store objects](@docroot@/store/store-object/content-address.md). + + > **Note** + > + > For current methods of content addressing, this data type is a bit suspicious, because it is neither simply a content address of a file system object (the `method` is richer), nor simply a content address of a store object (the `hash` doesn't account for the references). + > It should thus only be used in contexts where the references are also known / otherwise made tamper-resistant. + + + +type: object +properties: + method: + "$ref": "#/$defs/method" + hash: + title: Content Address + description: | + This would be the content-address itself. + + For all current methods, this is just a content address of the file system object of the store object, [as described in the store chapter](@docroot@/store/file-system-object/content-address.md), and not of the store object as a whole. + In particular, the references of the store object are *not* taken into account with this hash (and currently-supported methods). + "$ref": "./hash-v1.yaml" +required: +- method +- hash +additionalProperties: false +"$defs": + method: + type: string + enum: [flat, nar, text, git] + title: Content-Addressing Method + description: | + A string representing the [method](@docroot@/store/store-object/content-address.md) of content addressing that is chosen. 
+ + Valid method strings are: + + - [`flat`](@docroot@/store/store-object/content-address.md#method-flat) (provided the contents are a single file) + - [`nar`](@docroot@/store/store-object/content-address.md#method-nix-archive) + - [`text`](@docroot@/store/store-object/content-address.md#method-text) + - [`git`](@docroot@/store/store-object/content-address.md#method-git) diff --git a/doc/manual/source/protocols/json/schema/derivation-v3.yaml b/doc/manual/source/protocols/json/schema/derivation-v3.yaml index 7c92d475d..c950b839f 100644 --- a/doc/manual/source/protocols/json/schema/derivation-v3.yaml +++ b/doc/manual/source/protocols/json/schema/derivation-v3.yaml @@ -1,5 +1,5 @@ -"$schema": http://json-schema.org/draft-04/schema# -"$id": https://nix.dev/manual/nix/latest/protocols/json/schema/derivation-v3.json +"$schema": "http://json-schema.org/draft-04/schema" +"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/derivation-v3.json" title: Derivation description: | Experimental JSON representation of a Nix derivation (version 3). @@ -154,19 +154,10 @@ properties: The output path, if known in advance. method: - type: string - title: Content addressing method - enum: [flat, nar, text, git] + "$ref": "./content-address-v1.yaml#/$defs/method" description: | For an output which will be [content addressed](@docroot@/store/derivation/outputs/content-address.md), a string representing the [method](@docroot@/store/store-object/content-address.md) of content addressing that is chosen. - - Valid method strings are: - - - [`flat`](@docroot@/store/store-object/content-address.md#method-flat) - - [`nar`](@docroot@/store/store-object/content-address.md#method-nix-archive) - - [`text`](@docroot@/store/store-object/content-address.md#method-text) - - [`git`](@docroot@/store/store-object/content-address.md#method-git) - + See the linked original definition for further details. 
hashAlgo: title: Hash algorithm "$ref": "./hash-v1.yaml#/$defs/algorithm" diff --git a/doc/manual/source/protocols/json/schema/deriving-path-v1.yaml b/doc/manual/source/protocols/json/schema/deriving-path-v1.yaml index 9c0350d3d..7fd74941e 100644 --- a/doc/manual/source/protocols/json/schema/deriving-path-v1.yaml +++ b/doc/manual/source/protocols/json/schema/deriving-path-v1.yaml @@ -1,5 +1,5 @@ -"$schema": http://json-schema.org/draft-04/schema# -"$id": https://nix.dev/manual/nix/latest/protocols/json/schema/deriving-path-v1.json +"$schema": "http://json-schema.org/draft-04/schema" +"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/deriving-path-v1.json" title: Deriving Path description: | This schema describes the JSON representation of Nix's [Deriving Path](@docroot@/store/derivation/index.md#deriving-path). diff --git a/doc/manual/source/protocols/json/schema/hash-v1.yaml b/doc/manual/source/protocols/json/schema/hash-v1.yaml index 844959bcd..316fb6d73 100644 --- a/doc/manual/source/protocols/json/schema/hash-v1.yaml +++ b/doc/manual/source/protocols/json/schema/hash-v1.yaml @@ -1,5 +1,5 @@ -"$schema": http://json-schema.org/draft-04/schema# -"$id": https://nix.dev/manual/nix/latest/protocols/json/schema/hash-v1.json +"$schema": "http://json-schema.org/draft-04/schema" +"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/hash-v1.json" title: Hash description: | A cryptographic hash value used throughout Nix for content addressing and integrity verification. 
diff --git a/src/json-schema-checks/content-address b/src/json-schema-checks/content-address new file mode 120000 index 000000000..194a265a1 --- /dev/null +++ b/src/json-schema-checks/content-address @@ -0,0 +1 @@ +../../src/libstore-tests/data/content-address \ No newline at end of file diff --git a/src/json-schema-checks/meson.build b/src/json-schema-checks/meson.build index 09da8770b..745fb5ffa 100644 --- a/src/json-schema-checks/meson.build +++ b/src/json-schema-checks/meson.build @@ -30,6 +30,14 @@ schemas = [ 'blake3-base64.json', ], }, + { + 'stem' : 'content-address', + 'schema' : schema_dir / 'content-address-v1.yaml', + 'files' : [ + 'text.json', + 'nar.json', + ], + }, { 'stem' : 'derivation', 'schema' : schema_dir / 'derivation-v3.yaml', @@ -73,8 +81,6 @@ foreach schema : schemas stem + '-schema-valid', jv, args : [ - '--map', - './hash-v1.yaml=' + schema_dir / 'hash-v1.yaml', 'http://json-schema.org/draft-04/schema', schema_file, ], diff --git a/src/json-schema-checks/package.nix b/src/json-schema-checks/package.nix index cf4e4cb19..6a76c8b28 100644 --- a/src/json-schema-checks/package.nix +++ b/src/json-schema-checks/package.nix @@ -21,6 +21,7 @@ mkMesonDerivation (finalAttrs: { ../../.version ../../doc/manual/source/protocols/json/schema ../../src/libutil-tests/data/hash + ../../src/libstore-tests/data/content-address ../../src/libstore-tests/data/derivation ../../src/libstore-tests/data/derived-path ./. 
diff --git a/src/libstore-tests/content-address.cc b/src/libstore-tests/content-address.cc index 51d591c38..0474fb2e0 100644 --- a/src/libstore-tests/content-address.cc +++ b/src/libstore-tests/content-address.cc @@ -1,6 +1,7 @@ #include #include "nix/store/content-address.hh" +#include "nix/util/tests/json-characterization.hh" namespace nix { @@ -8,33 +9,93 @@ namespace nix { * ContentAddressMethod::parse, ContentAddressMethod::render * --------------------------------------------------------------------------*/ -TEST(ContentAddressMethod, testRoundTripPrintParse_1) +static auto methods = ::testing::Values( + std::pair{ContentAddressMethod::Raw::Text, "text"}, + std::pair{ContentAddressMethod::Raw::Flat, "flat"}, + std::pair{ContentAddressMethod::Raw::NixArchive, "nar"}, + std::pair{ContentAddressMethod::Raw::Git, "git"}); + +struct ContentAddressMethodTest : ::testing::Test, + ::testing::WithParamInterface> +{}; + +TEST_P(ContentAddressMethodTest, testRoundTripPrintParse_1) { - for (ContentAddressMethod cam : { - ContentAddressMethod::Raw::Text, - ContentAddressMethod::Raw::Flat, - ContentAddressMethod::Raw::NixArchive, - ContentAddressMethod::Raw::Git, - }) { - EXPECT_EQ(ContentAddressMethod::parse(cam.render()), cam); - } + auto & [cam, _] = GetParam(); + EXPECT_EQ(ContentAddressMethod::parse(cam.render()), cam); } -TEST(ContentAddressMethod, testRoundTripPrintParse_2) +TEST_P(ContentAddressMethodTest, testRoundTripPrintParse_2) { - for (const std::string_view camS : { - "text", - "flat", - "nar", - "git", - }) { - EXPECT_EQ(ContentAddressMethod::parse(camS).render(), camS); - } + auto & [cam, camS] = GetParam(); + EXPECT_EQ(ContentAddressMethod::parse(camS).render(), camS); } +INSTANTIATE_TEST_SUITE_P(ContentAddressMethod, ContentAddressMethodTest, methods); + TEST(ContentAddressMethod, testParseContentAddressMethodOptException) { EXPECT_THROW(ContentAddressMethod::parse("narwhal"), UsageError); } +/* 
---------------------------------------------------------------------------- + * JSON + * --------------------------------------------------------------------------*/ + +class ContentAddressTest : public virtual CharacterizationTest +{ + std::filesystem::path unitTestData = getUnitTestData() / "content-address"; + +public: + + /** + * We set these in tests rather than the regular globals so we don't have + * to worry about race conditions if the tests run concurrently. + */ + ExperimentalFeatureSettings mockXpSettings; + + std::filesystem::path goldenMaster(std::string_view testStem) const override + { + return unitTestData / testStem; + } +}; + +using nlohmann::json; + +struct ContentAddressJsonTest : ContentAddressTest, + JsonCharacterizationTest, + ::testing::WithParamInterface> +{}; + +TEST_P(ContentAddressJsonTest, from_json) +{ + auto & [name, expected] = GetParam(); + readJsonTest(name, expected); +} + +TEST_P(ContentAddressJsonTest, to_json) +{ + auto & [name, value] = GetParam(); + writeJsonTest(name, value); +} + +INSTANTIATE_TEST_SUITE_P( + ContentAddressJSON, + ContentAddressJsonTest, + ::testing::Values( + std::pair{ + "text", + ContentAddress{ + .method = ContentAddressMethod::Raw::Text, + .hash = hashString(HashAlgorithm::SHA256, "asdf"), + }, + }, + std::pair{ + "nar", + ContentAddress{ + .method = ContentAddressMethod::Raw::NixArchive, + .hash = hashString(HashAlgorithm::SHA256, "qwer"), + }, + })); + } // namespace nix diff --git a/src/libstore-tests/data/content-address/nar.json b/src/libstore-tests/data/content-address/nar.json new file mode 100644 index 000000000..21e065cd3 --- /dev/null +++ b/src/libstore-tests/data/content-address/nar.json @@ -0,0 +1,8 @@ +{ + "hash": { + "algorithm": "sha256", + "format": "base64", + "hash": "9vLqj0XYoFfJVmoz+ZR02i5camYE1zYSFlDicwxvsKM=" + }, + "method": "nar" +} diff --git a/src/libstore-tests/data/content-address/text.json b/src/libstore-tests/data/content-address/text.json new file mode 100644 index 
000000000..04bc8ac20 --- /dev/null +++ b/src/libstore-tests/data/content-address/text.json @@ -0,0 +1,8 @@ +{ + "hash": { + "algorithm": "sha256", + "format": "base64", + "hash": "8OTC92xYkW7CWPJGhRvqCR0U1CR6L8PhhpRGGxgW4Ts=" + }, + "method": "text" +} diff --git a/src/libstore/content-address.cc b/src/libstore/content-address.cc index 9a57e3aa6..497c2c5b4 100644 --- a/src/libstore/content-address.cc +++ b/src/libstore/content-address.cc @@ -1,6 +1,7 @@ #include "nix/util/args.hh" #include "nix/store/content-address.hh" #include "nix/util/split.hh" +#include "nix/util/json-utils.hh" namespace nix { @@ -300,3 +301,36 @@ Hash ContentAddressWithReferences::getHash() const } } // namespace nix + +namespace nlohmann { + +using namespace nix; + +ContentAddressMethod adl_serializer::from_json(const json & json) +{ + return ContentAddressMethod::parse(getString(json)); +} + +void adl_serializer::to_json(json & json, const ContentAddressMethod & m) +{ + json = m.render(); +} + +ContentAddress adl_serializer::from_json(const json & json) +{ + auto obj = getObject(json); + return { + .method = adl_serializer::from_json(valueAt(obj, "method")), + .hash = valueAt(obj, "hash"), + }; +} + +void adl_serializer::to_json(json & json, const ContentAddress & ca) +{ + json = { + {"method", ca.method}, + {"hash", ca.hash}, + }; +} + +} // namespace nlohmann diff --git a/src/libstore/include/nix/store/content-address.hh b/src/libstore/include/nix/store/content-address.hh index 0a3dc79bd..41ccc69ae 100644 --- a/src/libstore/include/nix/store/content-address.hh +++ b/src/libstore/include/nix/store/content-address.hh @@ -6,6 +6,7 @@ #include "nix/store/path.hh" #include "nix/util/file-content-address.hh" #include "nix/util/variant-wrapper.hh" +#include "nix/util/json-impls.hh" namespace nix { @@ -308,4 +309,15 @@ struct ContentAddressWithReferences Hash getHash() const; }; +template<> +struct json_avoids_null : std::true_type +{}; + +template<> +struct json_avoids_null : std::true_type +{}; 
+ } // namespace nix + +JSON_IMPL(nix::ContentAddressMethod) +JSON_IMPL(nix::ContentAddress) From 3915b3a111ffe42d1ac9c8162b5506fa7678464f Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Wed, 22 Oct 2025 08:10:20 +0000 Subject: [PATCH 067/213] feat(libstore/s3-binary-cache-store): implement `abortMultipartUpload()` Implement `abortMultipartUpload()` for cleaning up incomplete multipart uploads on error: - Constructs URL with `?uploadId=ID` query parameter - Issues `DELETE` request to abort the multipart upload --- src/libstore/s3-binary-cache-store.cc | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 5d97fb0fd..98f742c70 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -26,6 +26,14 @@ public: private: ref s3Config; + + /** + * Abort a multipart upload + * + * @see + * https://docs.aws.amazon.com/AmazonS3/latest/API/API_AbortMultipartUpload.html#API_AbortMultipartUpload_RequestSyntax + */ + void abortMultipartUpload(std::string_view key, std::string_view uploadId); }; void S3BinaryCacheStore::upsertFile( @@ -37,6 +45,19 @@ void S3BinaryCacheStore::upsertFile( HttpBinaryCacheStore::upsertFile(path, istream, mimeType, sizeHint); } +void S3BinaryCacheStore::abortMultipartUpload(std::string_view key, std::string_view uploadId) +{ + auto req = makeRequest(key); + req.setupForS3(); + + auto url = req.uri.parsed(); + url.query["uploadId"] = uploadId; + req.uri = VerbatimURL(url); + req.method = HttpMethod::DELETE; + + getFileTransfer()->enqueueFileTransfer(req).get(); +} + StringSet S3BinaryCacheStoreConfig::uriSchemes() { return {"s3"}; From 5e220271e2dbafb5205684354057aeaa4a58a5c6 Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Sat, 25 Oct 2025 22:38:43 +0000 Subject: [PATCH 068/213] feat(libstore): add scanForReferencesDeep for per-file reference tracking Introduces `scanForReferencesDeep` to 
provide per-file granularity when scanning for store path references, enabling better diagnostics for cycle detection and `nix why-depends --precise`. --- src/libstore-tests/references.cc | 143 ++++++++++++++++++ .../include/nix/store/path-references.hh | 57 +++++++ src/libstore/path-references.cc | 90 +++++++++++ 3 files changed, 290 insertions(+) diff --git a/src/libstore-tests/references.cc b/src/libstore-tests/references.cc index 27ecad08f..9cecd573e 100644 --- a/src/libstore-tests/references.cc +++ b/src/libstore-tests/references.cc @@ -1,4 +1,6 @@ #include "nix/store/references.hh" +#include "nix/store/path-references.hh" +#include "nix/util/memory-source-accessor.hh" #include @@ -79,4 +81,145 @@ TEST(references, scan) } } +TEST(references, scanForReferencesDeep) +{ + using File = MemorySourceAccessor::File; + + // Create store paths to search for + StorePath path1{"dc04vv14dak1c1r48qa0m23vr9jy8sm0-foo"}; + StorePath path2{"zc842j0rz61mjsp3h3wp5ly71ak6qgdn-bar"}; + StorePath path3{"a5cn2i4b83gnsm60d38l3kgb8qfplm11-baz"}; + + StorePathSet refs{path1, path2, path3}; + + std::string_view hash1 = path1.hashPart(); + std::string_view hash2 = path2.hashPart(); + std::string_view hash3 = path3.hashPart(); + + // Create an in-memory file system with various reference patterns + auto accessor = make_ref(); + accessor->root = File::Directory{ + .contents{ + { + // file1.txt: contains hash1 + "file1.txt", + File::Regular{ + .contents = "This file references " + hash1 + " in its content", + }, + }, + { + // file2.txt: contains hash2 and hash3 + "file2.txt", + File::Regular{ + .contents = "Multiple refs: " + hash2 + " and also " + hash3, + }, + }, + { + // file3.txt: contains no references + "file3.txt", + File::Regular{ + .contents = "This file has no store path references at all", + }, + }, + { + // subdir: a subdirectory + "subdir", + File::Directory{ + .contents{ + { + // subdir/file4.txt: contains hash1 again + "file4.txt", + File::Regular{ + .contents = 
"Subdirectory file with " + hash1, + }, + }, + }, + }, + }, + { + // link1: a symlink that contains a reference in its target + "link1", + File::Symlink{ + .target = hash2 + "-target", + }, + }, + }, + }; + + // Test the callback-based API + { + std::map foundRefs; + + scanForReferencesDeep(*accessor, CanonPath::root, refs, [&](FileRefScanResult result) { + foundRefs[std::move(result.filePath)] = std::move(result.foundRefs); + }); + + // Verify we found the expected references + EXPECT_EQ(foundRefs.size(), 4); // file1, file2, file4, link1 + + // Check file1.txt found path1 + { + CanonPath f1Path("/file1.txt"); + auto it = foundRefs.find(f1Path); + ASSERT_TRUE(it != foundRefs.end()); + EXPECT_EQ(it->second.size(), 1); + EXPECT_TRUE(it->second.count(path1)); + } + + // Check file2.txt found path2 and path3 + { + CanonPath f2Path("/file2.txt"); + auto it = foundRefs.find(f2Path); + ASSERT_TRUE(it != foundRefs.end()); + EXPECT_EQ(it->second.size(), 2); + EXPECT_TRUE(it->second.count(path2)); + EXPECT_TRUE(it->second.count(path3)); + } + + // Check file3.txt is not in results (no refs) + { + CanonPath f3Path("/file3.txt"); + EXPECT_FALSE(foundRefs.count(f3Path)); + } + + // Check subdir/file4.txt found path1 + { + CanonPath f4Path("/subdir/file4.txt"); + auto it = foundRefs.find(f4Path); + ASSERT_TRUE(it != foundRefs.end()); + EXPECT_EQ(it->second.size(), 1); + EXPECT_TRUE(it->second.count(path1)); + } + + // Check symlink found path2 + { + CanonPath linkPath("/link1"); + auto it = foundRefs.find(linkPath); + ASSERT_TRUE(it != foundRefs.end()); + EXPECT_EQ(it->second.size(), 1); + EXPECT_TRUE(it->second.count(path2)); + } + } + + // Test the map-based convenience API + { + auto results = scanForReferencesDeep(*accessor, CanonPath::root, refs); + + EXPECT_EQ(results.size(), 4); // file1, file2, file4, link1 + + // Verify all expected files are in the results + EXPECT_TRUE(results.count(CanonPath("/file1.txt"))); + EXPECT_TRUE(results.count(CanonPath("/file2.txt"))); + 
EXPECT_TRUE(results.count(CanonPath("/subdir/file4.txt"))); + EXPECT_TRUE(results.count(CanonPath("/link1"))); + EXPECT_FALSE(results.count(CanonPath("/file3.txt"))); + + // Verify the references found in each file are correct + EXPECT_EQ(results.at(CanonPath("/file1.txt")), StorePathSet{path1}); + EXPECT_EQ(results.at(CanonPath("/file2.txt")), StorePathSet({path2, path3})); + EXPECT_EQ(results.at(CanonPath("/subdir/file4.txt")), StorePathSet{path1}); + EXPECT_EQ(results.at(CanonPath("/link1")), StorePathSet{path2}); + } +} + } // namespace nix diff --git a/src/libstore/include/nix/store/path-references.hh b/src/libstore/include/nix/store/path-references.hh index 66d0da268..6aa506da4 100644 --- a/src/libstore/include/nix/store/path-references.hh +++ b/src/libstore/include/nix/store/path-references.hh @@ -3,6 +3,10 @@ #include "nix/store/references.hh" #include "nix/store/path.hh" +#include "nix/util/source-accessor.hh" + +#include +#include namespace nix { @@ -21,4 +25,57 @@ public: StorePathSet getResultPaths(); }; +/** + * Result of scanning a single file for references. + */ +struct FileRefScanResult +{ + CanonPath filePath; ///< The file that was scanned + StorePathSet foundRefs; ///< Which store paths were found in this file +}; + +/** + * Scan a store path tree and report which references appear in which files. + * + * This is like scanForReferences() but provides per-file granularity. + * Useful for cycle detection and detailed dependency analysis like `nix why-depends --precise`. + * + * The function walks the tree using the provided accessor and streams each file's + * contents through a RefScanSink to detect hash references. For each file that + * contains at least one reference, a callback is invoked with the file path and + * the set of references found. + * + * Note: This function only searches for the hash part of store paths (e.g., + * "dc04vv14dak1c1r48qa0m23vr9jy8sm0"), not the name part. 
A store path like + * "/nix/store/dc04vv14dak1c1r48qa0m23vr9jy8sm0-foo" will be detected if the + * hash appears anywhere in the scanned content, regardless of the "-foo" suffix. + * + * @param accessor Source accessor to read the tree + * @param rootPath Root path to scan + * @param refs Set of store paths to search for + * @param callback Called for each file that contains at least one reference + */ +void scanForReferencesDeep( + SourceAccessor & accessor, + const CanonPath & rootPath, + const StorePathSet & refs, + std::function callback); + +/** + * Scan a store path tree and return which references appear in which files. + * + * This is a convenience wrapper around the callback-based scanForReferencesDeep() + * that collects all results into a map for efficient lookups. + * + * Note: This function only searches for the hash part of store paths, not the name part. + * See the callback-based overload for details. + * + * @param accessor Source accessor to read the tree + * @param rootPath Root path to scan + * @param refs Set of store paths to search for + * @return Map from file paths to the set of references found in each file + */ +std::map +scanForReferencesDeep(SourceAccessor & accessor, const CanonPath & rootPath, const StorePathSet & refs); + } // namespace nix diff --git a/src/libstore/path-references.cc b/src/libstore/path-references.cc index 8b167e902..3d783bbe4 100644 --- a/src/libstore/path-references.cc +++ b/src/libstore/path-references.cc @@ -1,11 +1,15 @@ #include "nix/store/path-references.hh" #include "nix/util/hash.hh" #include "nix/util/archive.hh" +#include "nix/util/source-accessor.hh" +#include "nix/util/canon-path.hh" +#include "nix/util/logging.hh" #include #include #include #include +#include namespace nix { @@ -54,4 +58,90 @@ StorePathSet scanForReferences(Sink & toTee, const Path & path, const StorePathS return refsSink.getResultPaths(); } +void scanForReferencesDeep( + SourceAccessor & accessor, + const CanonPath & rootPath, + const 
StorePathSet & refs, + std::function callback) +{ + // Recursive tree walker + auto walk = [&](this auto & self, const CanonPath & path) -> void { + auto stat = accessor.lstat(path); + + switch (stat.type) { + case SourceAccessor::tRegular: { + // Create a fresh sink for each file to independently detect references. + // RefScanSink accumulates found hashes globally - once a hash is found, + // it remains in the result set. If we reused the same sink across files, + // we couldn't distinguish which files contain which references, as a hash + // found in an earlier file wouldn't be reported when found in later files. + PathRefScanSink sink = PathRefScanSink::fromPaths(refs); + + // Scan this file by streaming its contents through the sink + accessor.readFile(path, sink); + + // Get the references found in this file + auto foundRefs = sink.getResultPaths(); + + // Report if we found anything in this file + if (!foundRefs.empty()) { + debug("scanForReferencesDeep: found %d references in %s", foundRefs.size(), path.abs()); + callback(FileRefScanResult{.filePath = path, .foundRefs = std::move(foundRefs)}); + } + break; + } + + case SourceAccessor::tDirectory: { + // Recursively scan directory contents + auto entries = accessor.readDirectory(path); + for (const auto & [name, entryType] : entries) { + self(path / name); + } + break; + } + + case SourceAccessor::tSymlink: { + // Create a fresh sink for the symlink target (same reason as regular files) + PathRefScanSink sink = PathRefScanSink::fromPaths(refs); + + // Scan symlink target for references + auto target = accessor.readLink(path); + sink(std::string_view(target)); + + // Get the references found in this symlink target + auto foundRefs = sink.getResultPaths(); + + if (!foundRefs.empty()) { + debug("scanForReferencesDeep: found %d references in symlink %s", foundRefs.size(), path.abs()); + callback(FileRefScanResult{.filePath = path, .foundRefs = std::move(foundRefs)}); + } + break; + } + + case 
SourceAccessor::tChar: + case SourceAccessor::tBlock: + case SourceAccessor::tSocket: + case SourceAccessor::tFifo: + case SourceAccessor::tUnknown: + default: + throw Error("file '%s' has an unsupported type", path.abs()); + } + }; + + // Start the recursive walk from the root + walk(rootPath); +} + +std::map +scanForReferencesDeep(SourceAccessor & accessor, const CanonPath & rootPath, const StorePathSet & refs) +{ + std::map results; + + scanForReferencesDeep(accessor, rootPath, refs, [&](FileRefScanResult result) { + results[std::move(result.filePath)] = std::move(result.foundRefs); + }); + + return results; +} + } // namespace nix From 6129aee988132742837d36fd4cf995bfe85b3198 Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Sat, 25 Oct 2025 22:55:14 +0000 Subject: [PATCH 069/213] refactor(nix/why-depends): use scanForReferencesDeep for --precise mode Replaces manual tree-walking and reference scanning with the new scanForReferencesDeep function. --- src/nix/why-depends.cc | 79 +++++++++++++++++++----------------------- 1 file changed, 36 insertions(+), 43 deletions(-) diff --git a/src/nix/why-depends.cc b/src/nix/why-depends.cc index dc30fabd7..29da9e953 100644 --- a/src/nix/why-depends.cc +++ b/src/nix/why-depends.cc @@ -1,5 +1,6 @@ #include "nix/cmd/command.hh" #include "nix/store/store-api.hh" +#include "nix/store/path-references.hh" #include "nix/util/source-accessor.hh" #include "nix/main/shared.hh" @@ -191,7 +192,7 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions /* Sort the references by distance to `dependency` to ensure that the shortest path is printed first. 
*/ std::multimap refs; - StringSet hashes; + StorePathSet refPaths; for (auto & ref : node.refs) { if (ref == node.path && packagePath != dependencyPath) @@ -200,7 +201,7 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions if (node2.dist == inf) continue; refs.emplace(node2.dist, &node2); - hashes.insert(std::string(node2.path.hashPart())); + refPaths.insert(node2.path); } /* For each reference, find the files and symlinks that @@ -209,58 +210,50 @@ struct CmdWhyDepends : SourceExprCommand, MixOperateOnOptions auto accessor = store->requireStoreObjectAccessor(node.path); - auto visitPath = [&](this auto && recur, const CanonPath & p) -> void { - auto st = accessor->maybeLstat(p); - assert(st); + auto getColour = [&](const std::string & hash) { + return hash == dependencyPathHash ? ANSI_GREEN : ANSI_BLUE; + }; - auto p2 = p.isRoot() ? p.abs() : p.rel(); + if (precise) { + // Use scanForReferencesDeep to find files containing references + scanForReferencesDeep(*accessor, CanonPath::root, refPaths, [&](FileRefScanResult result) { + auto p2 = result.filePath.isRoot() ? result.filePath.abs() : result.filePath.rel(); + auto st = accessor->lstat(result.filePath); - auto getColour = [&](const std::string & hash) { - return hash == dependencyPathHash ? ANSI_GREEN : ANSI_BLUE; - }; + if (st.type == SourceAccessor::Type::tRegular) { + auto contents = accessor->readFile(result.filePath); - if (st->type == SourceAccessor::Type::tDirectory) { - auto names = accessor->readDirectory(p); - for (auto & [name, type] : names) - recur(p / name); - } - - else if (st->type == SourceAccessor::Type::tRegular) { - auto contents = accessor->readFile(p); - - for (auto & hash : hashes) { - auto pos = contents.find(hash); - if (pos != std::string::npos) { - size_t margin = 32; - auto pos2 = pos >= margin ? 
pos - margin : 0; - hits[hash].emplace_back( - fmt("%s: …%s…", + // For each reference found in this file, extract context + for (auto & foundRef : result.foundRefs) { + std::string hash(foundRef.hashPart()); + auto pos = contents.find(hash); + if (pos != std::string::npos) { + size_t margin = 32; + auto pos2 = pos >= margin ? pos - margin : 0; + hits[hash].emplace_back(fmt( + "%s: …%s…", p2, hilite( filterPrintable(std::string(contents, pos2, pos - pos2 + hash.size() + margin)), pos - pos2, StorePath::HashLen, getColour(hash)))); + } + } + } else if (st.type == SourceAccessor::Type::tSymlink) { + auto target = accessor->readLink(result.filePath); + + // For each reference found in this symlink, show it + for (auto & foundRef : result.foundRefs) { + std::string hash(foundRef.hashPart()); + auto pos = target.find(hash); + if (pos != std::string::npos) + hits[hash].emplace_back( + fmt("%s -> %s", p2, hilite(target, pos, StorePath::HashLen, getColour(hash)))); } } - } - - else if (st->type == SourceAccessor::Type::tSymlink) { - auto target = accessor->readLink(p); - - for (auto & hash : hashes) { - auto pos = target.find(hash); - if (pos != std::string::npos) - hits[hash].emplace_back( - fmt("%s -> %s", p2, hilite(target, pos, StorePath::HashLen, getColour(hash)))); - } - } - }; - - // FIXME: should use scanForReferences(). - - if (precise) - visitPath(CanonPath::root); + }); + } for (auto & ref : refs) { std::string hash(ref.second->path.hashPart()); From dd716dc9be9d54df959b951d97c51c9eafa37d4d Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 27 Oct 2025 15:48:07 -0400 Subject: [PATCH 070/213] Create default `Store::narFromPath` implementation in terms of `getFSAccessor` This is a good default (the methods that allow for an arbitrary choice of source accessor are generally preferable both to implement and to use). And it also pays its way by allowing us to delete *both* the `DummyStore` and `LocalStore` implementations. 
--- src/libstore/dummy-store.cc | 12 ------------ src/libstore/include/nix/store/local-fs-store.hh | 1 - src/libstore/include/nix/store/store-api.hh | 2 +- src/libstore/include/nix/store/uds-remote-store.hh | 2 +- src/libstore/local-fs-store.cc | 7 ------- src/libstore/restricted-store.cc | 2 +- src/libstore/ssh-store.cc | 2 +- src/libstore/store-api.cc | 7 +++++++ 8 files changed, 11 insertions(+), 24 deletions(-) diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc index 6c8cb3480..1333e0aed 100644 --- a/src/libstore/dummy-store.cc +++ b/src/libstore/dummy-store.cc @@ -258,18 +258,6 @@ struct DummyStoreImpl : DummyStore }); } - void narFromPath(const StorePath & path, Sink & sink) override - { - bool visited = contents.cvisit(path, [&](const auto & kv) { - const auto & [info, accessor] = kv.second; - SourcePath sourcePath(accessor); - dumpPath(sourcePath, sink, FileSerialisationMethod::NixArchive); - }); - - if (!visited) - throw Error("path '%s' is not valid", printStorePath(path)); - } - void queryRealisationUncached( const DrvOutput & drvOutput, Callback> callback) noexcept override { diff --git a/src/libstore/include/nix/store/local-fs-store.hh b/src/libstore/include/nix/store/local-fs-store.hh index 08f8e1656..100a4110d 100644 --- a/src/libstore/include/nix/store/local-fs-store.hh +++ b/src/libstore/include/nix/store/local-fs-store.hh @@ -78,7 +78,6 @@ struct LocalFSStore : virtual Store, virtual GcStore, virtual LogStore LocalFSStore(const Config & params); - void narFromPath(const StorePath & path, Sink & sink) override; ref getFSAccessor(bool requireValidPath = true) override; std::shared_ptr getFSAccessor(const StorePath & path, bool requireValidPath = true) override; diff --git a/src/libstore/include/nix/store/store-api.hh b/src/libstore/include/nix/store/store-api.hh index d03e8e010..8fa13de34 100644 --- a/src/libstore/include/nix/store/store-api.hh +++ b/src/libstore/include/nix/store/store-api.hh @@ -609,7 +609,7 @@ public: /** * 
Write a NAR dump of a store path. */ - virtual void narFromPath(const StorePath & path, Sink & sink) = 0; + virtual void narFromPath(const StorePath & path, Sink & sink); /** * For each path, if it's a derivation, build it. Building a diff --git a/src/libstore/include/nix/store/uds-remote-store.hh b/src/libstore/include/nix/store/uds-remote-store.hh index fe6e486f4..764e8768a 100644 --- a/src/libstore/include/nix/store/uds-remote-store.hh +++ b/src/libstore/include/nix/store/uds-remote-store.hh @@ -68,7 +68,7 @@ struct UDSRemoteStore : virtual IndirectRootStore, virtual RemoteStore void narFromPath(const StorePath & path, Sink & sink) override { - LocalFSStore::narFromPath(path, sink); + Store::narFromPath(path, sink); } /** diff --git a/src/libstore/local-fs-store.cc b/src/libstore/local-fs-store.cc index 28069dcaf..1a38cac3b 100644 --- a/src/libstore/local-fs-store.cc +++ b/src/libstore/local-fs-store.cc @@ -112,13 +112,6 @@ std::shared_ptr LocalFSStore::getFSAccessor(const StorePath & pa return std::make_shared(std::move(absPath)); } -void LocalFSStore::narFromPath(const StorePath & path, Sink & sink) -{ - if (!isValidPath(path)) - throw Error("path '%s' is not valid", printStorePath(path)); - dumpPath(getRealStoreDir() + std::string(printStorePath(path), storeDir.size()), sink); -} - const std::string LocalFSStore::drvsLogDir = "drvs"; std::optional LocalFSStore::getBuildLogExact(const StorePath & path) diff --git a/src/libstore/restricted-store.cc b/src/libstore/restricted-store.cc index 5270f7d10..ef8aaa380 100644 --- a/src/libstore/restricted-store.cc +++ b/src/libstore/restricted-store.cc @@ -226,7 +226,7 @@ void RestrictedStore::narFromPath(const StorePath & path, Sink & sink) { if (!goal.isAllowed(path)) throw InvalidPath("cannot dump unknown path '%s' in recursive Nix", printStorePath(path)); - LocalFSStore::narFromPath(path, sink); + Store::narFromPath(path, sink); } void RestrictedStore::ensurePath(const StorePath & path) diff --git 
a/src/libstore/ssh-store.cc b/src/libstore/ssh-store.cc index a7e28017f..ce973e734 100644 --- a/src/libstore/ssh-store.cc +++ b/src/libstore/ssh-store.cc @@ -143,7 +143,7 @@ struct MountedSSHStore : virtual SSHStore, virtual LocalFSStore void narFromPath(const StorePath & path, Sink & sink) override { - return LocalFSStore::narFromPath(path, sink); + return Store::narFromPath(path, sink); } ref getFSAccessor(bool requireValidPath) override diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index cdca6a763..08b75c8fa 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -300,6 +300,13 @@ ValidPathInfo Store::addToStoreSlow( return info; } +void Store::narFromPath(const StorePath & path, Sink & sink) +{ + auto accessor = requireStoreObjectAccessor(path); + SourcePath sourcePath{accessor}; + dumpPath(sourcePath, sink, FileSerialisationMethod::NixArchive); +} + StringSet Store::Config::getDefaultSystemFeatures() { auto res = settings.systemFeatures.get(); From 234f029940ce9bfa86f6f49604a47561400d9e27 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 27 Oct 2025 15:39:58 -0400 Subject: [PATCH 071/213] Add consuming `ref` <-> `std::shared_ptr` methods/ctrs This can help avoid churning ref counts when we don't need to. 
--- src/libutil/include/nix/util/ref.hh | 32 ++++++++++++++++++++++------- 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/src/libutil/include/nix/util/ref.hh b/src/libutil/include/nix/util/ref.hh index 7cf5ef25e..7ba5349a6 100644 --- a/src/libutil/include/nix/util/ref.hh +++ b/src/libutil/include/nix/util/ref.hh @@ -17,6 +17,12 @@ private: std::shared_ptr p; + void assertNonNull() + { + if (!p) + throw std::invalid_argument("null pointer cast to ref"); + } + public: using element_type = T; @@ -24,15 +30,19 @@ public: explicit ref(const std::shared_ptr & p) : p(p) { - if (!p) - throw std::invalid_argument("null pointer cast to ref"); + assertNonNull(); + } + + explicit ref(std::shared_ptr && p) + : p(std::move(p)) + { + assertNonNull(); } explicit ref(T * p) : p(p) { - if (!p) - throw std::invalid_argument("null pointer cast to ref"); + assertNonNull(); } T * operator->() const @@ -45,14 +55,22 @@ public: return *p; } - operator std::shared_ptr() const + std::shared_ptr get_ptr() const & { return p; } - std::shared_ptr get_ptr() const + std::shared_ptr get_ptr() && { - return p; + return std::move(p); + } + + /** + * Convenience to avoid explicit `get_ptr()` call in some cases. + */ + operator std::shared_ptr(this auto && self) + { + return std::forward(self).get_ptr(); } template From 28b73cabccc74304d3474aea8c2d06d4c248f811 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 13 Oct 2025 15:04:56 -0400 Subject: [PATCH 072/213] Make reading and writing derivations store methods This allows for different representations. 
--- src/libstore/derivations.cc | 27 ++++++++++++++++++--- src/libstore/include/nix/store/store-api.hh | 9 +++++-- src/libstore/store-api.cc | 2 +- 3 files changed, 31 insertions(+), 7 deletions(-) diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index f44bf3e70..20f1d6ca1 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -105,7 +105,7 @@ bool BasicDerivation::isBuiltin() const return builder.substr(0, 8) == "builtin:"; } -StorePath writeDerivation(Store & store, const Derivation & drv, RepairFlag repair, bool readOnly) +static auto infoForDerivation(Store & store, const Derivation & drv) { auto references = drv.inputSrcs; for (auto & i : drv.inputDrvs.map) @@ -117,13 +117,32 @@ StorePath writeDerivation(Store & store, const Derivation & drv, RepairFlag repa auto contents = drv.unparse(store, false); auto hash = hashString(HashAlgorithm::SHA256, contents); auto ca = TextInfo{.hash = hash, .references = references}; - auto path = store.makeFixedOutputPathFromCA(suffix, ca); + return std::tuple{ + suffix, + contents, + references, + store.makeFixedOutputPathFromCA(suffix, ca), + }; +} - if (readOnly || settings.readOnlyMode || (store.isValidPath(path) && !repair)) +StorePath writeDerivation(Store & store, const Derivation & drv, RepairFlag repair, bool readOnly) +{ + if (readOnly || settings.readOnlyMode) { + auto [_x, _y, _z, path] = infoForDerivation(store, drv); + return path; + } else + return store.writeDerivation(drv, repair); +} + +StorePath Store::writeDerivation(const Derivation & drv, RepairFlag repair) +{ + auto [suffix, contents, references, path] = infoForDerivation(*this, drv); + + if (isValidPath(path) && !repair) return path; StringSource s{contents}; - auto path2 = store.addToStoreFromDump( + auto path2 = addToStoreFromDump( s, suffix, FileSerialisationMethod::Flat, diff --git a/src/libstore/include/nix/store/store-api.hh b/src/libstore/include/nix/store/store-api.hh index 8fa13de34..522a9a45f 100644 
--- a/src/libstore/include/nix/store/store-api.hh +++ b/src/libstore/include/nix/store/store-api.hh @@ -778,15 +778,20 @@ public: */ Derivation derivationFromPath(const StorePath & drvPath); + /** + * Write a derivation to the Nix store, and return its path. + */ + virtual StorePath writeDerivation(const Derivation & drv, RepairFlag repair = NoRepair); + /** * Read a derivation (which must already be valid). */ - Derivation readDerivation(const StorePath & drvPath); + virtual Derivation readDerivation(const StorePath & drvPath); /** * Read a derivation from a potentially invalid path. */ - Derivation readInvalidDerivation(const StorePath & drvPath); + virtual Derivation readInvalidDerivation(const StorePath & drvPath); /** * @param [out] out Place in here the set of all store paths in the diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index 08b75c8fa..c292e2e43 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -1170,7 +1170,7 @@ std::optional Store::getBuildDerivationPath(const StorePath & path) // resolved derivation, so we need to get it first auto resolvedDrv = drv.tryResolve(*this); if (resolvedDrv) - return writeDerivation(*this, *resolvedDrv, NoRepair, true); + return ::nix::writeDerivation(*this, *resolvedDrv, NoRepair, true); } return path; From 136825b4a2700ebbd20f4ba143e9b1819be0537c Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 13 Oct 2025 16:00:27 -0400 Subject: [PATCH 073/213] Make Dummy store store derivations separately This makes for more efficiency. Once we have JSON for the dummy store, it will also make for better JSON, too. 
--- src/libstore-tests/write-derivation.cc | 4 +- src/libstore/dummy-store.cc | 120 ++++++++++++++---- .../include/nix/store/dummy-store-impl.hh | 9 +- 3 files changed, 107 insertions(+), 26 deletions(-) diff --git a/src/libstore-tests/write-derivation.cc b/src/libstore-tests/write-derivation.cc index 3f7de05d3..c320f92fa 100644 --- a/src/libstore-tests/write-derivation.cc +++ b/src/libstore-tests/write-derivation.cc @@ -50,8 +50,8 @@ TEST_F(WriteDerivationTest, addToStoreFromDumpCalledOnce) EXPECT_EQ(path1, path2); EXPECT_THAT( [&] { writeDerivation(*store, drv, Repair); }, - ::testing::ThrowsMessage(testing::HasSubstrIgnoreANSIMatcher( - "operation 'addToStoreFromDump' is not supported by store 'dummy://'"))); + ::testing::ThrowsMessage( + testing::HasSubstrIgnoreANSIMatcher("operation 'writeDerivation' is not supported by store 'dummy://'"))); } } // namespace nix diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc index 1333e0aed..d11fef73f 100644 --- a/src/libstore/dummy-store.cc +++ b/src/libstore/dummy-store.cc @@ -137,12 +137,31 @@ struct DummyStoreImpl : DummyStore void queryPathInfoUncached( const StorePath & path, Callback> callback) noexcept override { - bool visited = contents.cvisit(path, [&](const auto & kv) { - callback(std::make_shared(StorePath{kv.first}, kv.second.info)); - }); + if (path.isDerivation()) { + if (auto accessor_ = getMemoryFSAccessor(path)) { + ref accessor = ref{std::move(accessor_)}; + /* compute path info on demand */ + auto narHash = + hashPath({accessor, CanonPath::root}, FileSerialisationMethod::NixArchive, HashAlgorithm::SHA256); + auto info = std::make_shared(path, UnkeyedValidPathInfo{narHash.hash}); + info->narSize = narHash.numBytesDigested; + info->ca = ContentAddress{ + .method = ContentAddressMethod::Raw::Text, + .hash = hashString( + HashAlgorithm::SHA256, + std::get(accessor->root->raw).contents), + }; + callback(std::move(info)); + return; + } + } else { + if (contents.cvisit(path, [&](const auto 
& kv) { + callback(std::make_shared(StorePath{kv.first}, kv.second.info)); + })) + return; + } - if (!visited) - callback(nullptr); + callback(nullptr); } /** @@ -169,18 +188,25 @@ struct DummyStoreImpl : DummyStore if (checkSigs) throw Error("checking signatures is not supported for '%s' store", config->getHumanReadableURI()); - auto temp = make_ref(); - MemorySink tempSink{*temp}; + auto accessor = make_ref(); + MemorySink tempSink{*accessor}; parseDump(tempSink, source); auto path = info.path; - auto accessor = make_ref(std::move(*temp)); - contents.insert( - {path, - PathInfoAndContents{ - std::move(info), - accessor, - }}); + if (info.path.isDerivation()) { + warn("back compat supporting `addToStore` for inserting derivations in dummy store"); + writeDerivation( + parseDerivation(*this, accessor->readFile(CanonPath::root), Derivation::nameFromPath(info.path))); + return; + } + + contents.insert({ + path, + PathInfoAndContents{ + std::move(info), + accessor, + }, + }); wholeStoreView->addObject(path.to_string(), accessor); } @@ -193,6 +219,9 @@ struct DummyStoreImpl : DummyStore const StorePathSet & references = StorePathSet(), RepairFlag repair = NoRepair) override { + if (isDerivation(name)) + throw Error("Do not insert derivation into dummy store with `addToStoreFromDump`"); + if (config->readOnly) unsupported("addToStoreFromDump"); @@ -239,17 +268,47 @@ struct DummyStoreImpl : DummyStore auto path = info.path; auto accessor = make_ref(std::move(*temp)); - contents.insert( - {path, - PathInfoAndContents{ - std::move(info), - accessor, - }}); + contents.insert({ + path, + PathInfoAndContents{ + std::move(info), + accessor, + }, + }); wholeStoreView->addObject(path.to_string(), accessor); return path; } + StorePath writeDerivation(const Derivation & drv, RepairFlag repair = NoRepair) override + { + auto drvPath = ::nix::writeDerivation(*this, drv, repair, /*readonly=*/true); + + if (!derivations.contains(drvPath) || repair) { + if (config->readOnly) + 
unsupported("writeDerivation"); + derivations.insert({drvPath, drv}); + } + + return drvPath; + } + + Derivation readDerivation(const StorePath & drvPath) override + { + if (std::optional res = getConcurrent(derivations, drvPath)) + return *res; + else + throw Error("derivation '%s' is not valid", printStorePath(drvPath)); + } + + /** + * No such thing as an "invalid derivation" with the dummy store + */ + Derivation readInvalidDerivation(const StorePath & drvPath) override + { + return readDerivation(drvPath); + } + void registerDrvOutput(const Realisation & output) override { auto ref = make_ref(output); @@ -273,13 +332,28 @@ struct DummyStoreImpl : DummyStore callback(nullptr); } - std::shared_ptr getFSAccessor(const StorePath & path, bool requireValidPath) override + std::shared_ptr getMemoryFSAccessor(const StorePath & path, bool requireValidPath = true) { - std::shared_ptr res; - contents.cvisit(path, [&](const auto & kv) { res = kv.second.contents.get_ptr(); }); + std::shared_ptr res; + if (path.isDerivation()) + derivations.cvisit(path, [&](const auto & kv) { + /* compute path info on demand */ + auto res2 = make_ref(); + res2->root = MemorySourceAccessor::File::Regular{ + .contents = kv.second.unparse(*this, false), + }; + res = std::move(res2).get_ptr(); + }); + else + contents.cvisit(path, [&](const auto & kv) { res = kv.second.contents.get_ptr(); }); return res; } + std::shared_ptr getFSAccessor(const StorePath & path, bool requireValidPath = true) override + { + return getMemoryFSAccessor(path, requireValidPath); + } + ref getFSAccessor(bool requireValidPath) override { return wholeStoreView; diff --git a/src/libstore/include/nix/store/dummy-store-impl.hh b/src/libstore/include/nix/store/dummy-store-impl.hh index 4c9f54e98..137f81c9b 100644 --- a/src/libstore/include/nix/store/dummy-store-impl.hh +++ b/src/libstore/include/nix/store/dummy-store-impl.hh @@ -2,6 +2,7 @@ ///@file #include "nix/store/dummy-store.hh" +#include "nix/store/derivations.hh" 
#include @@ -25,11 +26,17 @@ struct DummyStore : virtual Store }; /** - * This is map conceptually owns the file system objects for each + * This map conceptually owns the file system objects for each * store object. */ boost::concurrent_flat_map contents; + /** + * This map conceptually owns every derivation, allowing us to + * avoid "on-disk drv format" serialization round-trips. + */ + boost::concurrent_flat_map derivations; + /** * The build trace maps the pair of a content-addressing (fixed or * floating) derivations an one of its output to a From 18941a2421f40efad264ad1fc4e07339075491c3 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 21 Oct 2025 11:37:14 -0400 Subject: [PATCH 074/213] Optimize `DummyStore::isValidPathUncached` See the API docs for the rationale of why this is needed. --- src/libstore/dummy-store.cc | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/libstore/dummy-store.cc b/src/libstore/dummy-store.cc index d11fef73f..c45a13cc3 100644 --- a/src/libstore/dummy-store.cc +++ b/src/libstore/dummy-store.cc @@ -164,6 +164,15 @@ struct DummyStoreImpl : DummyStore callback(nullptr); } + /** + * Do this to avoid `queryPathInfoUncached` computing `PathInfo` + * that we don't need just to return a `bool`. + */ + bool isValidPathUncached(const StorePath & path) override + { + return path.isDerivation() ? derivations.contains(path) : Store::isValidPathUncached(path); + } + /** * The dummy store is incapable of *not* trusting! :) */ From ad664ce64e90234e6a0349b7b14f00bc9c82bf8e Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Mon, 27 Oct 2025 20:56:54 +0000 Subject: [PATCH 075/213] ci: cancel previous workflow runs on PR updates Add concurrency group configuration to the CI workflow to automatically cancel outdated runs when a PR receives new commits or is force-pushed. This prevents wasting CI resources on superseded code. 
--- .github/workflows/ci.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8a0820903..67e97b188 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -14,6 +14,10 @@ on: default: true type: boolean +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + permissions: read-all jobs: From 4b6d07d64299e539ba4f421a6589abc4e630c36f Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Fri, 24 Oct 2025 23:53:39 +0000 Subject: [PATCH 076/213] feat(libstore/s3-binary-cache-store): implement `createMultipartUpload()` POST to key with `?uploads` query parameter, optionally set `Content-Encoding` header, parse `uploadId` from XML response using regex --- src/libstore/s3-binary-cache-store.cc | 43 +++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 98f742c70..58cb72776 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -4,6 +4,7 @@ #include #include +#include namespace nix { @@ -27,6 +28,15 @@ public: private: ref s3Config; + /** + * Creates a multipart upload for large objects to S3. 
+ * + * @see + * https://docs.aws.amazon.com/AmazonS3/latest/API/API_CreateMultipartUpload.html#API_CreateMultipartUpload_RequestSyntax + */ + std::string createMultipartUpload( + std::string_view key, std::string_view mimeType, std::optional contentEncoding); + /** * Abort a multipart upload * @@ -45,6 +55,39 @@ void S3BinaryCacheStore::upsertFile( HttpBinaryCacheStore::upsertFile(path, istream, mimeType, sizeHint); } +std::string S3BinaryCacheStore::createMultipartUpload( + std::string_view key, std::string_view mimeType, std::optional contentEncoding) +{ + auto req = makeRequest(key); + + // setupForS3() converts s3:// to https:// but strips query parameters + // So we call it first, then add our multipart parameters + req.setupForS3(); + + auto url = req.uri.parsed(); + url.query["uploads"] = ""; + req.uri = VerbatimURL(url); + + req.method = HttpMethod::POST; + req.data = ""; + req.mimeType = mimeType; + + if (contentEncoding) { + req.headers.emplace_back("Content-Encoding", *contentEncoding); + } + + auto result = getFileTransfer()->enqueueFileTransfer(req).get(); + + std::regex uploadIdRegex("([^<]+)"); + std::smatch match; + + if (std::regex_search(result.data, match, uploadIdRegex)) { + return match[1]; + } + + throw Error("S3 CreateMultipartUpload response missing "); +} + void S3BinaryCacheStore::abortMultipartUpload(std::string_view key, std::string_view uploadId) { auto req = makeRequest(key); From c592090fffde2fc107dec0bfd398ae7a9c0b4f35 Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Wed, 22 Oct 2025 08:02:25 +0000 Subject: [PATCH 077/213] feat(libstore/s3-binary-cache-store): implement `uploadPart()` Implement `uploadPart()` for uploading individual parts in S3 multipart uploads: - Constructs URL with `?partNumber=N&uploadId=ID` query parameters - Uploads chunk data with `application/octet-stream` mime type - Extracts and returns `ETag` from response --- src/libstore/s3-binary-cache-store.cc | 31 +++++++++++++++++++++++++++ 1 file 
changed, 31 insertions(+) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 58cb72776..828e75b7c 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -37,6 +37,15 @@ private: std::string createMultipartUpload( std::string_view key, std::string_view mimeType, std::optional contentEncoding); + /** + * Uploads a single part of a multipart upload + * + * @see https://docs.aws.amazon.com/AmazonS3/latest/API/API_UploadPart.html#API_UploadPart_RequestSyntax + * + * @returns the [ETag](https://en.wikipedia.org/wiki/HTTP_ETag) + */ + std::string uploadPart(std::string_view key, std::string_view uploadId, uint64_t partNumber, std::string data); + /** * Abort a multipart upload * @@ -88,6 +97,28 @@ std::string S3BinaryCacheStore::createMultipartUpload( throw Error("S3 CreateMultipartUpload response missing "); } +std::string +S3BinaryCacheStore::uploadPart(std::string_view key, std::string_view uploadId, uint64_t partNumber, std::string data) +{ + auto req = makeRequest(key); + req.setupForS3(); + + auto url = req.uri.parsed(); + url.query["partNumber"] = std::to_string(partNumber); + url.query["uploadId"] = uploadId; + req.uri = VerbatimURL(url); + req.data = std::move(data); + req.mimeType = "application/octet-stream"; + + auto result = getFileTransfer()->enqueueFileTransfer(req).get(); + + if (result.etag.empty()) { + throw Error("S3 UploadPart response missing ETag for part %d", partNumber); + } + + return std::move(result.etag); +} + void S3BinaryCacheStore::abortMultipartUpload(std::string_view key, std::string_view uploadId) { auto req = makeRequest(key); From 3775a2a2268bbc18716363e38868e3bf76fd3884 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Oct 2025 22:22:54 +0000 Subject: [PATCH 078/213] build(deps): bump actions/upload-artifact from 4 to 5 Bumps 
[actions/upload-artifact](https://github.com/actions/upload-artifact) from 4 to 5. - [Release notes](https://github.com/actions/upload-artifact/releases) - [Commits](https://github.com/actions/upload-artifact/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/upload-artifact dependency-version: '5' dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 67e97b188..18ae4d8bf 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -125,13 +125,13 @@ jobs: cat coverage-reports/index.txt >> $GITHUB_STEP_SUMMARY if: ${{ matrix.instrumented }} - name: Upload coverage reports - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: coverage-reports path: coverage-reports/ if: ${{ matrix.instrumented }} - name: Upload installer tarball - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: installer-${{matrix.os}} path: out/* From ccc06451df3ca9345977ca4cdf7d412f6603dd90 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Oct 2025 22:35:42 +0000 Subject: [PATCH 079/213] build(deps): bump actions/download-artifact from 5 to 6 Bumps [actions/download-artifact](https://github.com/actions/download-artifact) from 5 to 6. - [Release notes](https://github.com/actions/download-artifact/releases) - [Commits](https://github.com/actions/download-artifact/compare/v5...v6) --- updated-dependencies: - dependency-name: actions/download-artifact dependency-version: '6' dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 67e97b188..10103847a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -164,7 +164,7 @@ jobs: steps: - uses: actions/checkout@v5 - name: Download installer tarball - uses: actions/download-artifact@v5 + uses: actions/download-artifact@v6 with: name: installer-${{matrix.os}} path: out From c77317b1a9086b9aa8ff1b22da051e520febe871 Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Fri, 24 Oct 2025 23:54:49 +0000 Subject: [PATCH 080/213] feat(libstore/s3-binary-cache-store): implement `completeMultipartUpload()` `completeMultipartUpload()`: Build XML with part numbers and `ETags`, POST to key with `?uploadId` to finalize the multipart upload --- src/libstore/s3-binary-cache-store.cc | 42 +++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 828e75b7c..178373778 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -5,6 +5,7 @@ #include #include #include +#include namespace nix { @@ -46,6 +47,19 @@ private: */ std::string uploadPart(std::string_view key, std::string_view uploadId, uint64_t partNumber, std::string data); + struct UploadedPart + { + uint64_t partNumber; + std::string etag; + }; + + /** + * Completes a multipart upload by combining all uploaded parts. 
+ * @see + * https://docs.aws.amazon.com/AmazonS3/latest/API/API_CompleteMultipartUpload.html#API_CompleteMultipartUpload_RequestSyntax + */ + void completeMultipartUpload(std::string_view key, std::string_view uploadId, std::span parts); + /** * Abort a multipart upload * @@ -132,6 +146,34 @@ void S3BinaryCacheStore::abortMultipartUpload(std::string_view key, std::string_ getFileTransfer()->enqueueFileTransfer(req).get(); } +void S3BinaryCacheStore::completeMultipartUpload( + std::string_view key, std::string_view uploadId, std::span parts) +{ + auto req = makeRequest(key); + req.setupForS3(); + + auto url = req.uri.parsed(); + url.query["uploadId"] = uploadId; + req.uri = VerbatimURL(url); + req.method = HttpMethod::POST; + + std::string xml = ""; + for (const auto & part : parts) { + xml += ""; + xml += "" + std::to_string(part.partNumber) + ""; + xml += "" + part.etag + ""; + xml += ""; + } + xml += ""; + + debug("S3 CompleteMultipartUpload XML (%d parts): %s", parts.size(), xml); + + req.data = xml; + req.mimeType = "text/xml"; + + getFileTransfer()->enqueueFileTransfer(req).get(); +} + StringSet S3BinaryCacheStoreConfig::uriSchemes() { return {"s3"}; From 94965a3a3eeac6574a06a36760e6470977a7c1f9 Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Wed, 22 Oct 2025 20:15:25 +0000 Subject: [PATCH 081/213] test(nixos): add S3 multipart upload integration tests --- tests/nixos/s3-binary-cache-store.nix | 129 ++++++++++++++++++++++++++ 1 file changed, 129 insertions(+) diff --git a/tests/nixos/s3-binary-cache-store.nix b/tests/nixos/s3-binary-cache-store.nix index a2ede4572..a07375489 100644 --- a/tests/nixos/s3-binary-cache-store.nix +++ b/tests/nixos/s3-binary-cache-store.nix @@ -34,8 +34,10 @@ in pkgA pkgB pkgC + pkgs.coreutils ]; environment.systemPackages = [ pkgs.minio-client ]; + nix.nixPath = [ "nixpkgs=${pkgs.path}" ]; nix.extraOptions = '' experimental-features = nix-command substituters = @@ -639,6 +641,129 @@ in ) print(" ✓ Fetch with versionId 
parameter works") + @setup_s3() + def test_multipart_upload_basic(bucket): + """Test basic multipart upload with a large file""" + print("\n--- Test: Multipart Upload Basic ---") + + large_file_size = 10 * 1024 * 1024 + large_pkg = server.succeed( + "nix-store --add $(dd if=/dev/urandom of=/tmp/large-file bs=1M count=10 2>/dev/null && echo /tmp/large-file)" + ).strip() + + chunk_size = 5 * 1024 * 1024 + expected_parts = 3 # 10 MB raw becomes ~10.5 MB compressed (NAR + xz overhead) + + store_url = make_s3_url( + bucket, + **{ + "multipart-upload": "true", + "multipart-threshold": str(5 * 1024 * 1024), + "multipart-chunk-size": str(chunk_size), + } + ) + + print(f" Uploading {large_file_size} byte file (expect {expected_parts} parts)") + output = server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {large_pkg} --debug 2>&1") + + if "using S3 multipart upload" not in output: + raise Exception("Expected multipart upload to be used") + + expected_msg = f"{expected_parts} parts uploaded" + if expected_msg not in output: + print("Debug output:") + print(output) + raise Exception(f"Expected '{expected_msg}' in output") + + print(f" ✓ Multipart upload used with {expected_parts} parts") + + client.succeed(f"{ENV_WITH_CREDS} nix copy --from '{store_url}' {large_pkg} --no-check-sigs") + verify_packages_in_store(client, large_pkg, should_exist=True) + + print(" ✓ Large file downloaded and verified") + + @setup_s3() + def test_multipart_threshold(bucket): + """Test that files below threshold use regular upload""" + print("\n--- Test: Multipart Threshold Behavior ---") + + store_url = make_s3_url( + bucket, + **{ + "multipart-upload": "true", + "multipart-threshold": str(1024 * 1024 * 1024), + } + ) + + print(" Uploading small file with high threshold") + output = server.succeed(f"{ENV_WITH_CREDS} nix copy --to '{store_url}' {PKGS['A']} --debug 2>&1") + + if "using S3 multipart upload" in output: + raise Exception("Should not use multipart for file below threshold") + + 
if "using S3 regular upload" not in output: + raise Exception("Expected regular upload to be used") + + print(" ✓ Regular upload used for file below threshold") + + client.succeed(f"{ENV_WITH_CREDS} nix copy --no-check-sigs --from '{store_url}' {PKGS['A']}") + verify_packages_in_store(client, PKGS['A'], should_exist=True) + + print(" ✓ Small file uploaded and verified") + + @setup_s3() + def test_multipart_with_log_compression(bucket): + """Test multipart upload with compressed build logs""" + print("\n--- Test: Multipart Upload with Log Compression ---") + + # Create a derivation that produces a large text log (12 MB of base64 output) + drv_path = server.succeed( + """ + nix-instantiate --expr ' + let pkgs = import {}; + in derivation { + name = "large-log-builder"; + builder = "/bin/sh"; + args = ["-c" "$coreutils/bin/dd if=/dev/urandom bs=1M count=12 | $coreutils/bin/base64; echo success > $out"]; + coreutils = pkgs.coreutils; + system = builtins.currentSystem; + } + ' + """ + ).strip() + + print(" Building derivation to generate large log") + server.succeed(f"nix-store --realize {drv_path} &>/dev/null") + + # Upload logs with compression and multipart + store_url = make_s3_url( + bucket, + **{ + "multipart-upload": "true", + "multipart-threshold": str(5 * 1024 * 1024), + "multipart-chunk-size": str(5 * 1024 * 1024), + "log-compression": "xz", + } + ) + + print(" Uploading build log with compression and multipart") + output = server.succeed( + f"{ENV_WITH_CREDS} nix store copy-log --to '{store_url}' {drv_path} --debug 2>&1" + ) + + # Should use multipart for the compressed log + if "using S3 multipart upload" not in output or "log/" not in output: + print("Debug output:") + print(output) + raise Exception("Expected multipart upload to be used for compressed log") + + if "parts uploaded" not in output: + print("Debug output:") + print(output) + raise Exception("Expected multipart completion message") + + print(" ✓ Compressed log uploaded with multipart") + # 
============================================================================ # Main Test Execution # ============================================================================ @@ -669,6 +794,10 @@ in test_compression_disabled() test_nix_prefetch_url() test_versioned_urls() + # FIXME: enable when multipart fully lands + # test_multipart_upload_basic() + # test_multipart_threshold() + # test_multipart_with_log_compression() print("\n" + "="*80) print("✓ All S3 Binary Cache Store Tests Passed!") From 972915cabd772c4056fc4d08abd0579f1c252147 Mon Sep 17 00:00:00 2001 From: Adam Dinwoodie Date: Tue, 28 Oct 2025 09:36:46 +0000 Subject: [PATCH 082/213] docs: remove incorrect claim re gc --print-dead Per #7591, the `nix-store --gc --print-dead` command does not provide any feedback about the amount of disk space that is used by dead store paths. It looks like this has been the case since 7ab68961e (* Garbage collector: added an option `--use-atime' to delete paths in..., 2008-09-17). Update the nix-store documentation to remove the claim that this is function that `nix-store --gc --print-dead` performs. --- doc/manual/source/command-ref/nix-store/gc.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/doc/manual/source/command-ref/nix-store/gc.md b/doc/manual/source/command-ref/nix-store/gc.md index f432e00eb..8ec59d906 100644 --- a/doc/manual/source/command-ref/nix-store/gc.md +++ b/doc/manual/source/command-ref/nix-store/gc.md @@ -48,8 +48,7 @@ The behaviour of the collector is also influenced by the configuration file. By default, the collector prints the total number of freed bytes when it -finishes (or when it is interrupted). With `--print-dead`, it prints the -number of bytes that would be freed. +finishes (or when it is interrupted). 
{{#include ./opt-common.md}} From 5fc0c4f1027f673f76768b2e8659321cedda6834 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 22 Sep 2025 14:07:03 +0200 Subject: [PATCH 083/213] doc: Improve libexpr-c docs - Uses the more explicit `@ingroup` most of the time, to avoid problems with nested groups, and to make group membership more explicit. The division into headers is not great for documentation purposes, so this helps. - More attention for memory management details - Various other improvements to doc comments --- .../source/development/documentation.md | 6 + src/external-api-docs/README.md | 2 +- src/libexpr-c/nix_api_expr.h | 43 ++- src/libexpr-c/nix_api_external.h | 12 +- src/libexpr-c/nix_api_value.h | 250 +++++++++++++----- src/libutil-c/nix_api_util.h | 2 + 6 files changed, 237 insertions(+), 78 deletions(-) diff --git a/doc/manual/source/development/documentation.md b/doc/manual/source/development/documentation.md index a2a54175d..6823780cc 100644 --- a/doc/manual/source/development/documentation.md +++ b/doc/manual/source/development/documentation.md @@ -240,3 +240,9 @@ $ configurePhase $ ninja src/external-api-docs/html $ xdg-open src/external-api-docs/html/index.html ``` + +If you use direnv, or otherwise want to run `configurePhase` in a transient shell, use: + +```bash +nix-shell -A devShells.x86_64-linux.native-clangStdenv --command 'mesonFlags="$mesonFlags -Ddoc-gen=true"; mesonConfigurePhase' +``` diff --git a/src/external-api-docs/README.md b/src/external-api-docs/README.md index 8760ac88b..1940cc1c0 100644 --- a/src/external-api-docs/README.md +++ b/src/external-api-docs/README.md @@ -15,7 +15,7 @@ programmatically: 1. Embedding the evaluator 2. Writing language plug-ins -Embedding means you link the Nix C libraries in your program and use them from +Embedding means you link the Nix C API libraries in your program and use them from there. 
Adding a plug-in means you make a library that gets loaded by the Nix language evaluator, specified through a configuration option. diff --git a/src/libexpr-c/nix_api_expr.h b/src/libexpr-c/nix_api_expr.h index 2be739955..3623ee076 100644 --- a/src/libexpr-c/nix_api_expr.h +++ b/src/libexpr-c/nix_api_expr.h @@ -4,11 +4,14 @@ * @brief Bindings to the Nix language evaluator * * See *[Embedding the Nix Evaluator](@ref nix_evaluator_example)* for an example. - * @{ */ /** @file * @brief Main entry for the libexpr C bindings */ +/** @defgroup libexpr_init Initialization + * @ingroup libexpr + * @{ + */ #include "nix_api_store.h" #include "nix_api_util.h" @@ -45,7 +48,10 @@ typedef struct nix_eval_state_builder nix_eval_state_builder; */ typedef struct EvalState EvalState; // nix::EvalState +/** @} */ + /** @brief A Nix language value, or thunk that may evaluate to a value. + * @ingroup value * * Values are the primary objects manipulated in the Nix language. * They are considered to be immutable from a user's perspective, but the process of evaluating a value changes its @@ -56,7 +62,8 @@ typedef struct EvalState EvalState; // nix::EvalState * * The evaluator manages its own memory, but your use of the C API must follow the reference counting rules. * - * @see value_manip + * @struct nix_value + * @see value_create, value_extract * @see nix_value_incref, nix_value_decref */ typedef struct nix_value nix_value; @@ -65,6 +72,7 @@ NIX_DEPRECATED("use nix_value instead") typedef nix_value Value; // Function prototypes /** * @brief Initialize the Nix language evaluator. + * @ingroup libexpr_init * * This function must be called at least once, * at some point before constructing a EvalState for the first time. @@ -77,6 +85,7 @@ nix_err nix_libexpr_init(nix_c_context * context); /** * @brief Parses and evaluates a Nix expression from a string. + * @ingroup value_create * * @param[out] context Optional, stores error information * @param[in] state The state of the evaluation. 
@@ -93,6 +102,7 @@ nix_err nix_expr_eval_from_string( /** * @brief Calls a Nix function with an argument. + * @ingroup value_create * * @param[out] context Optional, stores error information * @param[in] state The state of the evaluation. @@ -107,6 +117,7 @@ nix_err nix_value_call(nix_c_context * context, EvalState * state, nix_value * f /** * @brief Calls a Nix function with multiple arguments. + * @ingroup value_create * * Technically these are functions that return functions. It is common for Nix * functions to be curried, so this function is useful for calling them. @@ -126,10 +137,12 @@ nix_err nix_value_call_multi( /** * @brief Calls a Nix function with multiple arguments. + * @ingroup value_create * * Technically these are functions that return functions. It is common for Nix * functions to be curried, so this function is useful for calling them. * + * @def NIX_VALUE_CALL * @param[out] context Optional, stores error information * @param[in] state The state of the evaluation. * @param[out] value The result of the function call. @@ -147,6 +160,7 @@ nix_err nix_value_call_multi( /** * @brief Forces the evaluation of a Nix value. + * @ingroup value_create * * The Nix interpreter is lazy, and not-yet-evaluated values can be * of type NIX_TYPE_THUNK instead of their actual value. @@ -180,18 +194,20 @@ nix_err nix_value_force_deep(nix_c_context * context, EvalState * state, nix_val /** * @brief Create a new nix_eval_state_builder + * @ingroup libexpr_init * * The settings are initialized to their default value. * Values can be sourced elsewhere with nix_eval_state_builder_load. * * @param[out] context Optional, stores error information * @param[in] store The Nix store to use. - * @return A new nix_eval_state_builder or NULL on failure. + * @return A new nix_eval_state_builder or NULL on failure. Call nix_eval_state_builder_free() when you're done. 
*/ nix_eval_state_builder * nix_eval_state_builder_new(nix_c_context * context, Store * store); /** * @brief Read settings from the ambient environment + * @ingroup libexpr_init * * Settings are sourced from environment variables and configuration files, * as documented in the Nix manual. @@ -204,6 +220,7 @@ nix_err nix_eval_state_builder_load(nix_c_context * context, nix_eval_state_buil /** * @brief Set the lookup path for `<...>` expressions + * @ingroup libexpr_init * * @param[in] context Optional, stores error information * @param[in] builder The builder to modify. @@ -214,18 +231,21 @@ nix_err nix_eval_state_builder_set_lookup_path( /** * @brief Create a new Nix language evaluator state + * @ingroup libexpr_init * - * Remember to nix_eval_state_builder_free after building the state. + * The builder becomes unusable after this call. Remember to call nix_eval_state_builder_free() + * after building the state. * * @param[out] context Optional, stores error information * @param[in] builder The builder to use and free - * @return A new Nix state or NULL on failure. + * @return A new Nix state or NULL on failure. Call nix_state_free() when you're done. * @see nix_eval_state_builder_new, nix_eval_state_builder_free */ EvalState * nix_eval_state_build(nix_c_context * context, nix_eval_state_builder * builder); /** * @brief Free a nix_eval_state_builder + * @ingroup libexpr_init * * Does not fail. * @@ -235,19 +255,21 @@ void nix_eval_state_builder_free(nix_eval_state_builder * builder); /** * @brief Create a new Nix language evaluator state + * @ingroup libexpr_init * * For more control, use nix_eval_state_builder * * @param[out] context Optional, stores error information * @param[in] lookupPath Null-terminated array of strings corresponding to entries in NIX_PATH. * @param[in] store The Nix store to use. - * @return A new Nix state or NULL on failure. + * @return A new Nix state or NULL on failure. Call nix_state_free() when you're done. 
* @see nix_state_builder_new */ EvalState * nix_state_create(nix_c_context * context, const char ** lookupPath, Store * store); /** * @brief Frees a Nix state. + * @ingroup libexpr_init * * Does not fail. * @@ -256,6 +278,7 @@ EvalState * nix_state_create(nix_c_context * context, const char ** lookupPath, void nix_state_free(EvalState * state); /** @addtogroup GC + * @ingroup libexpr * @brief Reference counting and garbage collector operations * * The Nix language evaluator uses a garbage collector. To ease C interop, we implement @@ -286,6 +309,9 @@ nix_err nix_gc_incref(nix_c_context * context, const void * object); /** * @brief Decrement the garbage collector reference counter for the given object * + * @deprecated We are phasing out the general nix_gc_decref() in favor of type-specified free functions, such as + * nix_value_decref(). + * * We also provide typed `nix_*_decref` functions, which are * - safer to use * - easier to integrate when deriving bindings @@ -314,12 +340,11 @@ void nix_gc_now(); */ void nix_gc_register_finalizer(void * obj, void * cd, void (*finalizer)(void * obj, void * cd)); -/** @} */ +/** @} */ // doxygen group GC + // cffi end #ifdef __cplusplus } #endif -/** @} */ - #endif // NIX_API_EXPR_H diff --git a/src/libexpr-c/nix_api_external.h b/src/libexpr-c/nix_api_external.h index f4a327281..96c479d57 100644 --- a/src/libexpr-c/nix_api_external.h +++ b/src/libexpr-c/nix_api_external.h @@ -2,11 +2,12 @@ #define NIX_API_EXTERNAL_H /** @ingroup libexpr * @addtogroup Externals - * @brief Deal with external values + * @brief Externals let Nix expressions work with foreign values that aren't part of the normal Nix value data model * @{ */ /** @file * @brief libexpr C bindings dealing with external values + * @see Externals */ #include "nix_api_expr.h" @@ -115,7 +116,7 @@ typedef struct NixCExternalValueDesc * @brief Try to compare two external values * * Optional, the default is always false. 
- * If the other object was not a Nix C external value, this comparison will + * If the other object was not a Nix C API external value, this comparison will * also return false * @param[in] self the void* passed to nix_create_external_value * @param[in] other the void* passed to the other object's @@ -168,7 +169,7 @@ typedef struct NixCExternalValueDesc /** * @brief Create an external value, that can be given to nix_init_external * - * Owned by the GC. Use nix_gc_decref when you're done with the pointer. + * Call nix_gc_decref() when you're done with the pointer. * * @param[out] context Optional, stores error information * @param[in] desc a NixCExternalValueDesc, you should keep this alive as long @@ -180,10 +181,11 @@ typedef struct NixCExternalValueDesc ExternalValue * nix_create_external_value(nix_c_context * context, NixCExternalValueDesc * desc, void * v); /** - * @brief Extract the pointer from a nix c external value. + * @brief Extract the pointer from a Nix C API external value. * @param[out] context Optional, stores error information * @param[in] b The external value - * @returns The pointer, or null if the external value was not from nix c. + * @returns The pointer, valid while the external value is valid, or null if the external value was not from the Nix C + * API. 
* @see nix_get_external */ void * nix_get_external_value_content(nix_c_context * context, ExternalValue * b); diff --git a/src/libexpr-c/nix_api_value.h b/src/libexpr-c/nix_api_value.h index 835eaec6e..5bd45da90 100644 --- a/src/libexpr-c/nix_api_value.h +++ b/src/libexpr-c/nix_api_value.h @@ -1,9 +1,6 @@ #ifndef NIX_API_VALUE_H #define NIX_API_VALUE_H -/** @addtogroup libexpr - * @{ - */ /** @file * @brief libexpr C bindings dealing with values */ @@ -20,18 +17,89 @@ extern "C" { #endif // cffi start +/** @defgroup value Value + * @ingroup libexpr + * @brief nix_value type and core operations for working with Nix values + * @see value_create + * @see value_extract + */ + +/** @defgroup value_create Value Creation + * @ingroup libexpr + * @brief Functions for allocating and initializing Nix values + * + * Values are usually created with `nix_alloc_value` followed by `nix_init_*` functions. + * In primop callbacks, allocation is already done and only initialization is needed. + */ + +/** @defgroup value_extract Value Extraction + * @ingroup libexpr + * @brief Functions for extracting data from Nix values + */ + +/** @defgroup primops PrimOps and Builtins + * @ingroup libexpr + */ + // Type definitions +/** @brief Represents the state of a Nix value + * + * Thunk values (NIX_TYPE_THUNK) change to their final, unchanging type when forced. + * + * @see https://nix.dev/manual/nix/latest/language/evaluation.html + * @enum ValueType + * @ingroup value + */ typedef enum { + /** Unevaluated expression + * + * Thunks often contain an expression and closure, but may contain other + * representations too. + * + * Their state is mutable, unlike that of the other types. + */ NIX_TYPE_THUNK, + /** + * A 64 bit signed integer. 
+ */ NIX_TYPE_INT, + /** @brief IEEE 754 double precision floating point number + * @see https://nix.dev/manual/nix/latest/language/types.html#type-float + */ NIX_TYPE_FLOAT, + /** @brief Boolean true or false value + * @see https://nix.dev/manual/nix/latest/language/types.html#type-bool + */ NIX_TYPE_BOOL, + /** @brief String value with context + * + * String content may contain arbitrary bytes, not necessarily UTF-8. + * @see https://nix.dev/manual/nix/latest/language/types.html#type-string + */ NIX_TYPE_STRING, + /** @brief Filesystem path + * @see https://nix.dev/manual/nix/latest/language/types.html#type-path + */ NIX_TYPE_PATH, + /** @brief Null value + * @see https://nix.dev/manual/nix/latest/language/types.html#type-null + */ NIX_TYPE_NULL, + /** @brief Attribute set (key-value mapping) + * @see https://nix.dev/manual/nix/latest/language/types.html#type-attrs + */ NIX_TYPE_ATTRS, + /** @brief Ordered list of values + * @see https://nix.dev/manual/nix/latest/language/types.html#type-list + */ NIX_TYPE_LIST, + /** @brief Function (lambda or builtin) + * @see https://nix.dev/manual/nix/latest/language/types.html#type-function + */ NIX_TYPE_FUNCTION, + /** @brief External value from C++ plugins or C API + * @see Externals + */ NIX_TYPE_EXTERNAL } ValueType; @@ -39,22 +107,41 @@ typedef enum { typedef struct nix_value nix_value; typedef struct EvalState EvalState; +/** @deprecated Use nix_value instead */ [[deprecated("use nix_value instead")]] typedef nix_value Value; // type defs /** @brief Stores an under-construction set of bindings - * @ingroup value_manip + * @ingroup value_create * - * Do not reuse. + * Each builder can only be used once. After calling nix_make_attrs(), the builder + * becomes invalid and must not be used again. Call nix_bindings_builder_free() to release it. + * + * Typical usage pattern: + * 1. Create with nix_make_bindings_builder() + * 2. Insert attributes with nix_bindings_builder_insert() + * 3. 
Create final attribute set with nix_make_attrs() + * 4. Free builder with nix_bindings_builder_free() + * + * @struct BindingsBuilder * @see nix_make_bindings_builder, nix_bindings_builder_free, nix_make_attrs * @see nix_bindings_builder_insert */ typedef struct BindingsBuilder BindingsBuilder; /** @brief Stores an under-construction list - * @ingroup value_manip + * @ingroup value_create * - * Do not reuse. + * Each builder can only be used once. After calling nix_make_list(), the builder + * becomes invalid and must not be used again. Call nix_list_builder_free() to release it. + * + * Typical usage pattern: + * 1. Create with nix_make_list_builder() + * 2. Insert elements with nix_list_builder_insert() + * 3. Create final list with nix_make_list() + * 4. Free builder with nix_list_builder_free() + * + * @struct ListBuilder * @see nix_make_list_builder, nix_list_builder_free, nix_make_list * @see nix_list_builder_insert */ @@ -63,25 +150,28 @@ typedef struct ListBuilder ListBuilder; /** @brief PrimOp function * @ingroup primops * - * Owned by the GC - * @see nix_alloc_primop, nix_init_primop + * Can be released with nix_gc_decref() when necessary. + * @struct PrimOp + * @see nix_alloc_primop, nix_init_primop, nix_register_primop */ typedef struct PrimOp PrimOp; /** @brief External Value * @ingroup Externals * - * Owned by the GC + * Can be released with nix_gc_decref() when necessary. + * @struct ExternalValue + * @see nix_create_external_value, nix_init_external, nix_get_external */ typedef struct ExternalValue ExternalValue; /** @brief String without placeholders, and realised store paths + * @struct nix_realised_string + * @see nix_string_realise, nix_realised_string_free */ typedef struct nix_realised_string nix_realised_string; -/** @defgroup primops Adding primops - * @{ - */ /** @brief Function pointer for primops + * @ingroup primops * * When you want to return an error, call nix_set_err_msg(context, NIX_ERR_UNKNOWN, "your error message here"). 
* @@ -97,9 +187,9 @@ typedef void (*PrimOpFun)( void * user_data, nix_c_context * context, EvalState * state, nix_value ** args, nix_value * ret); /** @brief Allocate a PrimOp + * @ingroup primops * - * Owned by the garbage collector. - * Use nix_gc_decref() when you're done with the returned PrimOp. + * Call nix_gc_decref() when you're done with the returned PrimOp. * * @param[out] context Optional, stores error information * @param[in] fun callback @@ -121,35 +211,38 @@ PrimOp * nix_alloc_primop( void * user_data); /** @brief add a primop to the `builtins` attribute set + * @ingroup primops * * Only applies to States created after this call. * - * Moves your PrimOp content into the global evaluator - * registry, meaning your input PrimOp pointer is no longer usable. - * You are free to remove your references to it, - * after which it will be garbage collected. + * Moves your PrimOp content into the global evaluator registry, meaning + * your input PrimOp pointer becomes invalid. The PrimOp must not be used + * with nix_init_primop() before or after this call, as this would cause + * undefined behavior. + * You must call nix_gc_decref() on the original PrimOp pointer + * after this call to release your reference. * * @param[out] context Optional, stores error information - * @return primop, or null in case of errors - * + * @param[in] primOp PrimOp to register + * @return error code, NIX_OK on success */ nix_err nix_register_primop(nix_c_context * context, PrimOp * primOp); -/** @} */ // Function prototypes /** @brief Allocate a Nix value + * @ingroup value_create * - * Owned by the GC. 
Use nix_gc_decref() when you're done with the pointer + * Call nix_value_decref() when you're done with the pointer * @param[out] context Optional, stores error information * @param[in] state nix evaluator state * @return value, or null in case of errors - * */ nix_value * nix_alloc_value(nix_c_context * context, EvalState * state); /** * @brief Increment the garbage collector reference counter for the given `nix_value`. + * @ingroup value * * The Nix language evaluator C API keeps track of alive objects by reference counting. * When you're done with a refcounted pointer, call nix_value_decref(). @@ -161,21 +254,19 @@ nix_err nix_value_incref(nix_c_context * context, nix_value * value); /** * @brief Decrement the garbage collector reference counter for the given object + * @ingroup value + * + * When the counter reaches zero, the `nix_value` object becomes invalid. + * The data referenced by `nix_value` may not be deallocated until the memory + * garbage collector has run, but deallocation is not guaranteed. * * @param[out] context Optional, stores error information * @param[in] value The object to stop referencing */ nix_err nix_value_decref(nix_c_context * context, nix_value * value); -/** @addtogroup value_manip Manipulating values - * @brief Functions to inspect and change Nix language values, represented by nix_value. 
- * @{ - */ -/** @anchor getters - * @name Getters - */ -/**@{*/ /** @brief Get value type + * @ingroup value_extract * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @return type of nix value @@ -183,14 +274,15 @@ nix_err nix_value_decref(nix_c_context * context, nix_value * value); ValueType nix_get_type(nix_c_context * context, const nix_value * value); /** @brief Get type name of value as defined in the evaluator + * @ingroup value_extract * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect - * @return type name, owned string - * @todo way to free the result + * @return type name string, free with free() */ const char * nix_get_typename(nix_c_context * context, const nix_value * value); /** @brief Get boolean value + * @ingroup value_extract * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @return true or false, error info via context @@ -198,6 +290,7 @@ const char * nix_get_typename(nix_c_context * context, const nix_value * value); bool nix_get_bool(nix_c_context * context, const nix_value * value); /** @brief Get the raw string + * @ingroup value_extract * * This may contain placeholders. * @@ -205,21 +298,21 @@ bool nix_get_bool(nix_c_context * context, const nix_value * value); * @param[in] value Nix value to inspect * @param[in] callback Called with the string value. * @param[in] user_data optional, arbitrary data, passed to the callback when it's called. - * @return string * @return error code, NIX_OK on success. */ nix_err nix_get_string(nix_c_context * context, const nix_value * value, nix_get_string_callback callback, void * user_data); /** @brief Get path as string + * @ingroup value_extract * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect - * @return string, if the type is NIX_TYPE_PATH - * @return NULL in case of error. 
+ * @return string valid while value is valid, NULL in case of error */ const char * nix_get_path_string(nix_c_context * context, const nix_value * value); /** @brief Get the length of a list + * @ingroup value_extract * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @return length of list, error info via context @@ -227,6 +320,7 @@ const char * nix_get_path_string(nix_c_context * context, const nix_value * valu unsigned int nix_get_list_size(nix_c_context * context, const nix_value * value); /** @brief Get the element count of an attrset + * @ingroup value_extract * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @return attrset element count, error info via context @@ -234,6 +328,7 @@ unsigned int nix_get_list_size(nix_c_context * context, const nix_value * value) unsigned int nix_get_attrs_size(nix_c_context * context, const nix_value * value); /** @brief Get float value in 64 bits + * @ingroup value_extract * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @return float contents, error info via context @@ -241,6 +336,7 @@ unsigned int nix_get_attrs_size(nix_c_context * context, const nix_value * value double nix_get_float(nix_c_context * context, const nix_value * value); /** @brief Get int value + * @ingroup value_extract * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @return int contents, error info via context @@ -248,15 +344,18 @@ double nix_get_float(nix_c_context * context, const nix_value * value); int64_t nix_get_int(nix_c_context * context, const nix_value * value); /** @brief Get external reference + * @ingroup value_extract * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect - * @return reference to external, NULL in case of error + * @return reference valid while value is valid. 
Call nix_gc_incref() if you need it to live longer, then only in that + * case call nix_gc_decref() when done. NULL in case of error */ ExternalValue * nix_get_external(nix_c_context * context, nix_value * value); /** @brief Get the ix'th element of a list + * @ingroup value_extract * - * Owned by the GC. Use nix_gc_decref when you're done with the pointer + * Call nix_value_decref() when you're done with the pointer * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @param[in] state nix evaluator state @@ -266,11 +365,12 @@ ExternalValue * nix_get_external(nix_c_context * context, nix_value * value); nix_value * nix_get_list_byidx(nix_c_context * context, const nix_value * value, EvalState * state, unsigned int ix); /** @brief Get the ix'th element of a list without forcing evaluation of the element + * @ingroup value_extract * * Returns the list element without forcing its evaluation, allowing access to lazy values. * The list value itself must already be evaluated. * - * Owned by the GC. 
Use nix_gc_decref when you're done with the pointer + * Call nix_value_decref() when you're done with the pointer * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect (must be an evaluated list) * @param[in] state nix evaluator state @@ -281,8 +381,9 @@ nix_value * nix_get_list_byidx_lazy(nix_c_context * context, const nix_value * value, EvalState * state, unsigned int ix); /** @brief Get an attr by name + * @ingroup value_extract * - * Use nix_gc_decref when you're done with the pointer + * Call nix_value_decref() when you're done with the pointer * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @param[in] state nix evaluator state @@ -292,11 +393,12 @@ nix_get_list_byidx_lazy(nix_c_context * context, const nix_value * value, EvalSt nix_value * nix_get_attr_byname(nix_c_context * context, const nix_value * value, EvalState * state, const char * name); /** @brief Get an attribute value by attribute name, without forcing evaluation of the attribute's value + * @ingroup value_extract * * Returns the attribute value without forcing its evaluation, allowing access to lazy values. * The attribute set value itself must already be evaluated. 
* - * Use nix_gc_decref when you're done with the pointer + * Call nix_value_decref() when you're done with the pointer * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect (must be an evaluated attribute set) * @param[in] state nix evaluator state @@ -307,6 +409,7 @@ nix_value * nix_get_attr_byname_lazy(nix_c_context * context, const nix_value * value, EvalState * state, const char * name); /** @brief Check if an attribute name exists on a value + * @ingroup value_extract * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @param[in] state nix evaluator state @@ -316,6 +419,7 @@ nix_get_attr_byname_lazy(nix_c_context * context, const nix_value * value, EvalS bool nix_has_attr_byname(nix_c_context * context, const nix_value * value, EvalState * state, const char * name); /** @brief Get an attribute by index + * @ingroup value_extract * * Also gives you the name. * @@ -329,18 +433,19 @@ bool nix_has_attr_byname(nix_c_context * context, const nix_value * value, EvalS * lexicographic order by Unicode scalar value for valid UTF-8). We recommend * applying this same ordering for consistency. * - * Use nix_gc_decref when you're done with the pointer + * Call nix_value_decref() when you're done with the pointer * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @param[in] state nix evaluator state * @param[in] i attribute index - * @param[out] name will store a pointer to the attribute name + * @param[out] name will store a pointer to the attribute name, valid until state is freed * @return value, NULL in case of errors */ nix_value * nix_get_attr_byidx(nix_c_context * context, nix_value * value, EvalState * state, unsigned int i, const char ** name); /** @brief Get an attribute by index, without forcing evaluation of the attribute's value + * @ingroup value_extract * * Also gives you the name. 
* @@ -357,18 +462,19 @@ nix_get_attr_byidx(nix_c_context * context, nix_value * value, EvalState * state * lexicographic order by Unicode scalar value for valid UTF-8). We recommend * applying this same ordering for consistency. * - * Use nix_gc_decref when you're done with the pointer + * Call nix_value_decref() when you're done with the pointer * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect (must be an evaluated attribute set) * @param[in] state nix evaluator state * @param[in] i attribute index - * @param[out] name will store a pointer to the attribute name + * @param[out] name will store a pointer to the attribute name, valid until state is freed * @return value, NULL in case of errors */ nix_value * nix_get_attr_byidx_lazy( nix_c_context * context, nix_value * value, EvalState * state, unsigned int i, const char ** name); /** @brief Get an attribute name by index + * @ingroup value_extract * * Returns the attribute name without forcing evaluation of the attribute's value. * @@ -382,16 +488,14 @@ nix_value * nix_get_attr_byidx_lazy( * lexicographic order by Unicode scalar value for valid UTF-8). We recommend * applying this same ordering for consistency. * - * Owned by the nix EvalState * @param[out] context Optional, stores error information * @param[in] value Nix value to inspect * @param[in] state nix evaluator state * @param[in] i attribute index - * @return name, NULL in case of errors + * @return name string valid until state is freed, NULL in case of errors */ const char * nix_get_attr_name_byidx(nix_c_context * context, nix_value * value, EvalState * state, unsigned int i); -/**@}*/ /** @name Initializers * * Values are typically "returned" by initializing already allocated memory that serves as the return value. 
@@ -401,6 +505,7 @@ const char * nix_get_attr_name_byidx(nix_c_context * context, nix_value * value, */ /**@{*/ /** @brief Set boolean value + * @ingroup value_create * @param[out] context Optional, stores error information * @param[out] value Nix value to modify * @param[in] b the boolean value @@ -409,6 +514,7 @@ const char * nix_get_attr_name_byidx(nix_c_context * context, nix_value * value, nix_err nix_init_bool(nix_c_context * context, nix_value * value, bool b); /** @brief Set a string + * @ingroup value_create * @param[out] context Optional, stores error information * @param[out] value Nix value to modify * @param[in] str the string, copied @@ -417,6 +523,7 @@ nix_err nix_init_bool(nix_c_context * context, nix_value * value, bool b); nix_err nix_init_string(nix_c_context * context, nix_value * value, const char * str); /** @brief Set a path + * @ingroup value_create * @param[out] context Optional, stores error information * @param[out] value Nix value to modify * @param[in] str the path string, copied @@ -425,6 +532,7 @@ nix_err nix_init_string(nix_c_context * context, nix_value * value, const char * nix_err nix_init_path_string(nix_c_context * context, EvalState * s, nix_value * value, const char * str); /** @brief Set a float + * @ingroup value_create * @param[out] context Optional, stores error information * @param[out] value Nix value to modify * @param[in] d the float, 64-bits @@ -433,6 +541,7 @@ nix_err nix_init_path_string(nix_c_context * context, EvalState * s, nix_value * nix_err nix_init_float(nix_c_context * context, nix_value * value, double d); /** @brief Set an int + * @ingroup value_create * @param[out] context Optional, stores error information * @param[out] value Nix value to modify * @param[in] i the int @@ -441,6 +550,7 @@ nix_err nix_init_float(nix_c_context * context, nix_value * value, double d); nix_err nix_init_int(nix_c_context * context, nix_value * value, int64_t i); /** @brief Set null + * @ingroup value_create * @param[out] 
context Optional, stores error information * @param[out] value Nix value to modify * @return error code, NIX_OK on success. @@ -448,6 +558,7 @@ nix_err nix_init_int(nix_c_context * context, nix_value * value, int64_t i); nix_err nix_init_null(nix_c_context * context, nix_value * value); /** @brief Set the value to a thunk that will perform a function application when needed. + * @ingroup value_create * * Thunks may be put into attribute sets and lists to perform some computation lazily; on demand. * However, note that in some places, a thunk must not be returned, such as in the return value of a PrimOp. @@ -464,6 +575,7 @@ nix_err nix_init_null(nix_c_context * context, nix_value * value); nix_err nix_init_apply(nix_c_context * context, nix_value * value, nix_value * fn, nix_value * arg); /** @brief Set an external value + * @ingroup value_create * @param[out] context Optional, stores error information * @param[out] value Nix value to modify * @param[in] val the external value to set. Will be GC-referenced by the value. @@ -472,18 +584,25 @@ nix_err nix_init_apply(nix_c_context * context, nix_value * value, nix_value * f nix_err nix_init_external(nix_c_context * context, nix_value * value, ExternalValue * val); /** @brief Create a list from a list builder + * @ingroup value_create + * + * After this call, the list builder becomes invalid and cannot be used again. + * The only necessary next step is to free it with nix_list_builder_free(). + * * @param[out] context Optional, stores error information - * @param[in] list_builder list builder to use. Make sure to unref this afterwards. + * @param[in] list_builder list builder to use * @param[out] value Nix value to modify * @return error code, NIX_OK on success. 
+ * @see nix_list_builder_free */ nix_err nix_make_list(nix_c_context * context, ListBuilder * list_builder, nix_value * value); /** @brief Create a list builder + * @ingroup value_create * @param[out] context Optional, stores error information * @param[in] state nix evaluator state * @param[in] capacity how many bindings you'll add. Don't exceed. - * @return owned reference to a list builder. Make sure to unref when you're done. + * @return list builder. Call nix_list_builder_free() when you're done. */ ListBuilder * nix_make_list_builder(nix_c_context * context, EvalState * state, size_t capacity); @@ -505,14 +624,21 @@ nix_list_builder_insert(nix_c_context * context, ListBuilder * list_builder, uns void nix_list_builder_free(ListBuilder * list_builder); /** @brief Create an attribute set from a bindings builder + * @ingroup value_create + * + * After this call, the bindings builder becomes invalid and cannot be used again. + * The only necessary next step is to free it with nix_bindings_builder_free(). + * * @param[out] context Optional, stores error information * @param[out] value Nix value to modify - * @param[in] b bindings builder to use. Make sure to unref this afterwards. + * @param[in] b bindings builder to use * @return error code, NIX_OK on success. 
+ * @see nix_bindings_builder_free */ nix_err nix_make_attrs(nix_c_context * context, nix_value * value, BindingsBuilder * b); /** @brief Set primop + * @ingroup value_create * @param[out] context Optional, stores error information * @param[out] value Nix value to modify * @param[in] op primop, will be gc-referenced by the value @@ -521,6 +647,7 @@ nix_err nix_make_attrs(nix_c_context * context, nix_value * value, BindingsBuild */ nix_err nix_init_primop(nix_c_context * context, nix_value * value, PrimOp * op); /** @brief Copy from another value + * @ingroup value_create * @param[out] context Optional, stores error information * @param[out] value Nix value to modify * @param[in] source value to copy from @@ -530,12 +657,11 @@ nix_err nix_copy_value(nix_c_context * context, nix_value * value, const nix_val /**@}*/ /** @brief Create a bindings builder -* @param[out] context Optional, stores error information -* @param[in] state nix evaluator state -* @param[in] capacity how many bindings you'll add. Don't exceed. -* @return owned reference to a bindings builder. Make sure to unref when you're -done. -*/ + * @param[out] context Optional, stores error information + * @param[in] state nix evaluator state + * @param[in] capacity how many bindings you'll add. Don't exceed. + * @return bindings builder. Call nix_bindings_builder_free() when you're done. + */ BindingsBuilder * nix_make_bindings_builder(nix_c_context * context, EvalState * state, size_t capacity); /** @brief Insert bindings into a builder @@ -554,7 +680,6 @@ nix_bindings_builder_insert(nix_c_context * context, BindingsBuilder * builder, * @param[in] builder the builder to free */ void nix_bindings_builder_free(BindingsBuilder * builder); -/**@}*/ /** @brief Realise a string context. * @@ -571,13 +696,13 @@ void nix_bindings_builder_free(BindingsBuilder * builder); * @param[in] isIFD If true, disallow derivation outputs if setting `allow-import-from-derivation` is false. 
You should set this to true when this call is part of a primop. You should set this to false when building for your application's purpose. - * @return NULL if failed, are a new nix_realised_string, which must be freed with nix_realised_string_free + * @return NULL if failed, or a new nix_realised_string, which must be freed with nix_realised_string_free */ nix_realised_string * nix_string_realise(nix_c_context * context, EvalState * state, nix_value * value, bool isIFD); /** @brief Start of the string * @param[in] realised_string - * @return pointer to the start of the string. It may not be null-terminated. + * @return pointer to the start of the string, valid until realised_string is freed. It may not be null-terminated. */ const char * nix_realised_string_get_buffer_start(nix_realised_string * realised_string); @@ -596,7 +721,7 @@ size_t nix_realised_string_get_store_path_count(nix_realised_string * realised_s /** @brief Get a store path. The store paths are stored in an arbitrary order. * @param[in] realised_string * @param[in] index index of the store path, must be less than the count - * @return store path + * @return store path valid until realised_string is freed */ const StorePath * nix_realised_string_get_store_path(nix_realised_string * realised_string, size_t index); @@ -610,5 +735,4 @@ void nix_realised_string_free(nix_realised_string * realised_string); } #endif -/** @} */ #endif // NIX_API_VALUE_H diff --git a/src/libutil-c/nix_api_util.h b/src/libutil-c/nix_api_util.h index 4d7f394fa..d301e5743 100644 --- a/src/libutil-c/nix_api_util.h +++ b/src/libutil-c/nix_api_util.h @@ -155,6 +155,8 @@ typedef struct nix_c_context nix_c_context; /** * @brief Called to get the value of a string owned by Nix. * + * The `start` data is borrowed and the function must not assume that the buffer persists after it returns. + * * @param[in] start the string to copy. * @param[in] n the string length. 
* @param[in] user_data optional, arbitrary data, passed to the nix_get_string_callback when it's called. From 883860c7ff6638f8069d8a6bb1be6ba2065c4608 Mon Sep 17 00:00:00 2001 From: Farid Zakaria Date: Tue, 28 Oct 2025 11:14:31 -0700 Subject: [PATCH 084/213] Move docker documentation to docker.io --- doc/manual/source/installation/installing-docker.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/manual/source/installation/installing-docker.md b/doc/manual/source/installation/installing-docker.md index 9354c1a72..92fa55e1c 100644 --- a/doc/manual/source/installation/installing-docker.md +++ b/doc/manual/source/installation/installing-docker.md @@ -3,14 +3,14 @@ To run the latest stable release of Nix with Docker run the following command: ```console -$ docker run -ti ghcr.io/nixos/nix -Unable to find image 'ghcr.io/nixos/nix:latest' locally -latest: Pulling from ghcr.io/nixos/nix +$ docker run -ti docker.io/nixos/nix +Unable to find image 'docker.io/nixos/nix:latest' locally +latest: Pulling from docker.io/nixos/nix 5843afab3874: Pull complete b52bf13f109c: Pull complete 1e2415612aa3: Pull complete Digest: sha256:27f6e7f60227e959ee7ece361f75d4844a40e1cc6878b6868fe30140420031ff -Status: Downloaded newer image for ghcr.io/nixos/nix:latest +Status: Downloaded newer image for docker.io/nixos/nix:latest 35ca4ada6e96:/# nix --version nix (Nix) 2.3.12 35ca4ada6e96:/# exit From 943788754fc695dbe1b8cb3057f7fc1a16858e2c Mon Sep 17 00:00:00 2001 From: Farid Zakaria Date: Tue, 28 Oct 2025 11:16:37 -0700 Subject: [PATCH 085/213] Add ghcr for pre-release --- doc/manual/source/installation/installing-docker.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/doc/manual/source/installation/installing-docker.md b/doc/manual/source/installation/installing-docker.md index 92fa55e1c..ccc75be5a 100644 --- a/doc/manual/source/installation/installing-docker.md +++ b/doc/manual/source/installation/installing-docker.md @@ -16,6 +16,8 @@ nix (Nix) 2.3.12 
35ca4ada6e96:/# exit ``` +> If you want the latest pre-release you can use ghcr.io/nixos/nix and view them at https://github.com/nixos/nix/pkgs/container/nix + # What is included in Nix's Docker image? The official Docker image is created using `pkgs.dockerTools.buildLayeredImage` From f5aafbd6ed5ea7a38d27d51cf82d77634d341a05 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 28 Oct 2025 19:39:04 +0100 Subject: [PATCH 086/213] .coderabbit.yaml: Disable auto-review --- .coderabbit.yaml | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 .coderabbit.yaml diff --git a/.coderabbit.yaml b/.coderabbit.yaml new file mode 100644 index 000000000..5122f01e0 --- /dev/null +++ b/.coderabbit.yaml @@ -0,0 +1,6 @@ +# Disable CodeRabbit auto-review to prevent verbose comments on PRs. +# When enabled: false, CodeRabbit won't attempt reviews and won't post +# "Review skipped" or other automated comments. +reviews: + auto_review: + enabled: false From e3246301a6dcd2c722241f4756484d40bc06f48a Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 28 Oct 2025 14:49:04 -0400 Subject: [PATCH 087/213] Enable JSON schema testing for derivation outputs I figured out what the problem was: the fragment needs to start with a `/`. 
--- src/json-schema-checks/meson.build | 27 +++++++++++++-------------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/src/json-schema-checks/meson.build b/src/json-schema-checks/meson.build index 745fb5ffa..8e8ac57c4 100644 --- a/src/json-schema-checks/meson.build +++ b/src/json-schema-checks/meson.build @@ -46,20 +46,19 @@ schemas = [ 'simple-derivation.json', ], }, - # # Not sure how to make subschema work - # { - # 'stem': 'derivation', - # 'schema': schema_dir / 'derivation-v3.yaml#output', - # 'files' : [ - # 'output-caFixedFlat.json', - # 'output-caFixedNAR.json', - # 'output-caFixedText.json', - # 'output-caFloating.json', - # 'output-deferred.json', - # 'output-impure.json', - # 'output-inputAddressed.json', - # ], - # }, + { + 'stem' : 'derivation', + 'schema' : schema_dir / 'derivation-v3.yaml#/$defs/output', + 'files' : [ + 'output-caFixedFlat.json', + 'output-caFixedNAR.json', + 'output-caFixedText.json', + 'output-caFloating.json', + 'output-deferred.json', + 'output-impure.json', + 'output-inputAddressed.json', + ], + }, { 'stem' : 'deriving-path', 'schema' : schema_dir / 'deriving-path-v1.yaml', From 84a5bee424ab25bd0dbc89b3abc6adb208142396 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Tue, 28 Oct 2025 21:41:20 +0100 Subject: [PATCH 088/213] coderabbit: disable reporting review status --- .coderabbit.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.coderabbit.yaml b/.coderabbit.yaml index 5122f01e0..815dc27a5 100644 --- a/.coderabbit.yaml +++ b/.coderabbit.yaml @@ -4,3 +4,4 @@ reviews: auto_review: enabled: false + review_status: false From fe8cdbc3e41ecab02d451c8864e6309507d3c7ea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Tue, 28 Oct 2025 21:48:33 +0100 Subject: [PATCH 089/213] coderabbit: disable high_level_summary/poem/github status/sequence_diagrams --- .coderabbit.yaml | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/.coderabbit.yaml b/.coderabbit.yaml index 
815dc27a5..00244700a 100644 --- a/.coderabbit.yaml +++ b/.coderabbit.yaml @@ -5,3 +5,10 @@ reviews: auto_review: enabled: false review_status: false + high_level_summary: false + poem: false + sequence_diagrams: false + changed_files_summary: false + tools: + github-checks: + enabled: false From be2572ed8d0c9dd626462229436ba7aaf2369690 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 28 Oct 2025 17:16:38 -0400 Subject: [PATCH 090/213] Make `inputDrvs` JSON schema more precise It now captures the stable non-recursive format (just an output set) and the unstable recursive form for dynamic derivations. --- .../protocols/json/schema/derivation-v3.yaml | 32 +++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/doc/manual/source/protocols/json/schema/derivation-v3.yaml b/doc/manual/source/protocols/json/schema/derivation-v3.yaml index c950b839f..30fddf699 100644 --- a/doc/manual/source/protocols/json/schema/derivation-v3.yaml +++ b/doc/manual/source/protocols/json/schema/derivation-v3.yaml @@ -103,6 +103,13 @@ properties: > ``` > > specifies that this derivation depends on the `dev` output of `curl`, and the `out` output of `unzip`. + additionalProperties: + title: Store Path + description: | + A store path to a derivation, mapped to the outputs of that derivation. + oneOf: + - "$ref": "#/$defs/outputNames" + - "$ref": "#/$defs/dynamicOutputs" system: type: string @@ -167,3 +174,28 @@ properties: title: Expected hash value description: | For fixed-output derivations, the expected content hash in base-16. 
+ + outputName: + type: string + title: Output name + description: Name of the derivation output to depend on + + outputNames: + type: array + title: Output Names + description: Set of names of derivation outputs to depend on + items: + "$ref": "#/$defs/outputName" + + dynamicOutputs: + type: object + title: Dynamic Outputs + description: | + **Experimental feature**: [`dynamic-derivations`](@docroot@/development/experimental-features.md#xp-feature-dynamic-derivations) + + This recursive data type allows for depending on outputs of outputs. + properties: + outputs: + "$ref": "#/$defs/outputNames" + dynamicOutputs: + "$ref": "#/$defs/dynamicOutputs" From c67966418f99120a31e3d15c58a0aa253abfb151 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 28 Oct 2025 16:59:35 -0400 Subject: [PATCH 091/213] Create JSON Schema for Store Paths We immediately use this in the JSON schemas for Derivation and Deriving Path, but we cannot yet use it in Store Object Info because those paths *do* include the store dir currently. 
--- doc/manual/package.nix | 1 + doc/manual/source/SUMMARY.md.in | 1 + doc/manual/source/protocols/json/meson.build | 1 + .../protocols/json/schema/derivation-v3.yaml | 20 ++++++------ .../json/schema/deriving-path-v1.yaml | 2 +- .../protocols/json/schema/store-path-v1 | 1 + .../protocols/json/schema/store-path-v1.yaml | 32 +++++++++++++++++++ .../source/protocols/json/store-path.md | 15 +++++++++ src/json-schema-checks/meson.build | 7 ++++ src/json-schema-checks/package.nix | 1 + src/json-schema-checks/store-path | 1 + 11 files changed, 72 insertions(+), 10 deletions(-) create mode 120000 doc/manual/source/protocols/json/schema/store-path-v1 create mode 100644 doc/manual/source/protocols/json/schema/store-path-v1.yaml create mode 100644 doc/manual/source/protocols/json/store-path.md create mode 120000 src/json-schema-checks/store-path diff --git a/doc/manual/package.nix b/doc/manual/package.nix index 140fa9849..b7c9503ef 100644 --- a/doc/manual/package.nix +++ b/doc/manual/package.nix @@ -36,6 +36,7 @@ mkMesonDerivation (finalAttrs: { # For example JSON ../../src/libutil-tests/data/hash ../../src/libstore-tests/data/content-address + ../../src/libstore-tests/data/store-path ../../src/libstore-tests/data/derived-path # Too many different types of files to filter for now ../../doc/manual diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index abd9422cd..7f3b1a103 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -119,6 +119,7 @@ - [JSON Formats](protocols/json/index.md) - [Hash](protocols/json/hash.md) - [Content Address](protocols/json/content-address.md) + - [Store Path](protocols/json/store-path.md) - [Store Object Info](protocols/json/store-object-info.md) - [Derivation](protocols/json/derivation.md) - [Deriving Path](protocols/json/deriving-path.md) diff --git a/doc/manual/source/protocols/json/meson.build b/doc/manual/source/protocols/json/meson.build index f79667961..e8546d813 100644 --- 
a/doc/manual/source/protocols/json/meson.build +++ b/doc/manual/source/protocols/json/meson.build @@ -11,6 +11,7 @@ json_schema_config = files('json-schema-for-humans-config.yaml') schemas = [ 'hash-v1', 'content-address-v1', + 'store-path-v1', 'derivation-v3', 'deriving-path-v1', ] diff --git a/doc/manual/source/protocols/json/schema/derivation-v3.yaml b/doc/manual/source/protocols/json/schema/derivation-v3.yaml index 30fddf699..3275bcdd9 100644 --- a/doc/manual/source/protocols/json/schema/derivation-v3.yaml +++ b/doc/manual/source/protocols/json/schema/derivation-v3.yaml @@ -85,7 +85,7 @@ properties: > ] > ``` items: - type: string + $ref: "store-path-v1.yaml" inputDrvs: type: object @@ -103,13 +103,15 @@ properties: > ``` > > specifies that this derivation depends on the `dev` output of `curl`, and the `out` output of `unzip`. - additionalProperties: - title: Store Path - description: | - A store path to a derivation, mapped to the outputs of that derivation. - oneOf: - - "$ref": "#/$defs/outputNames" - - "$ref": "#/$defs/dynamicOutputs" + patternProperties: + "^[0123456789abcdfghijklmnpqrsvwxyz]{32}-.+\\.drv$": + title: Store Path + description: | + A store path to a derivation, mapped to the outputs of that derivation. + oneOf: + - "$ref": "#/$defs/outputNames" + - "$ref": "#/$defs/dynamicOutputs" + additionalProperties: false system: type: string @@ -155,7 +157,7 @@ properties: type: object properties: path: - type: string + $ref: "store-path-v1.yaml" title: Output path description: | The output path, if known in advance. 
diff --git a/doc/manual/source/protocols/json/schema/deriving-path-v1.yaml b/doc/manual/source/protocols/json/schema/deriving-path-v1.yaml index 7fd74941e..11a784d06 100644 --- a/doc/manual/source/protocols/json/schema/deriving-path-v1.yaml +++ b/doc/manual/source/protocols/json/schema/deriving-path-v1.yaml @@ -7,7 +7,7 @@ oneOf: - title: Constant description: | See [Constant](@docroot@/store/derivation/index.md#deriving-path-constant) deriving path. - type: string + $ref: "store-path-v1.yaml" - title: Output description: | See [Output](@docroot@/store/derivation/index.md#deriving-path-output) deriving path. diff --git a/doc/manual/source/protocols/json/schema/store-path-v1 b/doc/manual/source/protocols/json/schema/store-path-v1 new file mode 120000 index 000000000..31e7a6b2a --- /dev/null +++ b/doc/manual/source/protocols/json/schema/store-path-v1 @@ -0,0 +1 @@ +../../../../../../src/libstore-tests/data/store-path \ No newline at end of file diff --git a/doc/manual/source/protocols/json/schema/store-path-v1.yaml b/doc/manual/source/protocols/json/schema/store-path-v1.yaml new file mode 100644 index 000000000..2012aab99 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/store-path-v1.yaml @@ -0,0 +1,32 @@ +"$schema": "http://json-schema.org/draft-07/schema" +"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/store-path-v1.json" +title: Store Path +description: | + A [store path](@docroot@/store/store-path.md) identifying a store object. + + This schema describes the JSON representation of store paths as used in various Nix JSON APIs. + + > **Warning** + > + > This JSON format is currently + > [**experimental**](@docroot@/development/experimental-features.md#xp-feature-nix-command) + > and subject to change. + + ## Format + + Store paths in JSON are represented as strings containing just the hash and name portion, without the store directory prefix. 
+ + For example: `"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv"` + + (If the store dir is `/nix/store`, then this corresponds to the path `/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv`.) + + ## Structure + + The format follows this pattern: `${digest}-${name}` + + - **digest**: Digest rendered in a custom variant of [Base32](https://en.wikipedia.org/wiki/Base32) (20 arbitrary bytes become 32 ASCII characters) + - **name**: The package name and optional version/suffix information + +type: string +pattern: "^[0123456789abcdfghijklmnpqrsvwxyz]{32}-.+$" +minLength: 34 diff --git a/doc/manual/source/protocols/json/store-path.md b/doc/manual/source/protocols/json/store-path.md new file mode 100644 index 000000000..02ecc8068 --- /dev/null +++ b/doc/manual/source/protocols/json/store-path.md @@ -0,0 +1,15 @@ +{{#include store-path-v1-fixed.md}} + +## Examples + +### Simple store path + +```json +{{#include schema/store-path-v1/simple.json}} +``` + + diff --git a/src/json-schema-checks/meson.build b/src/json-schema-checks/meson.build index 745fb5ffa..f3e52e544 100644 --- a/src/json-schema-checks/meson.build +++ b/src/json-schema-checks/meson.build @@ -38,6 +38,13 @@ schemas = [ 'nar.json', ], }, + { 'stem' : 'store-path', 'schema' : schema_dir / 'store-path-v1.yaml', 'files' : [ 'simple.json', ], }, { 'stem' : 'derivation', 'schema' : schema_dir / 'derivation-v3.yaml', diff --git a/src/json-schema-checks/package.nix b/src/json-schema-checks/package.nix index 6a76c8b28..0122b5493 100644 --- a/src/json-schema-checks/package.nix +++ b/src/json-schema-checks/package.nix @@ -22,6 +22,7 @@ mkMesonDerivation (finalAttrs: { ../../doc/manual/source/protocols/json/schema ../../src/libutil-tests/data/hash ../../src/libstore-tests/data/content-address + ../../src/libstore-tests/data/store-path ../../src/libstore-tests/data/derivation ../../src/libstore-tests/data/derived-path ./. 
diff --git a/src/json-schema-checks/store-path b/src/json-schema-checks/store-path new file mode 120000 index 000000000..003b1dbbb --- /dev/null +++ b/src/json-schema-checks/store-path @@ -0,0 +1 @@ +../../src/libstore-tests/data/store-path \ No newline at end of file From c874e7071b0f81406a4078e5ce0aec50770ccd53 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Wed, 29 Oct 2025 01:47:18 +0300 Subject: [PATCH 092/213] libstore/http-binary-cache-store: Improve error messages in HttpBinaryCacheStore::upsertFile MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Now the error message doesn't cram everything into a single line and we now instead get: error: … while uploading to HTTP binary cache at 's3://my-cache?endpoint=http://localhost:9000?compression%3Dzstd&region=eu-west-1' error: unable to download 'http://localhost:9000/my-cache/nar/1125zqba8cx8wbfa632vy458a3j3xja0qpcqafsfdildyl9dqa7x.nar.xz': Operation was aborted by an application callback (42) --- src/libstore/http-binary-cache-store.cc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 738db132d..1f9ee4100 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -157,7 +157,9 @@ void HttpBinaryCacheStore::upsertFile( try { getFileTransfer()->upload(req); } catch (FileTransferError & e) { - throw UploadToHTTP("while uploading to HTTP binary cache at '%s': %s", config->cacheUri.to_string(), e.msg()); + UploadToHTTP err(e.message()); + err.addTrace({}, "while uploading to HTTP binary cache at '%s'", config->cacheUri.to_string()); + throw err; } } From ae49074548bb3485a0a263ca862f6aee95cfb09f Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Wed, 29 Oct 2025 02:48:26 +0300 Subject: [PATCH 093/213] libstore/filetransfer: Add HttpMethod::PUT This got lost in f1968ea38e51201b37962a9cfd80775989a56d46 and now we had 
incorrect logs that confused "downloading" when we were in fact "uploading" things. --- src/libstore/filetransfer.cc | 4 +++- src/libstore/http-binary-cache-store.cc | 2 +- src/libstore/include/nix/store/filetransfer.hh | 3 +++ src/libstore/s3-binary-cache-store.cc | 1 + 4 files changed, 8 insertions(+), 2 deletions(-) diff --git a/src/libstore/filetransfer.cc b/src/libstore/filetransfer.cc index 6b9c6602b..304984d99 100644 --- a/src/libstore/filetransfer.cc +++ b/src/libstore/filetransfer.cc @@ -394,9 +394,11 @@ struct curlFileTransfer : public FileTransfer if (request.method == HttpMethod::POST) { curl_easy_setopt(req, CURLOPT_POST, 1L); curl_easy_setopt(req, CURLOPT_POSTFIELDSIZE_LARGE, (curl_off_t) request.data->length()); - } else { + } else if (request.method == HttpMethod::PUT) { curl_easy_setopt(req, CURLOPT_UPLOAD, 1L); curl_easy_setopt(req, CURLOPT_INFILESIZE_LARGE, (curl_off_t) request.data->length()); + } else { + unreachable(); } curl_easy_setopt(req, CURLOPT_READFUNCTION, readCallbackWrapper); curl_easy_setopt(req, CURLOPT_READDATA, this); diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc index 738db132d..089c7873a 100644 --- a/src/libstore/http-binary-cache-store.cc +++ b/src/libstore/http-binary-cache-store.cc @@ -141,7 +141,7 @@ void HttpBinaryCacheStore::upsertFile( uint64_t sizeHint) { auto req = makeRequest(path); - + req.method = HttpMethod::PUT; auto data = StreamToSourceAdapter(istream).drain(); auto compressionMethod = getCompressionMethod(path); diff --git a/src/libstore/include/nix/store/filetransfer.hh b/src/libstore/include/nix/store/filetransfer.hh index 305c33af1..08a2b6329 100644 --- a/src/libstore/include/nix/store/filetransfer.hh +++ b/src/libstore/include/nix/store/filetransfer.hh @@ -88,6 +88,7 @@ extern const unsigned int RETRY_TIME_MS_DEFAULT; */ enum struct HttpMethod { GET, + PUT, HEAD, POST, DELETE, @@ -147,7 +148,9 @@ struct FileTransferRequest case HttpMethod::HEAD: case 
HttpMethod::GET: return "download"; + case HttpMethod::PUT: case HttpMethod::POST: + assert(data); return "upload"; case HttpMethod::DELETE: return "delet"; diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 828e75b7c..417355b68 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -101,6 +101,7 @@ std::string S3BinaryCacheStore::uploadPart(std::string_view key, std::string_view uploadId, uint64_t partNumber, std::string data) { auto req = makeRequest(key); + req.method = HttpMethod::PUT; req.setupForS3(); auto url = req.uri.parsed(); From 6280905638aac9d15c09fc4d38aa469ee63d17be Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 28 Oct 2025 13:24:05 -0400 Subject: [PATCH 094/213] Convert store path info JSON docs to formal JSON Schema, and test This continues the work for formalizing our current JSON docs. Note that in the process, a few bugs were caught: - `closureSize` was repeated twice, forgot `closureDownloadSize` - `file*` fields should be `download*`. They are in fact called that in the line-oriented `.narinfo` file, but were renamed in the JSON format. 
--- doc/manual/package.nix | 2 + .../source/protocols/json/derivation.md | 2 +- doc/manual/source/protocols/json/hash.md | 2 +- doc/manual/source/protocols/json/meson.build | 1 + .../source/protocols/json/schema/nar-info-v1 | 1 + .../json/schema/store-object-info-v1 | 1 + .../json/schema/store-object-info-v1.yaml | 235 ++++++++++++++++++ .../protocols/json/store-object-info.md | 117 +++------ .../source/protocols/json/store-path.md | 2 +- src/json-schema-checks/meson.build | 50 ++++ src/json-schema-checks/nar-info | 1 + src/json-schema-checks/package.nix | 2 + src/json-schema-checks/store-object-info | 1 + 13 files changed, 327 insertions(+), 90 deletions(-) create mode 120000 doc/manual/source/protocols/json/schema/nar-info-v1 create mode 120000 doc/manual/source/protocols/json/schema/store-object-info-v1 create mode 100644 doc/manual/source/protocols/json/schema/store-object-info-v1.yaml create mode 120000 src/json-schema-checks/nar-info create mode 120000 src/json-schema-checks/store-object-info diff --git a/doc/manual/package.nix b/doc/manual/package.nix index b7c9503ef..7d29df3c3 100644 --- a/doc/manual/package.nix +++ b/doc/manual/package.nix @@ -38,6 +38,8 @@ mkMesonDerivation (finalAttrs: { ../../src/libstore-tests/data/content-address ../../src/libstore-tests/data/store-path ../../src/libstore-tests/data/derived-path + ../../src/libstore-tests/data/path-info + ../../src/libstore-tests/data/nar-info # Too many different types of files to filter for now ../../doc/manual ./. 
diff --git a/doc/manual/source/protocols/json/derivation.md b/doc/manual/source/protocols/json/derivation.md index 602ab67e4..a4a4ea79d 100644 --- a/doc/manual/source/protocols/json/derivation.md +++ b/doc/manual/source/protocols/json/derivation.md @@ -1,6 +1,6 @@ {{#include derivation-v3-fixed.md}} - diff --git a/doc/manual/source/protocols/json/store-path.md b/doc/manual/source/protocols/json/store-path.md index 02ecc8068..cd18f6595 100644 --- a/doc/manual/source/protocols/json/store-path.md +++ b/doc/manual/source/protocols/json/store-path.md @@ -8,7 +8,7 @@ {{#include schema/store-path-v1/simple.json}} ``` - For instance, in Nixpkgs, if the attribute `enableParallelBuilding` for the `mkDerivation` build helper is set to `true`, it passes the `-j${NIX_BUILD_CORES}` flag to GNU Make. - If set to `0`, nix will detect the number of CPU cores and pass this number via NIX_BUILD_CORES. + If set to `0`, nix will detect the number of CPU cores and pass this number via `NIX_BUILD_CORES`. > **Note** > diff --git a/src/nix/unix/daemon.cc b/src/nix/unix/daemon.cc index cb105a385..33ad8757a 100644 --- a/src/nix/unix/daemon.cc +++ b/src/nix/unix/daemon.cc @@ -87,7 +87,7 @@ struct AuthorizationSettings : Config {"*"}, "allowed-users", R"( - A list user names, separated by whitespace. + A list of user names, separated by whitespace. These users are allowed to connect to the Nix daemon. You can specify groups by prefixing names with `@`. From 4ea32d0b03f04143c54344363affea50fc804681 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Sun, 2 Nov 2025 14:00:07 +0100 Subject: [PATCH 126/213] Improve "resolution failed" error Previously: error: Cannot build '/nix/store/cqc798lwy2njwbdzgd0319z4r19j2d1w-nix-manual-2.33.0pre20251101_e4e4063.drv'. Reason: 1 dependency failed. 
Output paths: /nix/store/f1kln1c6z9r7rlhj0h9shcpch7j5g1fj-nix-manual-2.33.0pre20251101_e4e4063-man /nix/store/k65203rx5g1kcagpcz3c3a09bghcj92a-nix-manual-2.33.0pre20251101_e4e4063 error: Cannot build '/nix/store/ajk2fb6r7ijn2fc5c3h85n6zdi36xlfl-nixops-manual.drv'. Reason: 1 dependency failed. Output paths: /nix/store/0anr0998as8ry4hr5g3f3iarszx5aisx-nixops-manual error: resolution failed Now: error: Cannot build '/nix/store/cqc798lwy2njwbdzgd0319z4r19j2d1w-nix-manual-2.33.0pre20251101_e4e4063.drv'. Reason: 1 dependency failed. Output paths: /nix/store/f1kln1c6z9r7rlhj0h9shcpch7j5g1fj-nix-manual-2.33.0pre20251101_e4e4063-man /nix/store/k65203rx5g1kcagpcz3c3a09bghcj92a-nix-manual-2.33.0pre20251101_e4e4063 error: Cannot build '/nix/store/ajk2fb6r7ijn2fc5c3h85n6zdi36xlfl-nixops-manual.drv'. Reason: 1 dependency failed. Output paths: /nix/store/0anr0998as8ry4hr5g3f3iarszx5aisx-nixops-manual error: Build failed due to failed dependency --- src/libstore/build/derivation-goal.cc | 2 +- tests/functional/build.sh | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/libstore/build/derivation-goal.cc b/src/libstore/build/derivation-goal.cc index 717d6890a..14aa044ea 100644 --- a/src/libstore/build/derivation-goal.cc +++ b/src/libstore/build/derivation-goal.cc @@ -147,7 +147,7 @@ Goal::Co DerivationGoal::haveDerivation(bool storeDerivation) co_await await(std::move(waitees)); } if (nrFailed != 0) { - co_return doneFailure({BuildResult::Failure::DependencyFailed, "resolution failed"}); + co_return doneFailure({BuildResult::Failure::DependencyFailed, "Build failed due to failed dependency"}); } if (resolutionGoal->resolvedDrv) { diff --git a/tests/functional/build.sh b/tests/functional/build.sh index c9a39438d..0b06dcd91 100755 --- a/tests/functional/build.sh +++ b/tests/functional/build.sh @@ -184,6 +184,7 @@ test "$status" = 1 if isDaemonNewer "2.29pre"; then <<<"$out" grepQuiet -E "error: Cannot build '.*-x4\\.drv'" <<<"$out" grepQuiet -E "Reason: 1 
dependency failed." + <<<"$out" grepQuiet -E "Build failed due to failed dependency" else <<<"$out" grepQuiet -E "error: 1 dependencies of derivation '.*-x4\\.drv' failed to build" fi From 233bd250d175719896ef4985acb4a41613cb34c9 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Sun, 2 Nov 2025 14:10:12 +0100 Subject: [PATCH 127/213] flake: Update, nixos-25.05-small -> nixos-25.05 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Flake lock file updates: • Updated input 'nixpkgs': 'github:NixOS/nixpkgs/d98ce345cdab58477ca61855540999c86577d19d?narHash=sha256-O2CIn7HjZwEGqBrwu9EU76zlmA5dbmna7jL1XUmAId8%3D' (2025-08-26) → 'github:NixOS/nixpkgs/daf6dc47aa4b44791372d6139ab7b25269184d55?narHash=sha256-wxX7u6D2rpkJLWkZ2E932SIvDJW8%2BON/0Yy8%2Ba5vsDU%3D' (2025-10-27) --- flake.lock | 8 ++++---- flake.nix | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/flake.lock b/flake.lock index cc2b2f27e..63290ef86 100644 --- a/flake.lock +++ b/flake.lock @@ -63,16 +63,16 @@ }, "nixpkgs": { "locked": { - "lastModified": 1756178832, - "narHash": "sha256-O2CIn7HjZwEGqBrwu9EU76zlmA5dbmna7jL1XUmAId8=", + "lastModified": 1761597516, + "narHash": "sha256-wxX7u6D2rpkJLWkZ2E932SIvDJW8+ON/0Yy8+a5vsDU=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "d98ce345cdab58477ca61855540999c86577d19d", + "rev": "daf6dc47aa4b44791372d6139ab7b25269184d55", "type": "github" }, "original": { "owner": "NixOS", - "ref": "nixos-25.05-small", + "ref": "nixos-25.05", "repo": "nixpkgs", "type": "github" } diff --git a/flake.nix b/flake.nix index 418f3180f..e25722d46 100644 --- a/flake.nix +++ b/flake.nix @@ -1,7 +1,7 @@ { description = "The purely functional package manager"; - inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05-small"; + inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-25.05"; inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2"; inputs.nixpkgs-23-11.url = 
"github:NixOS/nixpkgs/a62e6edd6d5e1fa0329b8653c801147986f8d446"; From bf947bfc26704b3a21da222f3c67fb9d773383b9 Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Tue, 21 Oct 2025 06:19:17 +0000 Subject: [PATCH 128/213] feat(libstore/s3-binary-cache-store): add multipart upload config settings Add three configuration settings to `S3BinaryCacheStoreConfig` to control multipart upload behavior: - `bool multipart-upload` (default `false`): Enable/disable multipart uploads - `uint64_t multipart-chunk-size` (default 5 MiB): Size of each upload part - `uint64_t multipart-threshold` (default 100 MiB): Minimum file size for multipart The feature is disabled by default. --- .../nix/store/s3-binary-cache-store.hh | 32 +++++++++++++++++++ src/libstore/s3-binary-cache-store.cc | 23 +++++++++++++ 2 files changed, 55 insertions(+) diff --git a/src/libstore/include/nix/store/s3-binary-cache-store.hh b/src/libstore/include/nix/store/s3-binary-cache-store.hh index 81a2d3f3f..bf86d0671 100644 --- a/src/libstore/include/nix/store/s3-binary-cache-store.hh +++ b/src/libstore/include/nix/store/s3-binary-cache-store.hh @@ -61,6 +61,38 @@ struct S3BinaryCacheStoreConfig : HttpBinaryCacheStoreConfig > addressing instead of virtual host based addressing. )"}; + const Setting multipartUpload{ + this, + false, + "multipart-upload", + R"( + Whether to use multipart uploads for large files. When enabled, + files exceeding the multipart threshold will be uploaded in + multiple parts, which is required for files larger than 5 GiB and + can improve performance and reliability for large uploads. + )"}; + + const Setting multipartChunkSize{ + this, + 5 * 1024 * 1024, + "multipart-chunk-size", + R"( + The size (in bytes) of each part in multipart uploads. Must be + at least 5 MiB (AWS S3 requirement). Larger chunk sizes reduce the + number of requests but use more memory. Default is 5 MiB. 
+ )", + {"buffer-size"}}; + + const Setting multipartThreshold{ + this, + 100 * 1024 * 1024, + "multipart-threshold", + R"( + The minimum file size (in bytes) for using multipart uploads. + Files smaller than this threshold will use regular PUT requests. + Default is 100 MiB. Only takes effect when multipart-upload is enabled. + )"}; + /** * Set of settings that are part of the S3 URI itself. * These are needed for region specification and other S3-specific settings. diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 9303a80f8..4cf5f987a 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -15,6 +15,7 @@ namespace nix { MakeError(UploadToS3, Error); +static constexpr uint64_t AWS_MIN_PART_SIZE = 5 * 1024 * 1024; // 5MiB static constexpr uint64_t AWS_MAX_PART_SIZE = 5ULL * 1024 * 1024 * 1024; // 5GiB class S3BinaryCacheStore : public virtual HttpBinaryCacheStore @@ -253,6 +254,28 @@ S3BinaryCacheStoreConfig::S3BinaryCacheStoreConfig( cacheUri.query[key] = value; } } + + if (multipartChunkSize < AWS_MIN_PART_SIZE) { + throw UsageError( + "multipart-chunk-size must be at least %s, got %s", + renderSize(AWS_MIN_PART_SIZE), + renderSize(multipartChunkSize.get())); + } + + if (multipartChunkSize > AWS_MAX_PART_SIZE) { + throw UsageError( + "multipart-chunk-size must be at most %s, got %s", + renderSize(AWS_MAX_PART_SIZE), + renderSize(multipartChunkSize.get())); + } + + if (multipartUpload && multipartThreshold < multipartChunkSize) { + warn( + "multipart-threshold (%s) is less than multipart-chunk-size (%s), " + "which may result in single-part multipart uploads", + renderSize(multipartThreshold.get()), + renderSize(multipartChunkSize.get())); + } } std::string S3BinaryCacheStoreConfig::getHumanReadableURI() const From 040d1aae41a3bfda86c29910eb1495d75598fd35 Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Wed, 22 Oct 2025 08:42:32 +0000 Subject: [PATCH 129/213] 
feat(libstore/s3-binary-cache-store): implement `uploadMultipart()` Implement `uploadMultipart()`, the main method that orchestrates S3 multipart uploads --- src/libstore/s3-binary-cache-store.cc | 228 ++++++++++++++++++++++---- 1 file changed, 192 insertions(+), 36 deletions(-) diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 4cf5f987a..37264dfae 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -7,6 +7,7 @@ #include "nix/util/util.hh" #include +#include #include #include #include @@ -17,6 +18,7 @@ MakeError(UploadToS3, Error); static constexpr uint64_t AWS_MIN_PART_SIZE = 5 * 1024 * 1024; // 5MiB static constexpr uint64_t AWS_MAX_PART_SIZE = 5ULL * 1024 * 1024 * 1024; // 5GiB +static constexpr uint64_t AWS_MAX_PART_COUNT = 10000; class S3BinaryCacheStore : public virtual HttpBinaryCacheStore { @@ -51,9 +53,48 @@ private: std::optional contentEncoding); /** - * Uploads a file to S3 (CompressedSource overload). + * Uploads a file to S3 using multipart upload. + * + * This method is suitable for large files that exceed the multipart threshold. + * It orchestrates the complete multipart upload process: creating the upload, + * splitting the data into parts, uploading each part, and completing the upload. + * If any error occurs, the multipart upload is automatically aborted. + * + * @see https://docs.aws.amazon.com/AmazonS3/latest/userguide/mpuoverview.html */ - void upload(std::string_view path, CompressedSource & source, std::string_view mimeType); + void uploadMultipart( + std::string_view path, + RestartableSource & source, + uint64_t sizeHint, + std::string_view mimeType, + std::optional contentEncoding); + + /** + * A Sink that manages a complete S3 multipart upload lifecycle. + * Creates the upload on construction, buffers and uploads chunks as data arrives, + * and completes or aborts the upload appropriately. 
+ */ + struct MultipartSink : Sink + { + S3BinaryCacheStore & store; + std::string_view path; + std::string uploadId; + std::string::size_type chunkSize; + + std::vector partEtags; + std::string buffer; + + MultipartSink( + S3BinaryCacheStore & store, + std::string_view path, + uint64_t sizeHint, + std::string_view mimeType, + std::optional contentEncoding); + + void operator()(std::string_view data) override; + void finish(); + void uploadChunk(std::string chunk); + }; /** * Creates a multipart upload for large objects to S3. @@ -73,18 +114,13 @@ private: */ std::string uploadPart(std::string_view key, std::string_view uploadId, uint64_t partNumber, std::string data); - struct UploadedPart - { - uint64_t partNumber; - std::string etag; - }; - /** * Completes a multipart upload by combining all uploaded parts. * @see * https://docs.aws.amazon.com/AmazonS3/latest/API/API_CompleteMultipartUpload.html#API_CompleteMultipartUpload_RequestSyntax */ - void completeMultipartUpload(std::string_view key, std::string_view uploadId, std::span parts); + void + completeMultipartUpload(std::string_view key, std::string_view uploadId, std::span partEtags); /** * Abort a multipart upload @@ -92,17 +128,31 @@ private: * @see * https://docs.aws.amazon.com/AmazonS3/latest/API/API_AbortMultipartUpload.html#API_AbortMultipartUpload_RequestSyntax */ - void abortMultipartUpload(std::string_view key, std::string_view uploadId); + void abortMultipartUpload(std::string_view key, std::string_view uploadId) noexcept; }; void S3BinaryCacheStore::upsertFile( const std::string & path, RestartableSource & source, const std::string & mimeType, uint64_t sizeHint) { - if (auto compressionMethod = getCompressionMethod(path)) { - CompressedSource compressed(source, *compressionMethod); - upload(path, compressed, mimeType); - } else { - upload(path, source, sizeHint, mimeType, std::nullopt); + auto doUpload = [&](RestartableSource & src, uint64_t size, std::optional encoding) { + if 
(s3Config->multipartUpload && size > s3Config->multipartThreshold) { + uploadMultipart(path, src, size, mimeType, encoding); + } else { + upload(path, src, size, mimeType, encoding); + } + }; + + try { + if (auto compressionMethod = getCompressionMethod(path)) { + CompressedSource compressed(source, *compressionMethod); + doUpload(compressed, compressed.size(), compressed.getCompressionMethod()); + } else { + doUpload(source, sizeHint, std::nullopt); + } + } catch (FileTransferError & e) { + UploadToS3 err(e.message()); + err.addTrace({}, "while uploading to S3 binary cache at '%s'", config->cacheUri.to_string()); + throw err; } } @@ -120,18 +170,112 @@ void S3BinaryCacheStore::upload( renderSize(sizeHint), renderSize(AWS_MAX_PART_SIZE)); - try { - HttpBinaryCacheStore::upload(path, source, sizeHint, mimeType, contentEncoding); - } catch (FileTransferError & e) { - UploadToS3 err(e.message()); - err.addTrace({}, "while uploading to S3 binary cache at '%s'", config->cacheUri.to_string()); - throw err; + HttpBinaryCacheStore::upload(path, source, sizeHint, mimeType, contentEncoding); +} + +void S3BinaryCacheStore::uploadMultipart( + std::string_view path, + RestartableSource & source, + uint64_t sizeHint, + std::string_view mimeType, + std::optional contentEncoding) +{ + debug("using S3 multipart upload for '%s' (%d bytes)", path, sizeHint); + MultipartSink sink(*this, path, sizeHint, mimeType, contentEncoding); + source.drainInto(sink); + sink.finish(); +} + +S3BinaryCacheStore::MultipartSink::MultipartSink( + S3BinaryCacheStore & store, + std::string_view path, + uint64_t sizeHint, + std::string_view mimeType, + std::optional contentEncoding) + : store(store) + , path(path) +{ + // Calculate chunk size and estimated parts + chunkSize = store.s3Config->multipartChunkSize; + uint64_t estimatedParts = (sizeHint + chunkSize - 1) / chunkSize; // ceil division + + if (estimatedParts > AWS_MAX_PART_COUNT) { + // Equivalent to ceil(sizeHint / AWS_MAX_PART_COUNT) + uint64_t 
minChunkSize = (sizeHint + AWS_MAX_PART_COUNT - 1) / AWS_MAX_PART_COUNT; + + if (minChunkSize > AWS_MAX_PART_SIZE) { + throw Error( + "file too large for S3 multipart upload: %s would require chunk size of %s " + "(max %s) to stay within %d part limit", + renderSize(sizeHint), + renderSize(minChunkSize), + renderSize(AWS_MAX_PART_SIZE), + AWS_MAX_PART_COUNT); + } + + warn( + "adjusting S3 multipart chunk size from %s to %s " + "to stay within %d part limit for %s file", + renderSize(store.s3Config->multipartChunkSize.get()), + renderSize(minChunkSize), + AWS_MAX_PART_COUNT, + renderSize(sizeHint)); + + chunkSize = minChunkSize; + estimatedParts = AWS_MAX_PART_COUNT; + } + + buffer.reserve(chunkSize); + partEtags.reserve(estimatedParts); + uploadId = store.createMultipartUpload(path, mimeType, contentEncoding); +} + +void S3BinaryCacheStore::MultipartSink::operator()(std::string_view data) +{ + buffer.append(data); + + while (buffer.size() >= chunkSize) { + // Move entire buffer, extract excess, copy back remainder + auto chunk = std::move(buffer); + auto excessSize = chunk.size() > chunkSize ? 
chunk.size() - chunkSize : 0; + if (excessSize > 0) { + buffer.resize(excessSize); + std::memcpy(buffer.data(), chunk.data() + chunkSize, excessSize); + } + chunk.resize(std::min(chunkSize, chunk.size())); + uploadChunk(std::move(chunk)); } } -void S3BinaryCacheStore::upload(std::string_view path, CompressedSource & source, std::string_view mimeType) +void S3BinaryCacheStore::MultipartSink::finish() { - upload(path, static_cast(source), source.size(), mimeType, source.getCompressionMethod()); + if (!buffer.empty()) { + uploadChunk(std::move(buffer)); + } + + try { + if (partEtags.empty()) { + throw Error("no data read from stream"); + } + store.completeMultipartUpload(path, uploadId, partEtags); + } catch (Error & e) { + store.abortMultipartUpload(path, uploadId); + e.addTrace({}, "while finishing an S3 multipart upload"); + throw; + } +} + +void S3BinaryCacheStore::MultipartSink::uploadChunk(std::string chunk) +{ + auto partNumber = partEtags.size() + 1; + try { + std::string etag = store.uploadPart(path, uploadId, partNumber, std::move(chunk)); + partEtags.push_back(std::move(etag)); + } catch (Error & e) { + store.abortMultipartUpload(path, uploadId); + e.addTrace({}, "while uploading part %d of an S3 multipart upload", partNumber); + throw; + } } std::string S3BinaryCacheStore::createMultipartUpload( @@ -171,6 +315,10 @@ std::string S3BinaryCacheStore::createMultipartUpload( std::string S3BinaryCacheStore::uploadPart(std::string_view key, std::string_view uploadId, uint64_t partNumber, std::string data) { + if (partNumber > AWS_MAX_PART_COUNT) { + throw Error("S3 multipart upload exceeded %d part limit", AWS_MAX_PART_COUNT); + } + auto req = makeRequest(key); req.method = HttpMethod::PUT; req.setupForS3(); @@ -189,24 +337,29 @@ S3BinaryCacheStore::uploadPart(std::string_view key, std::string_view uploadId, throw Error("S3 UploadPart response missing ETag for part %d", partNumber); } + debug("Part %d uploaded, ETag: %s", partNumber, result.etag); return 
std::move(result.etag); } -void S3BinaryCacheStore::abortMultipartUpload(std::string_view key, std::string_view uploadId) +void S3BinaryCacheStore::abortMultipartUpload(std::string_view key, std::string_view uploadId) noexcept { - auto req = makeRequest(key); - req.setupForS3(); + try { + auto req = makeRequest(key); + req.setupForS3(); - auto url = req.uri.parsed(); - url.query["uploadId"] = uploadId; - req.uri = VerbatimURL(url); - req.method = HttpMethod::DELETE; + auto url = req.uri.parsed(); + url.query["uploadId"] = uploadId; + req.uri = VerbatimURL(url); + req.method = HttpMethod::DELETE; - getFileTransfer()->enqueueFileTransfer(req).get(); + getFileTransfer()->enqueueFileTransfer(req).get(); + } catch (...) { + ignoreExceptionInDestructor(); + } } void S3BinaryCacheStore::completeMultipartUpload( - std::string_view key, std::string_view uploadId, std::span parts) + std::string_view key, std::string_view uploadId, std::span partEtags) { auto req = makeRequest(key); req.setupForS3(); @@ -217,21 +370,24 @@ void S3BinaryCacheStore::completeMultipartUpload( req.method = HttpMethod::POST; std::string xml = ""; - for (const auto & part : parts) { + for (const auto & [idx, etag] : enumerate(partEtags)) { xml += ""; - xml += "" + std::to_string(part.partNumber) + ""; - xml += "" + part.etag + ""; + // S3 part numbers are 1-indexed, but vector indices are 0-indexed + xml += "" + std::to_string(idx + 1) + ""; + xml += "" + etag + ""; xml += ""; } xml += ""; - debug("S3 CompleteMultipartUpload XML (%d parts): %s", parts.size(), xml); + debug("S3 CompleteMultipartUpload XML (%d parts): %s", partEtags.size(), xml); StringSource payload{xml}; req.data = {payload}; req.mimeType = "text/xml"; getFileTransfer()->enqueueFileTransfer(req).get(); + + debug("S3 multipart upload completed: %d parts uploaded for '%s'", partEtags.size(), key); } StringSet S3BinaryCacheStoreConfig::uriSchemes() From 965d6be7c1962b87d47eb229153e2b5685c64739 Mon Sep 17 00:00:00 2001 From: Bernardo 
Meurer Costa Date: Thu, 30 Oct 2025 19:22:06 +0000 Subject: [PATCH 130/213] tests(nixos/s3-binary-cache-store): enable multipart --- tests/nixos/s3-binary-cache-store.nix | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/tests/nixos/s3-binary-cache-store.nix b/tests/nixos/s3-binary-cache-store.nix index a07375489..a2ba1dae6 100644 --- a/tests/nixos/s3-binary-cache-store.nix +++ b/tests/nixos/s3-binary-cache-store.nix @@ -794,10 +794,9 @@ in test_compression_disabled() test_nix_prefetch_url() test_versioned_urls() - # FIXME: enable when multipart fully lands - # test_multipart_upload_basic() - # test_multipart_threshold() - # test_multipart_with_log_compression() + test_multipart_upload_basic() + test_multipart_threshold() + test_multipart_with_log_compression() print("\n" + "="*80) print("✓ All S3 Binary Cache Store Tests Passed!") From 3448d4fa4c7fca0d62487fa6ac0dfded72ff18de Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Sat, 25 Oct 2025 05:05:10 +0000 Subject: [PATCH 131/213] docs(rl-next/s3-curl-implementation): update with multipart uploads --- doc/manual/rl-next/s3-curl-implementation.md | 24 ++++++++++++++++---- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/doc/manual/rl-next/s3-curl-implementation.md b/doc/manual/rl-next/s3-curl-implementation.md index fab010010..2647ac581 100644 --- a/doc/manual/rl-next/s3-curl-implementation.md +++ b/doc/manual/rl-next/s3-curl-implementation.md @@ -1,6 +1,6 @@ --- synopsis: "Improved S3 binary cache support via HTTP" -prs: [13823, 14026, 14120, 14131, 14135, 14144, 14170, 14190, 14198, 14206, 14209, 14222, 14223, 13752] +prs: [13752, 13823, 14026, 14120, 14131, 14135, 14144, 14170, 14190, 14198, 14206, 14209, 14222, 14223, 14330, 14333, 14335, 14336, 14337, 14350, 14356, 14357, 14374, 14375, 14376, 14377, 14391, 14393, 14420, 14421] issues: [13084, 12671, 11748, 12403] --- @@ -18,9 +18,23 @@ improvements: The new implementation requires curl >= 7.75.0 and `aws-crt-cpp` 
for credential management. -All existing S3 URL formats and parameters remain supported, with the notable -exception of multi-part uploads, which are no longer supported. +All existing S3 URL formats and parameters remain supported, however the store +settings for configuring multipart uploads have changed: + +- **`multipart-upload`** (default: `false`): Enable multipart uploads for large + files. When enabled, files exceeding the multipart threshold will be uploaded + in multiple parts. + +- **`multipart-threshold`** (default: `100 MiB`): Minimum file size for using + multipart uploads. Files smaller than this will use regular PUT requests. + Only takes effect when `multipart-upload` is enabled. + +- **`multipart-chunk-size`** (default: `5 MiB`): Size of each part in multipart + uploads. Must be at least 5 MiB (AWS S3 requirement). Larger chunk sizes + reduce the number of requests but use more memory. + +- **`buffer-size`**: Has been replaced by `multipart-chunk-size` and is now an alias to it. Note that this change also means Nix now supports S3 binary cache stores even -if build without `aws-crt-cpp`, but only for public buckets which do not -require auth. +if built without `aws-crt-cpp`, but only for public buckets which do not +require authentication. 
From 81a2809a526e4fcc887d3178c8b48646320a25e8 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Mon, 3 Nov 2025 12:01:55 +0100 Subject: [PATCH 132/213] Apply updated nixfmt --- doc/manual/generate-store-types.nix | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/manual/generate-store-types.nix b/doc/manual/generate-store-types.nix index a03d3d621..4e06c7f60 100644 --- a/doc/manual/generate-store-types.nix +++ b/doc/manual/generate-store-types.nix @@ -24,9 +24,9 @@ let in concatStringsSep "\n" (map showEntry storesList); - "index.md" = - replaceStrings [ "@store-types@" ] [ index ] - (readFile ./source/store/types/index.md.in); + "index.md" = replaceStrings [ "@store-types@" ] [ index ] ( + readFile ./source/store/types/index.md.in + ); tableOfContents = let From bd420928730cf268ffe33071292e044118a0c57c Mon Sep 17 00:00:00 2001 From: John Ericson Date: Sat, 1 Nov 2025 16:54:22 -0400 Subject: [PATCH 133/213] Use less `c_str()` in the evaluator, and other cleanups It is better to avoid null termination for performance and memory safety, wherever possible. These are good cleanups extracted from the Pascal String work that we can land by themselves first, shrinking the diff in that PR. 
Co-Authored-By: Aspen Smith Co-Authored-By: Sergei Zimmerman --- src/libexpr-c/nix_api_value.cc | 2 +- .../include/nix/expr/tests/libexpr.hh | 2 +- src/libexpr-tests/value/value.cc | 18 +++++++++++++++ src/libexpr/eval-cache.cc | 6 ++--- src/libexpr/eval.cc | 23 +++++++++++-------- src/libexpr/get-drvs.cc | 8 +++---- src/libexpr/include/nix/expr/get-drvs.hh | 2 +- src/libexpr/include/nix/expr/value.hh | 7 +++++- src/libexpr/nixexpr.cc | 2 +- src/libexpr/primops.cc | 6 ++--- src/libexpr/value-to-json.cc | 2 +- src/libexpr/value-to-xml.cc | 6 ++--- src/libflake/flake.cc | 6 ++--- src/libutil-c/nix_api_util.cc | 4 ++-- src/libutil-c/nix_api_util_internal.h | 2 +- src/nix/nix-env/nix-env.cc | 6 ++--- 16 files changed, 64 insertions(+), 38 deletions(-) diff --git a/src/libexpr-c/nix_api_value.cc b/src/libexpr-c/nix_api_value.cc index 3b8c7dd04..e231c36f4 100644 --- a/src/libexpr-c/nix_api_value.cc +++ b/src/libexpr-c/nix_api_value.cc @@ -235,7 +235,7 @@ nix_get_string(nix_c_context * context, const nix_value * value, nix_get_string_ try { auto & v = check_value_in(value); assert(v.type() == nix::nString); - call_nix_get_string_callback(v.c_str(), callback, user_data); + call_nix_get_string_callback(v.string_view(), callback, user_data); } NIXC_CATCH_ERRS } diff --git a/src/libexpr-test-support/include/nix/expr/tests/libexpr.hh b/src/libexpr-test-support/include/nix/expr/tests/libexpr.hh index a1320e14a..daae00802 100644 --- a/src/libexpr-test-support/include/nix/expr/tests/libexpr.hh +++ b/src/libexpr-test-support/include/nix/expr/tests/libexpr.hh @@ -106,7 +106,7 @@ MATCHER_P(IsStringEq, s, fmt("The string is equal to \"%1%\"", s)) if (arg.type() != nString) { return false; } - return std::string_view(arg.c_str()) == s; + return arg.string_view() == s; } MATCHER_P(IsIntEq, v, fmt("The string is equal to \"%1%\"", v)) diff --git a/src/libexpr-tests/value/value.cc b/src/libexpr-tests/value/value.cc index 63501dd49..229e449db 100644 --- a/src/libexpr-tests/value/value.cc 
+++ b/src/libexpr-tests/value/value.cc @@ -1,6 +1,7 @@ #include "nix/expr/value.hh" #include "nix/store/tests/libstore.hh" +#include namespace nix { @@ -22,4 +23,21 @@ TEST_F(ValueTest, vInt) ASSERT_EQ(true, vInt.isValid()); } +TEST_F(ValueTest, staticString) +{ + Value vStr1; + Value vStr2; + vStr1.mkStringNoCopy("foo"); + vStr2.mkStringNoCopy("foo"); + + auto sd1 = vStr1.string_view(); + auto sd2 = vStr2.string_view(); + + // The strings should be the same + ASSERT_EQ(sd1, sd2); + + // The strings should also be backed by the same (static) allocation + ASSERT_EQ(sd1.data(), sd2.data()); +} + } // namespace nix diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc index 480ca72c7..de74d2143 100644 --- a/src/libexpr/eval-cache.cc +++ b/src/libexpr/eval-cache.cc @@ -406,7 +406,7 @@ Value & AttrCursor::forceValue() if (root->db && (!cachedValue || std::get_if(&cachedValue->second))) { if (v.type() == nString) - cachedValue = {root->db->setString(getKey(), v.c_str(), v.context()), string_t{v.c_str(), {}}}; + cachedValue = {root->db->setString(getKey(), v.string_view(), v.context()), string_t{v.string_view(), {}}}; else if (v.type() == nPath) { auto path = v.path().path; cachedValue = {root->db->setString(getKey(), path.abs()), string_t{path.abs(), {}}}; @@ -541,7 +541,7 @@ std::string AttrCursor::getString() if (v.type() != nString && v.type() != nPath) root->state.error("'%s' is not a string but %s", getAttrPathStr(), showType(v)).debugThrow(); - return v.type() == nString ? v.c_str() : v.path().to_string(); + return v.type() == nString ? 
std::string(v.string_view()) : v.path().to_string(); } string_t AttrCursor::getStringWithContext() @@ -580,7 +580,7 @@ string_t AttrCursor::getStringWithContext() if (v.type() == nString) { NixStringContext context; copyContext(v, context); - return {v.c_str(), std::move(context)}; + return {std::string{v.string_view()}, std::move(context)}; } else if (v.type() == nPath) return {v.path().to_string(), {}}; else diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 873b88986..e2687148b 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -2366,12 +2366,15 @@ BackedStringView EvalState::coerceToString( } if (v.type() == nPath) { - return !canonicalizePath && !copyToStore - ? // FIXME: hack to preserve path literals that end in a - // slash, as in /foo/${x}. - v.pathStr() - : copyToStore ? store->printStorePath(copyPathToStore(context, v.path())) - : std::string(v.path().path.abs()); + if (!canonicalizePath && !copyToStore) { + // FIXME: hack to preserve path literals that end in a + // slash, as in /foo/${x}. 
+ return v.pathStrView(); + } else if (copyToStore) { + return store->printStorePath(copyPathToStore(context, v.path())); + } else { + return std::string{v.path().path.abs()}; + } } if (v.type() == nAttrs) { @@ -2624,7 +2627,7 @@ void EvalState::assertEqValues(Value & v1, Value & v2, const PosIdx pos, std::st return; case nString: - if (strcmp(v1.c_str(), v2.c_str()) != 0) { + if (v1.string_view() != v2.string_view()) { error( "string '%s' is not equal to string '%s'", ValuePrinter(*this, v1, errorPrintOptions), @@ -2641,7 +2644,7 @@ void EvalState::assertEqValues(Value & v1, Value & v2, const PosIdx pos, std::st ValuePrinter(*this, v2, errorPrintOptions)) .debugThrow(); } - if (strcmp(v1.pathStr(), v2.pathStr()) != 0) { + if (v1.pathStrView() != v2.pathStrView()) { error( "path '%s' is not equal to path '%s'", ValuePrinter(*this, v1, errorPrintOptions), @@ -2807,12 +2810,12 @@ bool EvalState::eqValues(Value & v1, Value & v2, const PosIdx pos, std::string_v return v1.boolean() == v2.boolean(); case nString: - return strcmp(v1.c_str(), v2.c_str()) == 0; + return v1.string_view() == v2.string_view(); case nPath: return // FIXME: compare accessors by their fingerprint. 
- v1.pathAccessor() == v2.pathAccessor() && strcmp(v1.pathStr(), v2.pathStr()) == 0; + v1.pathAccessor() == v2.pathAccessor() && v1.pathStrView() == v2.pathStrView(); case nNull: return true; diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc index 5a7281b2b..c4a2b00af 100644 --- a/src/libexpr/get-drvs.cc +++ b/src/libexpr/get-drvs.cc @@ -168,7 +168,7 @@ PackageInfo::Outputs PackageInfo::queryOutputs(bool withPaths, bool onlyOutputsT for (auto elem : outTI->listView()) { if (elem->type() != nString) throw errMsg; - auto out = outputs.find(elem->c_str()); + auto out = outputs.find(elem->string_view()); if (out == outputs.end()) throw errMsg; result.insert(*out); @@ -245,7 +245,7 @@ std::string PackageInfo::queryMetaString(const std::string & name) Value * v = queryMeta(name); if (!v || v->type() != nString) return ""; - return v->c_str(); + return std::string{v->string_view()}; } NixInt PackageInfo::queryMetaInt(const std::string & name, NixInt def) @@ -258,7 +258,7 @@ NixInt PackageInfo::queryMetaInt(const std::string & name, NixInt def) if (v->type() == nString) { /* Backwards compatibility with before we had support for integer meta fields. */ - if (auto n = string2Int(v->c_str())) + if (auto n = string2Int(v->string_view())) return NixInt{*n}; } return def; @@ -274,7 +274,7 @@ NixFloat PackageInfo::queryMetaFloat(const std::string & name, NixFloat def) if (v->type() == nString) { /* Backwards compatibility with before we had support for float meta fields. 
*/ - if (auto n = string2Float(v->c_str())) + if (auto n = string2Float(v->string_view())) return *n; } return def; diff --git a/src/libexpr/include/nix/expr/get-drvs.hh b/src/libexpr/include/nix/expr/get-drvs.hh index 3d42188bf..4beccabe2 100644 --- a/src/libexpr/include/nix/expr/get-drvs.hh +++ b/src/libexpr/include/nix/expr/get-drvs.hh @@ -15,7 +15,7 @@ namespace nix { struct PackageInfo { public: - typedef std::map> Outputs; + typedef std::map, std::less<>> Outputs; private: EvalState * state; diff --git a/src/libexpr/include/nix/expr/value.hh b/src/libexpr/include/nix/expr/value.hh index 22d85dc99..706a4fe3f 100644 --- a/src/libexpr/include/nix/expr/value.hh +++ b/src/libexpr/include/nix/expr/value.hh @@ -1109,7 +1109,7 @@ public: std::string_view string_view() const noexcept { - return std::string_view(getStorage().c_str); + return std::string_view{getStorage().c_str}; } const char * c_str() const noexcept @@ -1177,6 +1177,11 @@ public: return getStorage().path; } + std::string_view pathStrView() const noexcept + { + return std::string_view{getStorage().path}; + } + SourceAccessor * pathAccessor() const noexcept { return getStorage().accessor; diff --git a/src/libexpr/nixexpr.cc b/src/libexpr/nixexpr.cc index b183f1bbf..a1d1b7e4b 100644 --- a/src/libexpr/nixexpr.cc +++ b/src/libexpr/nixexpr.cc @@ -45,7 +45,7 @@ void ExprString::show(const SymbolTable & symbols, std::ostream & str) const void ExprPath::show(const SymbolTable & symbols, std::ostream & str) const { - str << v.pathStr(); + str << v.pathStrView(); } void ExprVar::show(const SymbolTable & symbols, std::ostream & str) const diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 04196bc1f..96e79fedd 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -691,12 +691,12 @@ struct CompareValues case nFloat: return v1->fpoint() < v2->fpoint(); case nString: - return strcmp(v1->c_str(), v2->c_str()) < 0; + return v1->string_view() < v2->string_view(); case nPath: // Note: we 
don't take the accessor into account // since it's not obvious how to compare them in a // reproducible way. - return strcmp(v1->pathStr(), v2->pathStr()) < 0; + return v1->pathStrView() < v2->pathStrView(); case nList: // Lexicographic comparison for (size_t i = 0;; i++) { @@ -2930,7 +2930,7 @@ static void prim_attrNames(EvalState & state, const PosIdx pos, Value ** args, V for (const auto & [n, i] : enumerate(*args[0]->attrs())) list[n] = Value::toPtr(state.symbols[i.name]); - std::sort(list.begin(), list.end(), [](Value * v1, Value * v2) { return strcmp(v1->c_str(), v2->c_str()) < 0; }); + std::sort(list.begin(), list.end(), [](Value * v1, Value * v2) { return v1->string_view() < v2->string_view(); }); v.mkList(list); } diff --git a/src/libexpr/value-to-json.cc b/src/libexpr/value-to-json.cc index 2cd853f60..03b14b83c 100644 --- a/src/libexpr/value-to-json.cc +++ b/src/libexpr/value-to-json.cc @@ -33,7 +33,7 @@ json printValueAsJSON( case nString: copyContext(v, context); - out = v.c_str(); + out = v.string_view(); break; case nPath: diff --git a/src/libexpr/value-to-xml.cc b/src/libexpr/value-to-xml.cc index d5959e894..0a7a334f4 100644 --- a/src/libexpr/value-to-xml.cc +++ b/src/libexpr/value-to-xml.cc @@ -82,7 +82,7 @@ static void printValueAsXML( case nString: /* !!! show the context? 
*/ copyContext(v, context); - doc.writeEmptyElement("string", singletonAttrs("value", v.c_str())); + doc.writeEmptyElement("string", singletonAttrs("value", v.string_view())); break; case nPath: @@ -102,14 +102,14 @@ static void printValueAsXML( if (strict) state.forceValue(*a->value, a->pos); if (a->value->type() == nString) - xmlAttrs["drvPath"] = drvPath = a->value->c_str(); + xmlAttrs["drvPath"] = drvPath = a->value->string_view(); } if (auto a = v.attrs()->get(state.s.outPath)) { if (strict) state.forceValue(*a->value, a->pos); if (a->value->type() == nString) - xmlAttrs["outPath"] = a->value->c_str(); + xmlAttrs["outPath"] = a->value->string_view(); } XMLOpenElement _(doc, "derivation", xmlAttrs); diff --git a/src/libflake/flake.cc b/src/libflake/flake.cc index 42385712c..dc60dbf08 100644 --- a/src/libflake/flake.cc +++ b/src/libflake/flake.cc @@ -97,7 +97,7 @@ static void parseFlakeInputAttr(EvalState & state, const Attr & attr, fetchers:: #pragma GCC diagnostic ignored "-Wswitch-enum" switch (attr.value->type()) { case nString: - attrs.emplace(state.symbols[attr.name], attr.value->c_str()); + attrs.emplace(state.symbols[attr.name], std::string(attr.value->string_view())); break; case nBool: attrs.emplace(state.symbols[attr.name], Explicit{attr.value->boolean()}); @@ -177,7 +177,7 @@ static FlakeInput parseFlakeInput( parseFlakeInputs(state, attr.value, attr.pos, lockRootAttrPath, flakeDir, false).first; } else if (attr.name == sFollows) { expectType(state, nString, *attr.value, attr.pos); - auto follows(parseInputAttrPath(attr.value->c_str())); + auto follows(parseInputAttrPath(attr.value->string_view())); follows.insert(follows.begin(), lockRootAttrPath.begin(), lockRootAttrPath.end()); input.follows = follows; } else @@ -264,7 +264,7 @@ static Flake readFlake( if (auto description = vInfo.attrs()->get(state.s.description)) { expectType(state, nString, *description->value, description->pos); - flake.description = description->value->c_str(); + 
flake.description = description->value->string_view(); } auto sInputs = state.symbols.create("inputs"); diff --git a/src/libutil-c/nix_api_util.cc b/src/libutil-c/nix_api_util.cc index 3903823aa..5934e8479 100644 --- a/src/libutil-c/nix_api_util.cc +++ b/src/libutil-c/nix_api_util.cc @@ -153,9 +153,9 @@ nix_err nix_err_code(const nix_c_context * read_context) } // internal -nix_err call_nix_get_string_callback(const std::string str, nix_get_string_callback callback, void * user_data) +nix_err call_nix_get_string_callback(const std::string_view str, nix_get_string_callback callback, void * user_data) { - callback(str.c_str(), str.size(), user_data); + callback(str.data(), str.size(), user_data); return NIX_OK; } diff --git a/src/libutil-c/nix_api_util_internal.h b/src/libutil-c/nix_api_util_internal.h index 92bb9c1d2..e4c5e93bb 100644 --- a/src/libutil-c/nix_api_util_internal.h +++ b/src/libutil-c/nix_api_util_internal.h @@ -32,7 +32,7 @@ nix_err nix_context_error(nix_c_context * context); * @return NIX_OK if there were no errors. * @see nix_get_string_callback */ -nix_err call_nix_get_string_callback(const std::string str, nix_get_string_callback callback, void * user_data); +nix_err call_nix_get_string_callback(const std::string_view str, nix_get_string_callback callback, void * user_data); #define NIXC_CATCH_ERRS \ catch (...) 
\ diff --git a/src/nix/nix-env/nix-env.cc b/src/nix/nix-env/nix-env.cc index 01c8ccf4b..2a0984d18 100644 --- a/src/nix/nix-env/nix-env.cc +++ b/src/nix/nix-env/nix-env.cc @@ -1228,7 +1228,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) else { if (v->type() == nString) { attrs2["type"] = "string"; - attrs2["value"] = v->c_str(); + attrs2["value"] = v->string_view(); xml.writeEmptyElement("meta", attrs2); } else if (v->type() == nInt) { attrs2["type"] = "int"; @@ -1249,7 +1249,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) if (elem->type() != nString) continue; XMLAttrs attrs3; - attrs3["value"] = elem->c_str(); + attrs3["value"] = elem->string_view(); xml.writeEmptyElement("string", attrs3); } } else if (v->type() == nAttrs) { @@ -1260,7 +1260,7 @@ static void opQuery(Globals & globals, Strings opFlags, Strings opArgs) continue; XMLAttrs attrs3; attrs3["type"] = globals.state->symbols[i.name]; - attrs3["value"] = i.value->c_str(); + attrs3["value"] = i.value->string_view(); xml.writeEmptyElement("string", attrs3); } } From 2f6c865e25ee41ec2ba5b8f087a29512ad7aff82 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 3 Nov 2025 13:22:28 +0100 Subject: [PATCH 134/213] getAccessorFromCommit(): Remove superfluous infoAttrs variable --- src/libfetchers/git.cc | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index c8311c17f..710d2f315 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -735,13 +735,10 @@ struct GitInputScheme : InputScheme auto rev = *input.getRev(); - Attrs infoAttrs({ - {"rev", rev.gitRev()}, - {"lastModified", getLastModified(*input.settings, repoInfo, repoDir, rev)}, - }); + input.attrs.insert_or_assign("lastModified", getLastModified(*input.settings, repoInfo, repoDir, rev)); if (!getShallowAttr(input)) - infoAttrs.insert_or_assign("revCount", getRevCount(*input.settings, repoInfo, repoDir, rev)); + 
input.attrs.insert_or_assign("revCount", getRevCount(*input.settings, repoInfo, repoDir, rev)); printTalkative("using revision %s of repo '%s'", rev.gitRev(), repoInfo.locationToArg()); @@ -797,9 +794,6 @@ struct GitInputScheme : InputScheme } assert(!origRev || origRev == rev); - if (!getShallowAttr(input)) - input.attrs.insert_or_assign("revCount", getIntAttr(infoAttrs, "revCount")); - input.attrs.insert_or_assign("lastModified", getIntAttr(infoAttrs, "lastModified")); return {accessor, std::move(input)}; } From 4a0ccc89d9721fd41dc66f74b475f39df60ed20f Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 3 Nov 2025 13:58:23 +0100 Subject: [PATCH 135/213] ThreadPool::enqueue(): Use move semantics This avoids a superfluous copy of the work item. --- src/libutil/include/nix/util/thread-pool.hh | 2 +- src/libutil/thread-pool.cc | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libutil/include/nix/util/thread-pool.hh b/src/libutil/include/nix/util/thread-pool.hh index 811c03d88..a07354146 100644 --- a/src/libutil/include/nix/util/thread-pool.hh +++ b/src/libutil/include/nix/util/thread-pool.hh @@ -36,7 +36,7 @@ public: /** * Enqueue a function to be executed by the thread pool. */ - void enqueue(const work_t & t); + void enqueue(work_t t); /** * Execute work items until the queue is empty. diff --git a/src/libutil/thread-pool.cc b/src/libutil/thread-pool.cc index b7740bc3e..24bdeef86 100644 --- a/src/libutil/thread-pool.cc +++ b/src/libutil/thread-pool.cc @@ -41,12 +41,12 @@ void ThreadPool::shutdown() thr.join(); } -void ThreadPool::enqueue(const work_t & t) +void ThreadPool::enqueue(work_t t) { auto state(state_.lock()); if (quit) throw ThreadPoolShutDown("cannot enqueue a work item while the thread pool is shutting down"); - state->pending.push(t); + state->pending.push(std::move(t)); /* Note: process() also executes items, so count it as a worker. 
*/ if (state->pending.size() > state->workers.size() + 1 && state->workers.size() + 1 < maxThreads) state->workers.emplace_back(&ThreadPool::doWork, this, false); From 53b4ea6c85e2d000b8badc923066866ba8de324c Mon Sep 17 00:00:00 2001 From: Farid Zakaria Date: Mon, 27 Oct 2025 11:26:46 -0700 Subject: [PATCH 136/213] Add documentation for NAR spec in kaitai * Add a new flake check * Add unit tests * Add Kaitai spec * Updated documentation --- doc/manual/source/SUMMARY.md.in | 2 +- .../{nix-archive.md => nix-archive/index.md} | 12 ++ .../source/protocols/nix-archive/nar.ksy | 169 ++++++++++++++++++ .../file-system-object/content-address.md | 2 +- flake.nix | 4 + meson.build | 1 + packaging/components.nix | 5 + packaging/dev-shell.nix | 4 +- packaging/hydra.nix | 1 + src/kaitai-struct-checks/.version | 1 + src/kaitai-struct-checks/meson.build | 77 ++++++++ src/kaitai-struct-checks/nar.ksy | 1 + src/kaitai-struct-checks/nars | 1 + .../nix-meson-build-support | 1 + src/kaitai-struct-checks/package.nix | 75 ++++++++ src/kaitai-struct-checks/test-parse-nar.cc | 48 +++++ src/nix/nar.md | 2 +- 17 files changed, 402 insertions(+), 4 deletions(-) rename doc/manual/source/protocols/{nix-archive.md => nix-archive/index.md} (73%) create mode 100644 doc/manual/source/protocols/nix-archive/nar.ksy create mode 120000 src/kaitai-struct-checks/.version create mode 100644 src/kaitai-struct-checks/meson.build create mode 120000 src/kaitai-struct-checks/nar.ksy create mode 120000 src/kaitai-struct-checks/nars create mode 120000 src/kaitai-struct-checks/nix-meson-build-support create mode 100644 src/kaitai-struct-checks/package.nix create mode 100644 src/kaitai-struct-checks/test-parse-nar.cc diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index 7f3b1a103..287dff872 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -125,7 +125,7 @@ - [Deriving Path](protocols/json/deriving-path.md) - [Serving Tarball 
Flakes](protocols/tarball-fetcher.md) - [Store Path Specification](protocols/store-path.md) - - [Nix Archive (NAR) Format](protocols/nix-archive.md) + - [Nix Archive (NAR) Format](protocols/nix-archive/index.md) - [Derivation "ATerm" file format](protocols/derivation-aterm.md) - [C API](c-api.md) - [Glossary](glossary.md) diff --git a/doc/manual/source/protocols/nix-archive.md b/doc/manual/source/protocols/nix-archive/index.md similarity index 73% rename from doc/manual/source/protocols/nix-archive.md rename to doc/manual/source/protocols/nix-archive/index.md index 02a8dd464..4d25f63e2 100644 --- a/doc/manual/source/protocols/nix-archive.md +++ b/doc/manual/source/protocols/nix-archive/index.md @@ -41,3 +41,15 @@ The `str` function / parameterized rule is defined as follows: - `int(n)` = the 64-bit little endian representation of the number `n` - `pad(s)` = the byte sequence `s`, padded with 0s to a multiple of 8 byte + +## Kaitai Struct Specification + +The Nix Archive (NAR) format is also formally described using [Kaitai Struct](https://kaitai.io/), an Interface Description Language (IDL) for defining binary data structures. + +> Kaitai Struct provides a language-agnostic, machine-readable specification that can be compiled into parsers for various programming languages (e.g., C++, Python, Java, Rust). + +```yaml +{{#include nar.ksy}} +``` + +The source of the spec can be found [here](https://github.com/nixos/nix/blob/master/src/nix-manual/source/protocols/nix-archive/nar.ksy). Contributions and improvements to the spec are welcomed. \ No newline at end of file diff --git a/doc/manual/source/protocols/nix-archive/nar.ksy b/doc/manual/source/protocols/nix-archive/nar.ksy new file mode 100644 index 000000000..1cad09097 --- /dev/null +++ b/doc/manual/source/protocols/nix-archive/nar.ksy @@ -0,0 +1,169 @@ +meta: + id: nix_nar + title: Nix Archive (NAR) + file-extension: nar + endian: le +doc: | + Nix Archive (NAR) format. 
A simple, reproducible binary archive + format used by the Nix package manager to serialize file system objects. +doc-ref: 'https://nixos.org/manual/nix/stable/command-ref/nix-store.html#nar-format' + +seq: + - id: magic + type: padded_str + doc: "Magic string, must be 'nix-archive-1'." + valid: + expr: _.body == 'nix-archive-1' + - id: root_node + type: node + doc: "The root of the archive, which is always a single node." + +types: + padded_str: + doc: | + A string, prefixed with its length (u8le) and + padded with null bytes to the next 8-byte boundary. + seq: + - id: len_str + type: u8 + - id: body + type: str + size: len_str + encoding: 'ascii' + - id: padding + size: (8 - (len_str % 8)) % 8 + + node: + doc: "A single filesystem node (file, directory, or symlink)." + seq: + - id: open_paren + type: padded_str + doc: "Must be '(', a token starting the node definition." + valid: + expr: _.body == '(' + - id: type_key + type: padded_str + doc: "Must be 'type'." + valid: + expr: _.body == 'type' + - id: type_val + type: padded_str + doc: "The type of the node: 'regular', 'directory', or 'symlink'." + - id: body + type: + switch-on: type_val.body + cases: + "'directory'": type_directory + "'regular'": type_regular + "'symlink'": type_symlink + - id: close_paren + type: padded_str + valid: + expr: _.body == ')' + if: "type_val.body != 'directory'" + doc: "Must be ')', a token ending the node definition." + + type_directory: + doc: "A directory node, containing a list of entries. Entries must be ordered by their names." + seq: + - id: entries + type: dir_entry + repeat: until + repeat-until: _.kind.body == ')' + types: + dir_entry: + doc: "A single entry within a directory, or a terminator." + seq: + - id: kind + type: padded_str + valid: + expr: _.body == 'entry' or _.body == ')' + doc: "Must be 'entry' (for a child node) or '' (for terminator)." 
+ - id: open_paren + type: padded_str + valid: + expr: _.body == '(' + if: 'kind.body == "entry"' + - id: name_key + type: padded_str + valid: + expr: _.body == 'name' + if: 'kind.body == "entry"' + - id: name + type: padded_str + if: 'kind.body == "entry"' + - id: node_key + type: padded_str + valid: + expr: _.body == 'node' + if: 'kind.body == "entry"' + - id: node + type: node + if: 'kind.body == "entry"' + doc: "The child node, present only if kind is 'entry'." + - id: close_paren + type: padded_str + valid: + expr: _.body == ')' + if: 'kind.body == "entry"' + instances: + is_terminator: + value: kind.body == ')' + + type_regular: + doc: "A regular file node." + seq: + # Read attributes (like 'executable') until we hit 'contents' + - id: attributes + type: reg_attribute + repeat: until + repeat-until: _.key.body == "contents" + # After the 'contents' token, read the file data + - id: file_data + type: file_content + instances: + is_executable: + value: 'attributes[0].key.body == "executable"' + doc: "True if the file has the 'executable' attribute." + types: + reg_attribute: + doc: "An attribute of the file, e.g., 'executable' or 'contents'." + seq: + - id: key + type: padded_str + doc: "Attribute key, e.g., 'executable' or 'contents'." + valid: + expr: _.body == 'executable' or _.body == 'contents' + - id: value + type: padded_str + if: 'key.body == "executable"' + valid: + expr: _.body == '' + doc: "Must be '' if key is 'executable'." + file_content: + doc: "The raw data of the file, prefixed by length." + seq: + - id: len_contents + type: u8 + # # This relies on the property of instances that they are lazily evaluated and cached. + - size: 0 + if: nar_offset < 0 + - id: contents + size: len_contents + - id: padding + size: (8 - (len_contents % 8)) % 8 + instances: + nar_offset: + value: _io.pos + + type_symlink: + doc: "A symbolic link node." + seq: + - id: target_key + type: padded_str + doc: "Must be 'target'." 
+ valid: + expr: _.body == 'target' + - id: target_val + type: padded_str + doc: "The destination path of the symlink." diff --git a/doc/manual/source/store/file-system-object/content-address.md b/doc/manual/source/store/file-system-object/content-address.md index 04a1021f1..5685de03e 100644 --- a/doc/manual/source/store/file-system-object/content-address.md +++ b/doc/manual/source/store/file-system-object/content-address.md @@ -46,7 +46,7 @@ be many different serialisations. For these reasons, Nix has its very own archive format—the Nix Archive (NAR) format, which is carefully designed to avoid the problems described above. -The exact specification of the Nix Archive format is in [specified here](../../protocols/nix-archive.md). +The exact specification of the Nix Archive format is in [specified here](../../protocols/nix-archive/index.md). ## Content addressing File System Objects beyond a single serialisation pass diff --git a/flake.nix b/flake.nix index e25722d46..a70617b74 100644 --- a/flake.nix +++ b/flake.nix @@ -417,6 +417,10 @@ supportsCross = false; }; + "nix-kaitai-struct-checks" = { + supportsCross = false; + }; + "nix-perl-bindings" = { supportsCross = false; }; diff --git a/meson.build b/meson.build index f3158ea6d..c493dfad6 100644 --- a/meson.build +++ b/meson.build @@ -61,3 +61,4 @@ if get_option('unit-tests') endif subproject('nix-functional-tests') subproject('json-schema-checks') +subproject('kaitai-struct-checks') diff --git a/packaging/components.nix b/packaging/components.nix index f9d7b109a..bbd6208b9 100644 --- a/packaging/components.nix +++ b/packaging/components.nix @@ -443,6 +443,11 @@ in */ nix-json-schema-checks = callPackage ../src/json-schema-checks/package.nix { }; + /** + Kaitai struct schema validation checks + */ + nix-kaitai-struct-checks = callPackage ../src/kaitai-struct-checks/package.nix { }; + nix-perl-bindings = callPackage ../src/perl/package.nix { }; /** diff --git a/packaging/dev-shell.nix b/packaging/dev-shell.nix index 
153e7a3eb..ea12e079f 100644 --- a/packaging/dev-shell.nix +++ b/packaging/dev-shell.nix @@ -109,6 +109,7 @@ pkgs.nixComponents2.nix-util.overrideAttrs ( ++ pkgs.nixComponents2.nix-external-api-docs.nativeBuildInputs ++ pkgs.nixComponents2.nix-functional-tests.externalNativeBuildInputs ++ pkgs.nixComponents2.nix-json-schema-checks.externalNativeBuildInputs + ++ pkgs.nixComponents2.nix-kaitai-struct-checks.externalNativeBuildInputs ++ lib.optional ( !buildCanExecuteHost # Hack around https://github.com/nixos/nixpkgs/commit/bf7ad8cfbfa102a90463433e2c5027573b462479 @@ -148,6 +149,7 @@ pkgs.nixComponents2.nix-util.overrideAttrs ( ++ pkgs.nixComponents2.nix-expr.externalPropagatedBuildInputs ++ pkgs.nixComponents2.nix-cmd.buildInputs ++ lib.optionals havePerl pkgs.nixComponents2.nix-perl-bindings.externalBuildInputs - ++ lib.optional havePerl pkgs.perl; + ++ lib.optional havePerl pkgs.perl + ++ pkgs.nixComponents2.nix-kaitai-struct-checks.externalBuildInputs; } ) diff --git a/packaging/hydra.nix b/packaging/hydra.nix index 3bbb6c15b..67e2c0dfd 100644 --- a/packaging/hydra.nix +++ b/packaging/hydra.nix @@ -63,6 +63,7 @@ let "nix-cli" "nix-functional-tests" "nix-json-schema-checks" + "nix-kaitai-struct-checks" ] ++ lib.optionals enableBindings [ "nix-perl-bindings" diff --git a/src/kaitai-struct-checks/.version b/src/kaitai-struct-checks/.version new file mode 120000 index 000000000..b7badcd0c --- /dev/null +++ b/src/kaitai-struct-checks/.version @@ -0,0 +1 @@ +../../.version \ No newline at end of file diff --git a/src/kaitai-struct-checks/meson.build b/src/kaitai-struct-checks/meson.build new file mode 100644 index 000000000..f705a6744 --- /dev/null +++ b/src/kaitai-struct-checks/meson.build @@ -0,0 +1,77 @@ +# Run with: +# meson test --suite kaitai-struct +# Run with: (without shell / configure) +# nix build .#nix-kaitai-struct-checks + +project( + 'nix-kaitai-struct-checks', + 'cpp', + version : files('.version'), + default_options : [ + 'cpp_std=c++23', + # 
TODO(Qyriad): increase the warning level + 'warning_level=1', + 'errorlogs=true', # Please print logs for tests that fail + ], + meson_version : '>= 1.1', + license : 'LGPL-2.1-or-later', +) + +kaitai_runtime_dep = dependency('kaitai-struct-cpp-stl-runtime', required : true) +gtest_dep = dependency('gtest') +gtest_main_dep = dependency('gtest_main', required : true) + +# Find the Kaitai Struct compiler +ksc = find_program('ksc', required : true) + +kaitai_generated_srcs = custom_target( + 'kaitai-generated-sources', + input : [ 'nar.ksy' ], + output : [ 'nix_nar.cpp', 'nix_nar.h' ], + command : [ + ksc, + '@INPUT@', + '--target', 'cpp_stl', + '--outdir', + meson.current_build_dir(), + ], +) + +nar_kaitai_lib = library( + 'nix-nar-kaitai-lib', + kaitai_generated_srcs, + dependencies : [ kaitai_runtime_dep ], + install : true, +) + +nar_kaitai_dep = declare_dependency( + link_with : nar_kaitai_lib, + sources : kaitai_generated_srcs[1], +) + +# The nar directory is a committed symlink to the actual nars location +nars_dir = meson.current_source_dir() / 'nars' + +# Get all example files +nars = [ + 'dot.nar', +] + +test_deps = [ + nar_kaitai_dep, + kaitai_runtime_dep, + gtest_main_dep, +] + +this_exe = executable( + meson.project_name(), + 'test-parse-nar.cc', + dependencies : test_deps, +) + +test( + meson.project_name(), + this_exe, + env : [ 'NIX_NARS_DIR=' + nars_dir ], + protocol : 'gtest', +) diff --git a/src/kaitai-struct-checks/nar.ksy b/src/kaitai-struct-checks/nar.ksy new file mode 120000 index 000000000..c3a79a3b6 --- /dev/null +++ b/src/kaitai-struct-checks/nar.ksy @@ -0,0 +1 @@ +../../doc/manual/source/protocols/nix-archive/nar.ksy \ No newline at end of file diff --git a/src/kaitai-struct-checks/nars b/src/kaitai-struct-checks/nars new file mode 120000 index 000000000..ed0b4ecc7 --- /dev/null +++ b/src/kaitai-struct-checks/nars @@ -0,0 +1 @@ +../libutil-tests/data/nars \ No newline at end of file diff --git 
a/src/kaitai-struct-checks/nix-meson-build-support b/src/kaitai-struct-checks/nix-meson-build-support new file mode 120000 index 000000000..0b140f56b --- /dev/null +++ b/src/kaitai-struct-checks/nix-meson-build-support @@ -0,0 +1 @@ +../../nix-meson-build-support \ No newline at end of file diff --git a/src/kaitai-struct-checks/package.nix b/src/kaitai-struct-checks/package.nix new file mode 100644 index 000000000..263dd6fd1 --- /dev/null +++ b/src/kaitai-struct-checks/package.nix @@ -0,0 +1,75 @@ +# Run with: nix build .#nix-kaitai-struct-checks +{ + lib, + mkMesonDerivation, + gtest, + meson, + ninja, + pkg-config, + kaitai-struct-compiler, + fetchzip, + kaitai-struct-cpp-stl-runtime, + # Configuration Options + version, +}: +let + inherit (lib) fileset; +in +mkMesonDerivation (finalAttrs: { + pname = "nix-kaitai-struct-checks"; + inherit version; + + workDir = ./.; + fileset = lib.fileset.unions [ + ../../nix-meson-build-support + ./nix-meson-build-support + ./.version + ../../.version + ../../doc/manual/source/protocols/nix-archive/nar.ksy + ./nars + ../../src/libutil-tests/data + ./meson.build + ./nar.ksy + (fileset.fileFilter (file: file.hasExt "cc") ./.) + (fileset.fileFilter (file: file.hasExt "hh") ./.) 
+ ]; + + outputs = [ "out" ]; + + passthru.externalNativeBuildInputs = [ + # This can go away when we bump up to 25.11 + (kaitai-struct-compiler.overrideAttrs (finalAttrs: rec { + version = "0.11"; + src = fetchzip { + url = "https://github.com/kaitai-io/kaitai_struct_compiler/releases/download/${version}/kaitai-struct-compiler-${version}.zip"; + sha256 = "sha256-j9TEilijqgIiD0GbJfGKkU1FLio9aTopIi1v8QT1b+A="; + }; + })) + ]; + + passthru.externalBuildInputs = [ + gtest + kaitai-struct-cpp-stl-runtime + ]; + + buildInputs = finalAttrs.passthru.externalBuildInputs; + + nativeBuildInputs = [ + meson + ninja + pkg-config + ] + ++ finalAttrs.passthru.externalNativeBuildInputs; + + doCheck = true; + + mesonCheckFlags = [ "--print-errorlogs" ]; + + postInstall = '' + touch $out + ''; + + meta = { + platforms = lib.platforms.all; + }; +}) diff --git a/src/kaitai-struct-checks/test-parse-nar.cc b/src/kaitai-struct-checks/test-parse-nar.cc new file mode 100644 index 000000000..456ffb127 --- /dev/null +++ b/src/kaitai-struct-checks/test-parse-nar.cc @@ -0,0 +1,48 @@ +#include +#include +#include +#include +#include + +#include + +#include +#include +#include + +#include "nix_nar.h" + +static const std::vector NarFiles = { + "empty.nar", + "dot.nar", + "dotdot.nar", + "executable-after-contents.nar", + "invalid-tag-instead-of-contents.nar", + "name-after-node.nar", + "nul-character.nar", + "slash.nar", +}; + +class NarParseTest : public ::testing::TestWithParam +{}; + +TEST_P(NarParseTest, ParseSucceeds) +{ + const auto nar_file = GetParam(); + + const char * nars_dir_env = std::getenv("NIX_NARS_DIR"); + if (nars_dir_env == nullptr) { + FAIL() << "NIX_NARS_DIR environment variable not set."; + } + + const std::filesystem::path nar_file_path = std::filesystem::path(nars_dir_env) / nar_file; + ASSERT_TRUE(std::filesystem::exists(nar_file_path)) << "Missing test file: " << nar_file_path; + + std::ifstream ifs(nar_file_path, std::ifstream::binary); + ASSERT_TRUE(ifs.is_open()) << 
"Failed to open file: " << nar_file; + kaitai::kstream ks(&ifs); + nix_nar_t nar(&ks); + ASSERT_TRUE(nar.root_node() != nullptr) << "Failed to parse NAR file: " << nar_file; +} + +INSTANTIATE_TEST_SUITE_P(AllNarFiles, NarParseTest, ::testing::ValuesIn(NarFiles)); diff --git a/src/nix/nar.md b/src/nix/nar.md index b0f70ce93..c29c2092a 100644 --- a/src/nix/nar.md +++ b/src/nix/nar.md @@ -8,7 +8,7 @@ R""( # File format For the definition of the Nix Archive file format, see -[within the protocols chapter](@docroot@/protocols/nix-archive.md) +[within the protocols chapter](@docroot@/protocols/nix-archive/index.md) of the manual. [Nix Archive]: @docroot@/store/file-system-object/content-address.md#serial-nix-archive From 72d0f7b61941225eb06762095131f2b42cd3a56a Mon Sep 17 00:00:00 2001 From: John Ericson Date: Wed, 29 Oct 2025 02:16:50 -0400 Subject: [PATCH 137/213] Document "hash derivation quotiented", resolution, and build trace Progress on #13405, which asks for an explicit characterisation of the equivalence relation like the one given here. Also progress on #11895, because we're using the term "build trace entry" instead of "realisation". Mention #9259, a future work item. 
Co-authored-by: Robert Hensing --- doc/manual/book.toml.in | 1 + doc/manual/meson.build | 2 + doc/manual/source/SUMMARY.md.in | 3 + .../source/protocols/derivation-aterm.md | 4 +- .../protocols/json/schema/derivation-v3.yaml | 4 +- doc/manual/source/store/build-trace.md | 53 +++++ doc/manual/source/store/derivation/index.md | 2 +- .../derivation/outputs/content-address.md | 4 +- .../store/derivation/outputs/input-address.md | 225 ++++++++++++++++-- doc/manual/source/store/math-notation.md | 16 ++ doc/manual/source/store/resolution.md | 219 +++++++++++++++++ doc/manual/theme/head.hbs | 15 ++ src/libstore/include/nix/store/derivations.hh | 2 +- 13 files changed, 528 insertions(+), 22 deletions(-) create mode 100644 doc/manual/source/store/build-trace.md create mode 100644 doc/manual/source/store/math-notation.md create mode 100644 doc/manual/source/store/resolution.md create mode 100644 doc/manual/theme/head.hbs diff --git a/doc/manual/book.toml.in b/doc/manual/book.toml.in index 34acf642e..bacca59ff 100644 --- a/doc/manual/book.toml.in +++ b/doc/manual/book.toml.in @@ -7,6 +7,7 @@ additional-css = ["custom.css"] additional-js = ["redirects.js"] edit-url-template = "https://github.com/NixOS/nix/tree/master/doc/manual/{path}" git-repository-url = "https://github.com/NixOS/nix" +mathjax-support = true # Handles replacing @docroot@ with a path to ./source relative to that markdown file, # {{#include handlebars}}, and the @generated@ syntax used within these. 
it mostly diff --git a/doc/manual/meson.build b/doc/manual/meson.build index fdea40098..231f7b9f8 100644 --- a/doc/manual/meson.build +++ b/doc/manual/meson.build @@ -92,6 +92,8 @@ manual = custom_target( (cd @2@; RUST_LOG=warn @1@ build -d @2@ 3>&2 2>&1 1>&3) | { grep -Fv "because fragment resolution isn't implemented" || :; } 3>&2 2>&1 1>&3 rm -rf @2@/manual mv @2@/html @2@/manual + # Remove Mathjax 2.7, because we will actually use MathJax 3.x + find @2@/manual | grep .html | xargs sed -i -e '/2.7.1.MathJax.js/d' find @2@/manual -iname meson.build -delete '''.format( python.full_path(), diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index 287dff872..b87bf93a3 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -26,9 +26,12 @@ - [Derivation Outputs and Types of Derivations](store/derivation/outputs/index.md) - [Content-addressing derivation outputs](store/derivation/outputs/content-address.md) - [Input-addressing derivation outputs](store/derivation/outputs/input-address.md) + - [Build Trace](store/build-trace.md) + - [Derivation Resolution](store/resolution.md) - [Building](store/building.md) - [Store Types](store/types/index.md) {{#include ./store/types/SUMMARY.md}} + - [Appendix: Math notation](store/math-notation.md) - [Nix Language](language/index.md) - [Data Types](language/types.md) - [String context](language/string-context.md) diff --git a/doc/manual/source/protocols/derivation-aterm.md b/doc/manual/source/protocols/derivation-aterm.md index 99e3c2be6..523678e66 100644 --- a/doc/manual/source/protocols/derivation-aterm.md +++ b/doc/manual/source/protocols/derivation-aterm.md @@ -1,6 +1,8 @@ # Derivation "ATerm" file format -For historical reasons, [store derivations][store derivation] are stored on-disk in [ATerm](https://homepages.cwi.nl/~daybuild/daily-books/technology/aterm-guide/aterm-guide.html) format. 
+For historical reasons, [store derivations][store derivation] are stored on-disk in "Annotated Term" (ATerm) format +([guide](https://homepages.cwi.nl/~daybuild/daily-books/technology/aterm-guide/aterm-guide.html), +[paper](https://doi.org/10.1002/(SICI)1097-024X(200003)30:3%3C259::AID-SPE298%3E3.0.CO;2-Y)). ## The ATerm format used diff --git a/doc/manual/source/protocols/json/schema/derivation-v3.yaml b/doc/manual/source/protocols/json/schema/derivation-v3.yaml index 9c0210bb7..fa68adcb1 100644 --- a/doc/manual/source/protocols/json/schema/derivation-v3.yaml +++ b/doc/manual/source/protocols/json/schema/derivation-v3.yaml @@ -39,9 +39,9 @@ properties: This is a guard that allows us to continue evolving this format. The choice of `3` is fairly arbitrary, but corresponds to this informal version: - - Version 0: A-Term format + - Version 0: ATerm format - - Version 1: Original JSON format, with ugly `"r:sha256"` inherited from A-Term format. + - Version 1: Original JSON format, with ugly `"r:sha256"` inherited from ATerm format. - Version 2: Separate `method` and `hashAlgo` fields in output specs diff --git a/doc/manual/source/store/build-trace.md b/doc/manual/source/store/build-trace.md new file mode 100644 index 000000000..1086dcb88 --- /dev/null +++ b/doc/manual/source/store/build-trace.md @@ -0,0 +1,53 @@ +# Build Trace + +> **Warning** +> +> This entire concept is currently +> [**experimental**](@docroot@/development/experimental-features.md#xp-feature-ca-derivations) +> and subject to change. + +The *build trace* is a [memoization table](https://en.wikipedia.org/wiki/Memoization) for builds. +It maps the inputs of builds to the outputs of builds. +Concretely, that means it maps [derivations][derivation] to maps of [output] names to [store objects][store object]. + +In general the derivations used as a key should be [*resolved*](./resolution.md). +A build trace with all-resolved-derivation keys is also called a *base build trace* for extra clarity. 
+If all the resolved inputs of a derivation are content-addressed, that means the inputs will be fully determined, leaving no ambiguity for what build was performed. +(Input-addressed inputs however are still ambiguous. They too should be locked down, but this is left as future work.) + +Accordingly, to look up an unresolved derivation, one must first resolve it to get a resolved derivation. +Resolving itself involves looking up entries in the build trace, so this is a mutually recursive process that will end up inspecting possibly many entries. + +Except for the issue with input-addressed paths called out above, base build traces are trivially *coherent* -- incoherence is not possible. +That means that the claims that each key-value base build trace entry makes are independent, and no mapping invalidates another mapping. + +Whether the mappings are *true*, i.e. the faithful recording of actual builds performed, is another matter. +Coherence is about the multiple claims of the build trace being mutually consistent, not about whether the claims are individually true or false. + +In general, there is no way to audit a build trace entry except by performing the build again from scratch. +And even in that case, a different result doesn't mean the original entry was a "lie", because the derivation being built may be non-deterministic. +As such, the decision of whether to trust a counterparty's build trace is a fundamentally subjective policy choice. +Build trace entries are typically *signed* in order to enable arbitrary public-key-based trust policies. + +## Derived build traces + +Implementations that wish to memoize the above may also keep additional *derived* build trace entries that do map unresolved derivations. +But if they do so, they *must* also keep the underlying base entries with resolved derivation keys around. +Firstly, this ensures that the derived entries are merely cache, which could be recomputed from scratch. 
+Secondly, this ensures the coherence of the derived build trace. + +Unlike with base build traces, incoherence with derived build traces is possible. +The key ingredient is that derivation resolution is only deterministic with respect to a fixed base build trace. +Without fixing the base build trace, it inherits the subjectivity of base build traces themselves. + +Concretely, suppose there are three derivations \\(a\\), \\(b\\), and \\(c\\). +Let \\(a\\) be a resolved derivation, but let \\(b\\) and \\(c\\) be unresolved and both take as an input an output of \\(a\\). +Now suppose that derived entries are made for \\(b\\) and \\(c\\) based on two different entries of \\(a\\). +(This could happen if \\(a\\) is non-deterministic, \\(a\\) and \\(b\\) are built in one store, \\(a\\) and \\(c\\) are built in another store, and then a third store substitutes from both of the first two stores.) + +If trusting the derived build trace entries for \\(b\\) and \\(c\\) requires that each's underlying entry for \\(a\\) be also trusted, the two different mappings for \\(a\\) will be caught. +However, if \\(b\\) and \\(c\\)'s entries can be combined in isolation, there will be nothing to catch the contradiction in their hidden assumptions about \\(a\\)'s output. 
+ +[derivation]: ./derivation/index.md +[output]: ./derivation/outputs/index.md +[store object]: @docroot@/store/store-object.md diff --git a/doc/manual/source/store/derivation/index.md b/doc/manual/source/store/derivation/index.md index 5b179273d..61c5335ff 100644 --- a/doc/manual/source/store/derivation/index.md +++ b/doc/manual/source/store/derivation/index.md @@ -245,7 +245,7 @@ If those other derivations *also* abide by this common case (and likewise for tr > note the ".drv" > ``` -## Extending the model to be higher-order +## Extending the model to be higher-order {#dynamic} **Experimental feature**: [`dynamic-derivations`](@docroot@/development/experimental-features.md#xp-feature-dynamic-derivations) diff --git a/doc/manual/source/store/derivation/outputs/content-address.md b/doc/manual/source/store/derivation/outputs/content-address.md index 4d5130348..aa65fbe49 100644 --- a/doc/manual/source/store/derivation/outputs/content-address.md +++ b/doc/manual/source/store/derivation/outputs/content-address.md @@ -167,10 +167,10 @@ It is only in the potential for that check to fail that they are different. > > In a future world where floating content-addressing is also stable, we in principle no longer need separate [fixed](#fixed) content-addressing. > Instead, we could always use floating content-addressing, and separately assert the precise value content address of a given store object to be used as an input (of another derivation). -> A stand-alone assertion object of this sort is not yet implemented, but its possible creation is tracked in [Issue #11955](https://github.com/NixOS/nix/issues/11955). +> A stand-alone assertion object of this sort is not yet implemented, but its possible creation is tracked in [issue #11955](https://github.com/NixOS/nix/issues/11955). > > In the current version of Nix, fixed outputs which fail their hash check are still registered as valid store objects, just not registered as outputs of the derivation which produced them. 
-> This is an optimization that means if the wrong output hash is specified in a derivation, and then the derivation is recreated with the right output hash, derivation does not need to be rebuilt --- avoiding downloading potentially large amounts of data twice. +> This is an optimization that means if the wrong output hash is specified in a derivation, and then the derivation is recreated with the right output hash, derivation does not need to be rebuilt — avoiding downloading potentially large amounts of data twice. > This optimisation prefigures the design above: > If the output hash assertion was removed outside the derivation itself, Nix could additionally not only register that outputted store object like today, but could also make note that derivation did in fact successfully download some data. For example, for the "fetch URL" example above, making such a note is tantamount to recording what data is available at the time of download at the given URL. diff --git a/doc/manual/source/store/derivation/outputs/input-address.md b/doc/manual/source/store/derivation/outputs/input-address.md index e2e15a801..3fd20f17d 100644 --- a/doc/manual/source/store/derivation/outputs/input-address.md +++ b/doc/manual/source/store/derivation/outputs/input-address.md @@ -6,26 +6,221 @@ That is to say, an input-addressed output's store path is a function not of the output itself, but of the derivation that produced it. Even if two store paths have the same contents, if they are produced in different ways, and one is input-addressed, then they will have different store paths, and thus guaranteed to not be the same store object. 
- +type FirstOrderDerivingPath = ConstantPath | FirstOrderOutputPath; +type Inputs = Set; +``` + +For the algorithm below, we adopt a derivation where the two types of (first order) derived paths are partitioned into two sets, as follows: +```typescript +type Derivation = { + // inputs: Set; // replaced + inputSrcs: Set; // new instead + inputDrvOutputs: Set; // new instead + // ...other fields... +}; +``` + +In the [currently-experimental][xp-feature-dynamic-derivations] higher-order case where outputs of outputs are allowed as [deriving paths][deriving-path] and thus derivation inputs, derivations using that generalization are not valid arguments to this function. +Those derivations must be (partially) [resolved](@docroot@/store/resolution.md) enough first, to the point where no such higher-order inputs remain. +Then, and only then, can input addresses be assigned. + +``` +function hashQuotientDerivation(drv) -> Hash: + assert(drv.outputs are input-addressed) + drv′ ← drv with { + inputDrvOutputs = ⋃( + assert(drvPath is store path) + case hashOutputsOrQuotientDerivation(readDrv(drvPath)) of + drvHash : Hash → + (drvHash.toBase16(), output) + outputHashes : Map[String, Hash] → + (outputHashes[output].toBase16(), "out") + | (drvPath, output) ∈ drv.inputDrvOutputs + ) + } + return hashSHA256(printDrv(drv′)) + +function hashOutputsOrQuotientDerivation(drv) -> Map[String, Hash] | Hash: + if drv.outputs are content-addressed: + return { + outputName ↦ hashSHA256( + "fixed:out:" + ca.printMethodAlgo() + + ":" + ca.hash.toBase16() + + ":" + ca.makeFixedOutputPath(drv.name, outputName)) + | (outputName ↦ output) ∈ drv.outputs + , ca = output.contentAddress // or get from build trace if floating + } + else: // drv.outputs are input-addressed + return hashQuotientDerivation(drv) +``` + +### `hashQuotientDerivation` + +We replace each element in the derivation's `inputDrvOutputs` using data from a call to `hashOutputsOrQuotientDerivation` on the `drvPath` of that element. 
+When `hashOutputsOrQuotientDerivation` returns a single drv hash (because the input derivation in question is input-addressing), we simply swap out the `drvPath` for that hash, and keep the same output name. +When `hashOutputsOrQuotientDerivation` returns a map of content addresses per-output, we look up the output in question, and pair it with the output name `out`. + +The resulting pseudo-derivation (with hashes instead of store paths in `inputDrvs`) is then printed (in the ["ATerm" format](@docroot@/protocols/derivation-aterm.md)) and hashed, and this becomes the hash of the "quotient derivation". + +When calculating output hashes, `hashQuotientDerivation` is called on an almost-complete input-addressing derivation, which is just missing its input-addressed outputs paths. +The derivation hash is then used to calculate output paths for each output. + +Those output paths can then be substituted into the almost-complete input-addressed derivation to complete it. + +> **Note** +> +> There may be an unintentional deviation from specification currently implemented in the `(outputHashes[output].toBase16(), "out")` case. +> This is not fatal because the deviation would only apply for content-addressing derivations with more than one output, and that only occurs in the floating case, which is [experimental][xp-feature-ca-derivations]. +> Once this bug is fixed, this note will be removed. + +### `hashOutputsOrQuotientDerivation` + +How does `hashOutputsOrQuotientDerivation` in turn work? +It consists of two main cases, based on whether the outputs of the derivation are to be input-addressed or content-addressed. + +#### Input-addressed outputs case + +In the input-addressed case, it just calls `hashQuotientDerivation`, and returns that derivation hash. +This makes `hashQuotientDerivation` and `hashOutputsOrQuotientDerivation` mutually-recursive. 
+ +> **Note** +> +> In this case, `hashQuotientDerivation` is being called on a *complete* input-addressing derivation that already has its output paths calculated. +> The `inputDrvs` substitution takes place anyways. + +#### Content-addressed outputs case + +If the outputs are [content-addressed](./content-address.md), then it computes a hash for each output derived from the content-address of that output. + +> **Note** +> +> In the [fixed](./content-address.md#fixed) content-addressing case, the outputs' content addresses are statically specified in advance, so this always just works. +> (The fixed case is what the pseudo-code shows.) +> +> In the [floating](./content-address.md#floating) case, the content addresses are not specified in advance. +> This is what the "or get from [build trace](@docroot@/store/build-trace.md) if floating" comment refers to. +> In this case, the algorithm is *stuck* until the input in question is built, and we know what the actual contents of the output in question is. +> +> That is OK however, because there is no problem with delaying the assigning of input addresses (which, remember, is what `hashQuotientDerivation` is ultimately for) until all inputs are known. + +### Performance + +The recursion in the algorithm is potentially inefficient: +it could call itself once for each path by which a subderivation can be reached, i.e., `O(V^k)` times for a derivation graph with `V` derivations and with out-degree of at most `k`. +In the actual implementation, [memoisation](https://en.wikipedia.org/wiki/Memoization) is used to reduce this cost to be proportional to the total number of `inputDrvOutputs` encountered. + +### Semantic properties + +*See [this chapter's appendix](@docroot@/store/math-notation.md) on grammar and metavariable conventions.* + +In essence, `hashQuotientDerivation` partitions input-addressing derivations into equivalence classes: every derivation in that equivalence class is mapped to the same derivation hash. 
+We can characterize this equivalence relation directly, by working bottom up. + +We start by defining an equivalence relation on first-order output deriving paths that refer to content-addressed derivation outputs. Two such paths are equivalent if they refer to the same store object: + +\\[ +\\begin{prooftree} +\\AxiomC{$d\_1$ is content-addressing} +\\AxiomC{$d\_2$ is content-addressing} +\\AxiomC{$ + {}^\*(\text{path}(d\_1), o\_1) + \= + {}^\*(\text{path}(d\_2), o\_2) +$} +\\TrinaryInfC{$(\text{path}(d\_1), o\_1) \\,\\sim_{\\mathrm{CA}}\\, (\text{path}(d\_2), o\_2)$} +\\end{prooftree} +\\] + +where \\({}^*(s, o)\\) denotes the store object that the output deriving path refers to. + +We will also need the following construction to lift any equivalence relation on \\(X\\) to an equivalence relation on (finite) sets of \\(X\\) (in short, \\(\\mathcal{P}(X)\\)): + +\\[ +\\begin{prooftree} +\\AxiomC{$\\forall a \\in A. \\exists b \\in B. a \\,\\sim\_X\\, b$} +\\AxiomC{$\\forall b \\in B. \\exists a \\in A. b \\,\\sim\_X\\, a$} +\\BinaryInfC{$A \\,\\sim_{\\mathcal{P}(X)}\\, B$} +\\end{prooftree} +\\] + +Now we can define the equivalence relation \\(\\sim_\\mathrm{IA}\\) on input-addressed derivation outputs. Two input-addressed outputs are equivalent if their derivations are equivalent (via the yet-to-be-defined \\(\\sim_{\\mathrm{IADrv}}\\) relation) and their output names are the same: + +\\[ +\\begin{prooftree} +\\AxiomC{$d\_1$ is input-addressing} +\\AxiomC{$d\_2$ is input-addressing} +\\AxiomC{$d\_1 \\,\\sim_{\\mathrm{IADrv}}\\, d\_2$} +\\AxiomC{$o\_1 = o\_2$} +\\QuaternaryInfC{$(\text{path}(d\_1), o\_1) \\,\\sim_{\\mathrm{IA}}\\, (\text{path}(d\_2), o\_2)$} +\\end{prooftree} +\\] + +And now we can define \\(\\sim_{\\mathrm{IADrv}}\\). 
+Two input-addressed derivations are equivalent if their content-addressed inputs are equivalent, their input-addressed inputs are also equivalent, and they are otherwise equal: + + + +\\[ +\\begin{prooftree} +\\alwaysNoLine +\\AxiomC{$ + \\mathrm{caInputs}(d\_1) + \\,\\sim_{\\mathcal{P}(\\mathrm{CA})}\\, + \\mathrm{caInputs}(d\_2) +$} +\\AxiomC{$ + \\mathrm{iaInputs}(d\_1) + \\,\\sim_{\\mathcal{P}(\\mathrm{IA})}\\, + \\mathrm{iaInputs}(d\_2) +$} +\\BinaryInfC{$ + d\_1\left[\\mathrm{inputDrvOutputs} := \\{\\}\right] + \= + d\_2\left[\\mathrm{inputDrvOutputs} := \\{\\}\right] +$} +\\alwaysSingleLine +\\UnaryInfC{$d\_1 \\,\\sim_{\\mathrm{IADrv}}\\, d\_2$} +\\end{prooftree} +\\] + +where \\(\\mathrm{caInputs}(d)\\) returns the content-addressed inputs of \\(d\\) and \\(\\mathrm{iaInputs}(d)\\) returns the input-addressed inputs. + +> **Note** +> +> An astute reader might notice that nowhere does `inputSrcs` enter into these definitions. +> That means that replacing an input derivation with its outputs directly added to `inputSrcs` always results in a derivation in a different equivalence class, despite the resulting input closure (as would be mounted in the store at build time) being the same. +> [Issue #9259](https://github.com/NixOS/nix/issues/9259) is about creating a coarser equivalence relation to address this. +> +> \\(\\sim_\mathrm{Drv}\\) from [derivation resolution](@docroot@/store/resolution.md) is such an equivalence relation. +> It is coarser than this one: any two derivations which are "'hash quotient derivation'-equivalent" (\\(\\sim_\mathrm{IADrv}\\)) are also "resolution-equivalent" (\\(\\sim_\mathrm{Drv}\\)). +> It also relates derivations whose `inputDrvOutputs` have been rewritten into `inputSrcs`. 
+ +[deriving-path]: @docroot@/store/derivation/index.md#deriving-path +[xp-feature-dynamic-derivations]: @docroot@/development/experimental-features.md#xp-feature-dynamic-derivations [xp-feature-ca-derivations]: @docroot@/development/experimental-features.md#xp-feature-ca-derivations -[xp-feature-git-hashing]: @docroot@/development/experimental-features.md#xp-feature-git-hashing -[xp-feature-impure-derivations]: @docroot@/development/experimental-features.md#xp-feature-impure-derivations diff --git a/doc/manual/source/store/math-notation.md b/doc/manual/source/store/math-notation.md new file mode 100644 index 000000000..723982e73 --- /dev/null +++ b/doc/manual/source/store/math-notation.md @@ -0,0 +1,16 @@ +# Appendix: Math notation + +A few times in this manual, formal "proof trees" are used for [natural deduction](https://en.wikipedia.org/wiki/Natural_deduction)-style definition of various [relations](https://en.wikipedia.org/wiki/Relation_(mathematics)). + +The following grammar and assignment of metavariables to syntactic categories is used in these sections. + +\\begin{align} +s, t &\in \text{store-path} \\\\ +o &\in \text{output-name} \\\\ +i, p &\in \text{deriving-path} \\\\ +d &\in \text{derivation} +\\end{align} + +\\begin{align} +\text{deriving-path} \quad p &::= s \mid (p, o) +\\end{align} diff --git a/doc/manual/source/store/resolution.md b/doc/manual/source/store/resolution.md new file mode 100644 index 000000000..9a87fea99 --- /dev/null +++ b/doc/manual/source/store/resolution.md @@ -0,0 +1,219 @@ +# Derivation Resolution + +*See [this chapter's appendix](@docroot@/store/math-notation.md) on grammar and metavariable conventions.* + +To *resolve* a derivation is to replace its [inputs] with the simplest inputs — plain store paths — that denote the same store objects. + +Derivations that only have store paths as inputs are likewise called *resolved derivations*. 
+(They are called that whether they are in fact the output of derivation resolution, or just made that way without non-store-path inputs to begin with.) + +## Input Content Equivalence of Derivations + +[Deriving paths][deriving-path] intentionally make it possible to refer to the same [store object] in multiple ways. +This is a consequence of content-addressing, since different derivations can produce the same outputs, and the same data can also be manually added to the store. +This is also a consequence even of input-addressing, as an output can be referred to by derivation and output name, or directly by its [computed](./derivation/outputs/input-address.md) store path. +Since dereferencing deriving paths is thus not injective, it induces an equivalence relation on deriving paths. + +Let's call this equivalence relation \\(\\sim\\), where \\(p_1 \\sim p_2\\) means that deriving paths \\(p_1\\) and \\(p_2\\) refer to the same store object. + +**Content Equivalence**: Two deriving paths are equivalent if they refer to the same store object: + +\\[ +\\begin{prooftree} +\\AxiomC{${}^*p_1 = {}^*p_2$} +\\UnaryInfC{$p_1 \\,\\sim_\\mathrm{DP}\\, p_2$} +\\end{prooftree} +\\] + +where \\({}^\*p\\) denotes the store object that deriving path \\(p\\) refers to. 
+ +This also induces an equivalence relation on sets of deriving paths: + +\\[ +\\begin{prooftree} +\\AxiomC{$\\{ {}^*p | p \\in P_1 \\} = \\{ {}^*p | p \\in P_2 \\}$} +\\UnaryInfC{$P_1 \\,\\sim_{\\mathcal{P}(\\mathrm{DP})}\\, P_2$} +\\end{prooftree} +\\] + +**Input Content Equivalence**: This, in turn, induces an equivalence relation on derivations: two derivations are equivalent if their inputs are equivalent, and they are otherwise equal: + +\\[ +\\begin{prooftree} +\\AxiomC{$\\mathrm{inputs}(d_1) \\,\\sim_{\\mathcal{P}(\\mathrm{DP})}\\, \\mathrm{inputs}(d_2)$} +\\AxiomC{$ + d\_1\left[\\mathrm{inputs} := \\{\\}\right] + \= + d\_2\left[\\mathrm{inputs} := \\{\\}\right] +$} +\\BinaryInfC{$d_1 \\,\\sim_\\mathrm{Drv}\\, d_2$} +\\end{prooftree} +\\] + +Derivation resolution always maps derivations to input-content-equivalent derivations. + +## Resolution relation + +Dereferencing a derived path — \\({}^\*p\\) above — was just introduced as a black box. +But actually it is a multi-step process of looking up build results in the [build trace] that itself depends on resolving the lookup keys. +Resolution is thus a recursive multi-step process that is worth diagramming formally. + +We can do this with a small-step binary transition relation; let's call it \\(\rightsquigarrow\\). +We can then conclude dereferenced equality like this: + +\\[ +\\begin{prooftree} +\\AxiomC{$p\_1 \\rightsquigarrow^* p$} +\\AxiomC{$p\_2 \\rightsquigarrow^* p$} +\\BinaryInfC{${}^*p\_1 = {}^*p\_2$} +\\end{prooftree} +\\] + +I.e. by showing that both original items resolve (over 0 or more small steps, hence the \\({}^*\\)) to the same exact item. + +With this motivation, let's now formalize a [small-step](https://en.wikipedia.org/wiki/Operational_semantics#Small-step_semantics) system of reduction rules for resolution. 
+ +### Formal rules + +### \\(\text{resolved}\\) unary relation + +\\[ +\\begin{prooftree} +\\AxiomC{$s \in \text{store-path}$} +\\UnaryInfC{$s$ resolved} +\\end{prooftree} +\\] + +\\[ +\\begin{prooftree} +\\AxiomC{$\forall i \in \mathrm{inputs}(d). i \text{ resolved}$} +\\UnaryInfC{$d$ resolved} +\\end{prooftree} +\\] + +### \\(\rightsquigarrow\\) binary relation + +> **Remark** +> +> Actually, to be completely formal we would need to keep track of the build trace we are choosing to resolve against. +> +> We could do that by making \\(\rightsquigarrow\\) a ternary relation, which would pass the build trace to itself until it finally uses it in that one rule. +> This would add clutter more than insight, so we didn't bother to write it. +> +> There are other options too, like saying the whole reduction rule system is parameterized on the build trace, essentially [currying](https://en.wikipedia.org/wiki/Currying) the ternary \\(\rightsquigarrow\\) into a function from build traces to the binary relation written above. 
+
+#### Core build trace lookup rule
+
+\\[
+\\begin{prooftree}
+\\AxiomC{$s \in \text{store-path}$}
+\\AxiomC{${}^*s \in \text{derivation}$}
+\\AxiomC{${}^*s$ resolved}
+\\AxiomC{$\mathrm{build\text{-}trace}[s][o] = t$}
+\\RightLabel{\\scriptsize output path resolution}
+\\QuaternaryInfC{$(s, o) \rightsquigarrow t$}
+\\end{prooftree}
+\\]
+
+#### Inductive rules
+
+\\[
+\\begin{prooftree}
+\\AxiomC{$i \\rightsquigarrow i'$}
+\\AxiomC{$i \\in \\mathrm{inputs}(d)$}
+\\BinaryInfC{$d \\rightsquigarrow d[i \\mapsto i']$}
+\\end{prooftree}
+\\]
+
+\\[
+\\begin{prooftree}
+\\AxiomC{$d \\rightsquigarrow d'$}
+\\UnaryInfC{$(\\mathrm{path}(d), o) \\rightsquigarrow (\\mathrm{path}(d'), o)$}
+\\end{prooftree}
+\\]
+
+\\[
+\\begin{prooftree}
+\\AxiomC{$p \\rightsquigarrow p'$}
+\\UnaryInfC{$(p, o) \\rightsquigarrow (p', o)$}
+\\end{prooftree}
+\\]
+
+### Properties
+
+Like all well-behaved evaluation relations, partial resolution is [*confluent*](https://en.wikipedia.org/wiki/Confluence_(abstract_rewriting)).
+Also, if we take the symmetric closure of \\(\\rightsquigarrow^\*\\), we end up with the equivalence relations of the previous section.
+Resolution respects content equivalence for deriving paths, and input content equivalence for derivations.
+
+> **Remark**
+>
+> We chose to define from scratch a "resolved" unary relation explicitly above.
+> But it can also be defined as the normal forms of the \\(\\rightsquigarrow^\*\\) relation:
+>
+> \\[ a \text{ resolved} \Leftrightarrow \forall b. b \rightsquigarrow^* a \Rightarrow b = a\\]
+>
+> In prose, resolved terms are terms which \\(\\rightsquigarrow^\*\\) only relates on the left side to the same term on the right side; they are the terms which can be resolved no further.
+
+## Partial versus Complete Resolution
+
+Similar to evaluation, we can also speak of *partial* versus *complete* derivation resolution.
+Partial derivation resolution is what we've actually formalized above with \\(\\rightsquigarrow^\*\\).
+
+Complete resolution is resolution ending in a resolved term (deriving path or derivation).
+(Which is a normal form of the relation, per the remark above.)
+
+With partial resolution, a derivation is related to equivalent derivations with the same or simpler inputs, but not all those inputs will be plain store paths.
+This is useful when the input refers to a floating content-addressed output we have not yet built — we don't know what (content-address) store path will be used for that derivation, so we are "stuck" trying to resolve the deriving path in question.
+(In the above formalization, this happens when the build trace is missing the keys we wish to look up in it.)
+
+Complete resolution is a *functional* relation, i.e. values on the left are uniquely related with values on the right.
+It is not, however, a *total* relation (in general, assuming arbitrary build traces).
+This is discussed in the next section.
+
+## Termination
+
+For static derivation graphs, complete resolution is indeed total, because it always terminates for all inputs.
+(A relation that is both total and functional is a function.)
+
+For [dynamic][xp-feature-dynamic-derivations] derivation graphs, however, this is not the case — resolution is not guaranteed to terminate.
+The issue isn't rewriting deriving paths themselves:
+a single rewrite to normalize an output deriving path to a constant one always exists, and always proceeds in one step.
+The issue is that dynamic derivations (i.e. those that are filled in the graph by a previous resolution) may have more transitive dependencies than the original derivation.
+
+> **Example**
+>
+> Suppose we have this deriving path
+> ```json
+> {
+>   "drvPath": {
+>     "drvPath": "...-foo.drv",
+>     "output": "bar.drv"
+>   },
+>   "output": "baz"
+> }
+> ```
+> and derivation `foo` is already resolved.
+> When we resolve the deriving path, we'll end up with something like:
+> ```json
+> {
+>   "drvPath": "...-foo-bar.drv",
+>   "output": "baz"
+> }
+> ```
+> So far this is just an atomic single rewrite, with no termination issues.
+> But the derivation `foo-bar` may have its *own* dynamic derivation inputs.
+> Resolution must resolve that derivation first before the above deriving path can finally be normalized to a plain `...-foo-bar-baz` store path.
+
+The important thing to notice is that while "build trace" *keys* must be resolved,
+the *values* those keys are mapped to have no such constraints.
+An arbitrary store object has no notion of being resolved or not.
+But, an arbitrary store object can be read back as a derivation (as will in fact be done in the case of dynamic derivations / nested output deriving paths).
+And those derivations need *not* be resolved.
+
+It is those dynamic non-resolved derivations which are the source of non-termination.
+By the same token, they are also the reason why dynamic derivations offer greater expressive power.
+
+[store object]: @docroot@/store/store-object.md
+[inputs]: @docroot@/store/derivation/index.md#inputs
+[build trace]: @docroot@/store/build-trace.md
+[deriving-path]: @docroot@/store/derivation/index.md#deriving-path
+[xp-feature-dynamic-derivations]: @docroot@/development/experimental-features.md#xp-feature-dynamic-derivations
diff --git a/doc/manual/theme/head.hbs b/doc/manual/theme/head.hbs
new file mode 100644
index 000000000..e514a9977
--- /dev/null
+++ b/doc/manual/theme/head.hbs
@@ -0,0 +1,15 @@
+
+
+
diff --git a/src/libstore/include/nix/store/derivations.hh b/src/libstore/include/nix/store/derivations.hh
index 4615d8acd..259314d3f 100644
--- a/src/libstore/include/nix/store/derivations.hh
+++ b/src/libstore/include/nix/store/derivations.hh
@@ -277,7 +277,7 @@ struct BasicDerivation
     Path builder;
     Strings args;
     /**
-     * Must not contain the key `__json`, at least in order to serialize to A-Term.
+     * Must not contain the key `__json`, at least in order to serialize to ATerm.
*/ StringPairs env; std::optional structuredAttrs; From 0d7b16da4dae9dc8f07756a6ef87caf17406e835 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 14 Oct 2025 13:03:18 -0400 Subject: [PATCH 138/213] Split realisation protocol unit tests This will allow us to more accurately test dropping support for dependent realisations, by separating the tests that should not change from the tests that should. I do that change in PR #14247, but even if for some reasons we don't end up doing this soon, I think it is still good to separate the test data this way so we have the option of doing that at some point. --- src/libstore-tests/common-protocol.cc | 19 +++++++++++++-- .../common-protocol/realisation-with-deps.bin | Bin 0 -> 320 bytes .../data/common-protocol/realisation.bin | Bin 520 -> 384 bytes .../serve-protocol/realisation-with-deps.bin | Bin 0 -> 320 bytes .../data/serve-protocol/realisation.bin | Bin 520 -> 384 bytes .../worker-protocol/realisation-with-deps.bin | Bin 0 -> 320 bytes .../data/worker-protocol/realisation.bin | Bin 520 -> 384 bytes src/libstore-tests/serve-protocol.cc | 17 +++++++++++++ src/libstore-tests/worker-protocol.cc | 23 +++++++++++++++--- 9 files changed, 54 insertions(+), 5 deletions(-) create mode 100644 src/libstore-tests/data/common-protocol/realisation-with-deps.bin create mode 100644 src/libstore-tests/data/serve-protocol/realisation-with-deps.bin create mode 100644 src/libstore-tests/data/worker-protocol/realisation-with-deps.bin diff --git a/src/libstore-tests/common-protocol.cc b/src/libstore-tests/common-protocol.cc index 2c001957b..7c40e8cdb 100644 --- a/src/libstore-tests/common-protocol.cc +++ b/src/libstore-tests/common-protocol.cc @@ -114,13 +114,28 @@ CHARACTERIZATION_TEST( Realisation{ { .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, - .signatures = {"asdf", "qwer"}, }, - DrvOutput{ + { .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), .outputName = "baz", }, }, + Realisation{ + 
{ + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + .signatures = {"asdf", "qwer"}, + }, + { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + }, + })) + +CHARACTERIZATION_TEST( + realisation_with_deps, + "realisation-with-deps", + (std::tuple{ Realisation{ { .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, diff --git a/src/libstore-tests/data/common-protocol/realisation-with-deps.bin b/src/libstore-tests/data/common-protocol/realisation-with-deps.bin new file mode 100644 index 0000000000000000000000000000000000000000..54a78b64ebcf0726bd7da506434a316c7336e6e9 GIT binary patch literal 320 zcmXqFWB`L|rIgfy)V!3`ypo{Q#GK6H#FEVXykaG*YNg_gL?cr(E3-5UGs`r~)I=i- zBjco$L_;&vR0A^ubF;J*gOpSgV>7dq)Wj5{WP?bMq9FG(*#5V{_v) zQ^ms4(h4OjrF6q`^NdR4LR_?NT7JG#t&UP=ijoz~ZbQ>l<787a0}D%&X3lj^AWJ^m+6HD_n6H{ZuBr{6`^F(t~3&Z48vlLS!bH${@DkUAI v{L+$u#F7kR9igLCoSB}NSW;S)TC8Lht&~`tlBT4iR9K!`q!e2V4mJh=YNB5O literal 0 HcmV?d00001 diff --git a/src/libstore-tests/data/common-protocol/realisation.bin b/src/libstore-tests/data/common-protocol/realisation.bin index 2176c6c4afd96b32fe372de6084ac7f4c7a11d49..3a0b2b2d8e393e8786bec9d1f3dec9fb1d17ce13 100644 GIT binary patch delta 14 VcmeBRX<%mDFp+U9Q*7;o696H%1v~%% delta 107 zcmZo*?qFfuJCSkg7dq)Wj5{WP?bMq9FG(*#5V{_v) zQ^ms4(h4OjrF6q`^NdR4LR_?NT7JG#t&UP=ijoz~ZbQ>l<787a0}D%&X3lj^AWJ^m+6HD_n6H{ZuBr{6`^F(t~3&Z48vlLS!bH${@DkUAI v{L+$u#F7kR9igLCoSB}NSW;S)TC8Lht&~`tlBT4iR9K!`q!e2V4mJh=YNB5O literal 0 HcmV?d00001 diff --git a/src/libstore-tests/data/serve-protocol/realisation.bin b/src/libstore-tests/data/serve-protocol/realisation.bin index 2176c6c4afd96b32fe372de6084ac7f4c7a11d49..3a0b2b2d8e393e8786bec9d1f3dec9fb1d17ce13 100644 GIT binary patch delta 14 VcmeBRX<%mDFp+U9Q*7;o696H%1v~%% delta 107 zcmZo*?qFfuJCSkg7dq)Wj5{WP?bMq9FG(*#5V{_v) zQ^ms4(h4OjrF6q`^NdR4LR_?NT7JG#t&UP=ijoz~ZbQ>l<787a0}D%&X3lj^AWJ^m+6HD_n6H{ZuBr{6`^F(t~3&Z48vlLS!bH${@DkUAI 
v{L+$u#F7kR9igLCoSB}NSW;S)TC8Lht&~`tlBT4iR9K!`q!e2V4mJh=YNB5O literal 0 HcmV?d00001 diff --git a/src/libstore-tests/data/worker-protocol/realisation.bin b/src/libstore-tests/data/worker-protocol/realisation.bin index 2176c6c4afd96b32fe372de6084ac7f4c7a11d49..3a0b2b2d8e393e8786bec9d1f3dec9fb1d17ce13 100644 GIT binary patch delta 14 VcmeBRX<%mDFp+U9Q*7;o696H%1v~%% delta 107 zcmZo*?qFfuJCSkg{ + Realisation{ + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + }, + { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + }, Realisation{ { .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, @@ -104,6 +113,14 @@ VERSIONED_CHARACTERIZATION_TEST( .outputName = "baz", }, }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + ServeProtoTest, + realisation_with_deps, + "realisation-with-deps", + defaultVersion, + (std::tuple{ Realisation{ { .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, diff --git a/src/libstore-tests/worker-protocol.cc b/src/libstore-tests/worker-protocol.cc index c4afde3bd..8f70e937b 100644 --- a/src/libstore-tests/worker-protocol.cc +++ b/src/libstore-tests/worker-protocol.cc @@ -150,13 +150,30 @@ VERSIONED_CHARACTERIZATION_TEST( Realisation{ { .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, - .signatures = {"asdf", "qwer"}, }, - DrvOutput{ + { .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), .outputName = "baz", }, }, + Realisation{ + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + .signatures = {"asdf", "qwer"}, + }, + { + .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), + .outputName = "baz", + }, + }, + })) + +VERSIONED_CHARACTERIZATION_TEST( + WorkerProtoTest, + realisation_with_deps, + "realisation-with-deps", + defaultVersion, + (std::tuple{ Realisation{ { .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, @@ -172,7 +189,7 @@ 
VERSIONED_CHARACTERIZATION_TEST( }, }, }, - DrvOutput{ + { .drvHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="), .outputName = "baz", }, From 144c66215b49bdadba7e11d2fd3ff9b108750274 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 28 Oct 2025 18:01:37 -0400 Subject: [PATCH 139/213] JSON Schema for build trace entry Note, starting to make progress on #11895 by calling it this in the manual. --- doc/manual/package.nix | 1 + doc/manual/source/SUMMARY.md.in | 1 + .../protocols/json/build-trace-entry.md | 27 +++++++ doc/manual/source/protocols/json/meson.build | 1 + .../json/schema/build-trace-entry-v1 | 1 + .../json/schema/build-trace-entry-v1.yaml | 74 +++++++++++++++++++ doc/manual/source/store/build-trace.md | 2 +- src/json-schema-checks/build-trace-entry | 1 + src/json-schema-checks/meson.build | 9 +++ src/json-schema-checks/package.nix | 1 + 10 files changed, 117 insertions(+), 1 deletion(-) create mode 100644 doc/manual/source/protocols/json/build-trace-entry.md create mode 120000 doc/manual/source/protocols/json/schema/build-trace-entry-v1 create mode 100644 doc/manual/source/protocols/json/schema/build-trace-entry-v1.yaml create mode 120000 src/json-schema-checks/build-trace-entry diff --git a/doc/manual/package.nix b/doc/manual/package.nix index 7d29df3c3..e13c6f33d 100644 --- a/doc/manual/package.nix +++ b/doc/manual/package.nix @@ -37,6 +37,7 @@ mkMesonDerivation (finalAttrs: { ../../src/libutil-tests/data/hash ../../src/libstore-tests/data/content-address ../../src/libstore-tests/data/store-path + ../../src/libstore-tests/data/realisation ../../src/libstore-tests/data/derived-path ../../src/libstore-tests/data/path-info ../../src/libstore-tests/data/nar-info diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index b87bf93a3..580076ece 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -126,6 +126,7 @@ - [Store Object Info](protocols/json/store-object-info.md) - 
[Derivation](protocols/json/derivation.md) - [Deriving Path](protocols/json/deriving-path.md) + - [Build Trace Entry](protocols/json/build-trace-entry.md) - [Serving Tarball Flakes](protocols/tarball-fetcher.md) - [Store Path Specification](protocols/store-path.md) - [Nix Archive (NAR) Format](protocols/nix-archive/index.md) diff --git a/doc/manual/source/protocols/json/build-trace-entry.md b/doc/manual/source/protocols/json/build-trace-entry.md new file mode 100644 index 000000000..8050a2840 --- /dev/null +++ b/doc/manual/source/protocols/json/build-trace-entry.md @@ -0,0 +1,27 @@ +{{#include build-trace-entry-v1-fixed.md}} + +## Examples + +### Simple build trace entry + +```json +{{#include schema/build-trace-entry-v1/simple.json}} +``` + +### Build trace entry with dependencies + +```json +{{#include schema/build-trace-entry-v1/with-dependent-realisations.json}} +``` + +### Build trace entry with signature + +```json +{{#include schema/build-trace-entry-v1/with-signature.json}} +``` + + \ No newline at end of file diff --git a/doc/manual/source/protocols/json/meson.build b/doc/manual/source/protocols/json/meson.build index 7ebcff697..d8e94d68c 100644 --- a/doc/manual/source/protocols/json/meson.build +++ b/doc/manual/source/protocols/json/meson.build @@ -15,6 +15,7 @@ schemas = [ 'store-object-info-v1', 'derivation-v3', 'deriving-path-v1', + 'build-trace-entry-v1', ] schema_files = files() diff --git a/doc/manual/source/protocols/json/schema/build-trace-entry-v1 b/doc/manual/source/protocols/json/schema/build-trace-entry-v1 new file mode 120000 index 000000000..0d02880a5 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/build-trace-entry-v1 @@ -0,0 +1 @@ +../../../../../../src/libstore-tests/data/realisation \ No newline at end of file diff --git a/doc/manual/source/protocols/json/schema/build-trace-entry-v1.yaml b/doc/manual/source/protocols/json/schema/build-trace-entry-v1.yaml new file mode 100644 index 000000000..cabf2c350 --- /dev/null +++ 
b/doc/manual/source/protocols/json/schema/build-trace-entry-v1.yaml @@ -0,0 +1,74 @@ +"$schema": "http://json-schema.org/draft-04/schema" +"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/build-trace-entry-v1.json" +title: Build Trace Entry +description: | + A record of a successful build outcome for a specific derivation output. + + This schema describes the JSON representation of a [build trace entry](@docroot@/store/build-trace.md) entry. + + > **Warning** + > + > This JSON format is currently + > [**experimental**](@docroot@/development/experimental-features.md#xp-feature-ca-derivations) + > and subject to change. + +type: object +required: + - id + - outPath + - dependentRealisations + - signatures +properties: + id: + type: string + title: Derivation Output ID + pattern: "^sha256:[0-9a-f]{64}![a-zA-Z_][a-zA-Z0-9_-]*$" + description: | + Unique identifier for the derivation output that was built. + + Format: `{hash-quotient-drv}!{output-name}` + + - **hash-quotient-drv**: SHA-256 [hash of the quotient derivation](@docroot@/store/derivation/outputs/input-address.md#hash-quotient-drv). + Begins with `sha256:`. + + - **output-name**: Name of the specific output (e.g., "out", "dev", "doc") + + Example: `"sha256:ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad!foo"` + + outPath: + "$ref": "store-path-v1.yaml" + title: Output Store Path + description: | + The path to the store object that resulted from building this derivation for the given output name. + + dependentRealisations: + type: object + title: Underlying Base Build Trace + description: | + This is for [*derived*](@docroot@/store/build-trace.md#derived) build trace entries to ensure coherence. + + Keys are derivation output IDs (same format as the main `id` field). + Values are the store paths that those dependencies resolved to. 
+ + As described in the linked section on derived build trace traces, derived build trace entries must be kept in addition and not instead of the underlying base build entries. + This is the set of base build trace entries that this derived build trace is derived from. + (The set is also a map since this miniature base build trace must be coherent, mapping each key to a single value.) + + patternProperties: + "^sha256:[0-9a-f]{64}![a-zA-Z_][a-zA-Z0-9_-]*$": + $ref: "store-path-v1.yaml" + title: Dependent Store Path + description: Store path that this dependency resolved to during the build + additionalProperties: false + + signatures: + type: array + title: Build Signatures + description: | + A set of cryptographic signatures attesting to the authenticity of this build trace entry. + items: + type: string + title: Signature + description: A single cryptographic signature + +additionalProperties: false diff --git a/doc/manual/source/store/build-trace.md b/doc/manual/source/store/build-trace.md index 1086dcb88..8860bc6c7 100644 --- a/doc/manual/source/store/build-trace.md +++ b/doc/manual/source/store/build-trace.md @@ -29,7 +29,7 @@ And even in that case, a different result doesn't mean the original entry was a As such, the decision of whether to trust a counterparty's build trace is a fundamentally subject policy choice. Build trace entries are typically *signed* in order to enable arbitrary public-key-based trust polices. -## Derived build traces +## Derived build traces {#derived} Implementations that wish to memoize the above may also keep additional *derived* build trace entries that do map unresolved derivations. But if they do so, they *must* also keep the underlying base entries with resolved derivation keys around. 
diff --git a/src/json-schema-checks/build-trace-entry b/src/json-schema-checks/build-trace-entry new file mode 120000 index 000000000..9175e750e --- /dev/null +++ b/src/json-schema-checks/build-trace-entry @@ -0,0 +1 @@ +../../src/libstore-tests/data/realisation \ No newline at end of file diff --git a/src/json-schema-checks/meson.build b/src/json-schema-checks/meson.build index 67f553162..c2c7fbff4 100644 --- a/src/json-schema-checks/meson.build +++ b/src/json-schema-checks/meson.build @@ -54,6 +54,15 @@ schemas = [ 'single_built_built.json', ], }, + { + 'stem' : 'build-trace-entry', + 'schema' : schema_dir / 'build-trace-entry-v1.yaml', + 'files' : [ + 'simple.json', + 'with-dependent-realisations.json', + 'with-signature.json', + ], + }, ] # Derivation and Derivation output diff --git a/src/json-schema-checks/package.nix b/src/json-schema-checks/package.nix index 160db003f..057a6e85b 100644 --- a/src/json-schema-checks/package.nix +++ b/src/json-schema-checks/package.nix @@ -23,6 +23,7 @@ mkMesonDerivation (finalAttrs: { ../../src/libutil-tests/data/hash ../../src/libstore-tests/data/content-address ../../src/libstore-tests/data/store-path + ../../src/libstore-tests/data/realisation ../../src/libstore-tests/data/derivation ../../src/libstore-tests/data/derived-path ../../src/libstore-tests/data/path-info From c3d4c5f69d93278dafe2c631f955ffe0e47ca689 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 3 Nov 2025 22:00:54 +0000 Subject: [PATCH 140/213] build(deps): bump cachix/install-nix-action from 31.5.1 to 31.8.2 Bumps [cachix/install-nix-action](https://github.com/cachix/install-nix-action) from 31.5.1 to 31.8.2. 
- [Release notes](https://github.com/cachix/install-nix-action/releases) - [Changelog](https://github.com/cachix/install-nix-action/blob/master/RELEASE.md) - [Commits](https://github.com/cachix/install-nix-action/compare/c134e4c9e34bac6cab09cf239815f9339aaaf84e...456688f15bc354bef6d396e4a35f4f89d40bf2b7) --- updated-dependencies: - dependency-name: cachix/install-nix-action dependency-version: 31.8.2 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 67e97b188..60c617978 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -174,7 +174,7 @@ jobs: echo "installer-url=file://$GITHUB_WORKSPACE/out" >> "$GITHUB_OUTPUT" TARBALL_PATH="$(find "$GITHUB_WORKSPACE/out" -name 'nix*.tar.xz' -print | head -n 1)" echo "tarball-path=file://$TARBALL_PATH" >> "$GITHUB_OUTPUT" - - uses: cachix/install-nix-action@c134e4c9e34bac6cab09cf239815f9339aaaf84e # v31.5.1 + - uses: cachix/install-nix-action@456688f15bc354bef6d396e4a35f4f89d40bf2b7 # v31.8.2 if: ${{ !matrix.experimental-installer }} with: install_url: ${{ format('{0}/install', steps.installer-tarball-url.outputs.installer-url) }} From c8e24491c0da15406b736d11f5095a4c504d263c Mon Sep 17 00:00:00 2001 From: Farid Zakaria Date: Mon, 3 Nov 2025 14:19:54 -0800 Subject: [PATCH 141/213] Fix warning in kaitai spec Warning: ``` [39/483] Generating src/kaitai-struct-checks/kaitai-generated-sources with a custom command ../src/kaitai-struct-checks/nar.ksy: /types/padded_str/seq/1/encoding: warning: use canonical encoding name `ASCII` instead of `ascii` (see https://doc.kaitai.io/ksy_style_guide.html#encoding-name) ``` --- doc/manual/source/protocols/nix-archive/nar.ksy | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/manual/source/protocols/nix-archive/nar.ksy 
b/doc/manual/source/protocols/nix-archive/nar.ksy index 1cad09097..6a172b276 100644 --- a/doc/manual/source/protocols/nix-archive/nar.ksy +++ b/doc/manual/source/protocols/nix-archive/nar.ksy @@ -29,7 +29,7 @@ types: - id: body type: str size: len_str - encoding: 'ascii' + encoding: 'ASCII' - id: padding size: (8 - (len_str % 8)) % 8 From 389bcba97a1295440a24c887840b1af3e73f0dd3 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 30 Oct 2025 13:42:52 -0400 Subject: [PATCH 142/213] JSON Impl and schema for `BuildResult` --- doc/manual/package.nix | 1 + doc/manual/source/SUMMARY.md.in | 1 + .../source/protocols/json/build-result.md | 21 +++ doc/manual/source/protocols/json/meson.build | 1 + .../protocols/json/schema/build-result-v1 | 1 + .../json/schema/build-result-v1.yaml | 136 +++++++++++++++++ src/json-schema-checks/build-result | 1 + src/json-schema-checks/meson.build | 9 ++ src/json-schema-checks/package.nix | 1 + src/libstore-tests/build-result.cc | 108 +++++++++++++ .../data/build-result/not-deterministic.json | 9 ++ .../data/build-result/output-rejected.json | 9 ++ .../data/build-result/success.json | 23 +++ src/libstore-tests/meson.build | 1 + src/libstore/build-result.cc | 142 ++++++++++++++++++ .../include/nix/store/build-result.hh | 3 + 16 files changed, 467 insertions(+) create mode 100644 doc/manual/source/protocols/json/build-result.md create mode 120000 doc/manual/source/protocols/json/schema/build-result-v1 create mode 100644 doc/manual/source/protocols/json/schema/build-result-v1.yaml create mode 120000 src/json-schema-checks/build-result create mode 100644 src/libstore-tests/build-result.cc create mode 100644 src/libstore-tests/data/build-result/not-deterministic.json create mode 100644 src/libstore-tests/data/build-result/output-rejected.json create mode 100644 src/libstore-tests/data/build-result/success.json diff --git a/doc/manual/package.nix b/doc/manual/package.nix index e13c6f33d..343e40016 100644 --- a/doc/manual/package.nix +++ 
b/doc/manual/package.nix @@ -41,6 +41,7 @@ mkMesonDerivation (finalAttrs: { ../../src/libstore-tests/data/derived-path ../../src/libstore-tests/data/path-info ../../src/libstore-tests/data/nar-info + ../../src/libstore-tests/data/build-result # Too many different types of files to filter for now ../../doc/manual ./. diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index 580076ece..5be3d6a90 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -127,6 +127,7 @@ - [Derivation](protocols/json/derivation.md) - [Deriving Path](protocols/json/deriving-path.md) - [Build Trace Entry](protocols/json/build-trace-entry.md) + - [Build Result](protocols/json/build-result.md) - [Serving Tarball Flakes](protocols/tarball-fetcher.md) - [Store Path Specification](protocols/store-path.md) - [Nix Archive (NAR) Format](protocols/nix-archive/index.md) diff --git a/doc/manual/source/protocols/json/build-result.md b/doc/manual/source/protocols/json/build-result.md new file mode 100644 index 000000000..527e7bcc0 --- /dev/null +++ b/doc/manual/source/protocols/json/build-result.md @@ -0,0 +1,21 @@ +{{#include build-result-v1-fixed.md}} + +## Examples + +### Successful build + +```json +{{#include schema/build-result-v1/success.json}} +``` + +### Failed build (output rejected) + +```json +{{#include schema/build-result-v1/output-rejected.json}} +``` + +### Failed build (non-deterministic) + +```json +{{#include schema/build-result-v1/not-deterministic.json}} +``` \ No newline at end of file diff --git a/doc/manual/source/protocols/json/meson.build b/doc/manual/source/protocols/json/meson.build index d8e94d68c..c56de49c7 100644 --- a/doc/manual/source/protocols/json/meson.build +++ b/doc/manual/source/protocols/json/meson.build @@ -16,6 +16,7 @@ schemas = [ 'derivation-v3', 'deriving-path-v1', 'build-trace-entry-v1', + 'build-result-v1', ] schema_files = files() diff --git a/doc/manual/source/protocols/json/schema/build-result-v1 
b/doc/manual/source/protocols/json/schema/build-result-v1 new file mode 120000 index 000000000..a143d2c50 --- /dev/null +++ b/doc/manual/source/protocols/json/schema/build-result-v1 @@ -0,0 +1 @@ +../../../../../../src/libstore-tests/data/build-result \ No newline at end of file diff --git a/doc/manual/source/protocols/json/schema/build-result-v1.yaml b/doc/manual/source/protocols/json/schema/build-result-v1.yaml new file mode 100644 index 000000000..31f59a44d --- /dev/null +++ b/doc/manual/source/protocols/json/schema/build-result-v1.yaml @@ -0,0 +1,136 @@ +"$schema": "http://json-schema.org/draft-04/schema" +"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/build-result-v1.json" +title: Build Result +description: | + This schema describes the JSON representation of Nix's `BuildResult` type, which represents the result of building a derivation or substituting store paths. + + Build results can represent either successful builds (with built outputs) or various types of failures. + +oneOf: + - "$ref": "#/$defs/success" + - "$ref": "#/$defs/failure" +type: object +required: + - success + - status +properties: + timesBuilt: + type: integer + minimum: 0 + title: Times built + description: | + How many times this build was performed. + + startTime: + type: integer + minimum: 0 + title: Start time + description: | + The start time of the build (or one of the rounds, if it was repeated), as a Unix timestamp. + + stopTime: + type: integer + minimum: 0 + title: Stop time + description: | + The stop time of the build (or one of the rounds, if it was repeated), as a Unix timestamp. + + cpuUser: + type: integer + minimum: 0 + title: User CPU time + description: | + User CPU time the build took, in microseconds. + + cpuSystem: + type: integer + minimum: 0 + title: System CPU time + description: | + System CPU time the build took, in microseconds. 
+ +"$defs": + success: + type: object + title: Successful Build Result + description: | + Represents a successful build with built outputs. + required: + - success + - status + - builtOutputs + properties: + success: + const: true + title: Success indicator + description: | + Always true for successful build results. + + status: + type: string + title: Success status + description: | + Status string for successful builds. + enum: + - "Built" + - "Substituted" + - "AlreadyValid" + - "ResolvesToAlreadyValid" + + builtOutputs: + type: object + title: Built outputs + description: | + A mapping from output names to their build trace entries. + additionalProperties: + "$ref": "build-trace-entry-v1.yaml" + + failure: + type: object + title: Failed Build Result + description: | + Represents a failed build with error information. + required: + - success + - status + - errorMsg + properties: + success: + const: false + title: Success indicator + description: | + Always false for failed build results. + + status: + type: string + title: Failure status + description: | + Status string for failed builds. + enum: + - "PermanentFailure" + - "InputRejected" + - "OutputRejected" + - "TransientFailure" + - "CachedFailure" + - "TimedOut" + - "MiscFailure" + - "DependencyFailed" + - "LogLimitExceeded" + - "NotDeterministic" + - "NoSubstituters" + - "HashMismatch" + + errorMsg: + type: string + title: Error message + description: | + Information about the error if the build failed. + + isNonDeterministic: + type: boolean + title: Non-deterministic flag + description: | + If timesBuilt > 1, whether some builds did not produce the same result. + + Note that 'isNonDeterministic = false' does not mean the build is deterministic, + just that we don't have evidence of non-determinism. 
diff --git a/src/json-schema-checks/build-result b/src/json-schema-checks/build-result new file mode 120000 index 000000000..8010d0fdd --- /dev/null +++ b/src/json-schema-checks/build-result @@ -0,0 +1 @@ +../../src/libstore-tests/data/build-result \ No newline at end of file diff --git a/src/json-schema-checks/meson.build b/src/json-schema-checks/meson.build index c2c7fbff4..65a2651b7 100644 --- a/src/json-schema-checks/meson.build +++ b/src/json-schema-checks/meson.build @@ -150,6 +150,15 @@ schemas += [ 'impure.json', ], }, + { + 'stem' : 'build-result', + 'schema' : schema_dir / 'build-result-v1.yaml', + 'files' : [ + 'success.json', + 'output-rejected.json', + 'not-deterministic.json', + ], + }, # Match exact variant { 'stem' : 'store-object-info', diff --git a/src/json-schema-checks/package.nix b/src/json-schema-checks/package.nix index 057a6e85b..5365fe75e 100644 --- a/src/json-schema-checks/package.nix +++ b/src/json-schema-checks/package.nix @@ -28,6 +28,7 @@ mkMesonDerivation (finalAttrs: { ../../src/libstore-tests/data/derived-path ../../src/libstore-tests/data/path-info ../../src/libstore-tests/data/nar-info + ../../src/libstore-tests/data/build-result ./. 
]; diff --git a/src/libstore-tests/build-result.cc b/src/libstore-tests/build-result.cc new file mode 100644 index 000000000..85e799c2a --- /dev/null +++ b/src/libstore-tests/build-result.cc @@ -0,0 +1,108 @@ +#include + +#include "nix/store/build-result.hh" +#include "nix/util/tests/json-characterization.hh" + +namespace nix { + +class BuildResultTest : public virtual CharacterizationTest +{ + std::filesystem::path unitTestData = getUnitTestData() / "build-result"; + +public: + std::filesystem::path goldenMaster(std::string_view testStem) const override + { + return unitTestData / testStem; + } +}; + +using nlohmann::json; + +struct BuildResultJsonTest : BuildResultTest, + JsonCharacterizationTest, + ::testing::WithParamInterface> +{}; + +TEST_P(BuildResultJsonTest, from_json) +{ + auto & [name, expected] = GetParam(); + readJsonTest(name, expected); +} + +TEST_P(BuildResultJsonTest, to_json) +{ + auto & [name, value] = GetParam(); + writeJsonTest(name, value); +} + +using namespace std::literals::chrono_literals; + +INSTANTIATE_TEST_SUITE_P( + BuildResultJSON, + BuildResultJsonTest, + ::testing::Values( + std::pair{ + "not-deterministic", + BuildResult{ + .inner{BuildResult::Failure{ + .status = BuildResult::Failure::NotDeterministic, + .errorMsg = "no idea why", + .isNonDeterministic = false, // Note: This field is separate from the status + }}, + .timesBuilt = 1, + }, + }, + std::pair{ + "output-rejected", + BuildResult{ + .inner{BuildResult::Failure{ + .status = BuildResult::Failure::OutputRejected, + .errorMsg = "no idea why", + .isNonDeterministic = false, + }}, + .timesBuilt = 3, + .startTime = 30, + .stopTime = 50, + }, + }, + std::pair{ + "success", + BuildResult{ + .inner{BuildResult::Success{ + .status = BuildResult::Success::Built, + .builtOutputs{ + { + "foo", + { + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo"}, + }, + DrvOutput{ + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = 
"foo", + }, + }, + }, + { + "bar", + { + { + .outPath = StorePath{"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar"}, + }, + DrvOutput{ + .drvHash = Hash::parseSRI("sha256-b4afnqKCO9oWXgYHb9DeQ2berSwOjS27rSd9TxXDc/U="), + .outputName = "bar", + }, + }, + }, + }, + }}, + .timesBuilt = 3, + .startTime = 30, + .stopTime = 50, + .cpuUser = std::chrono::microseconds(500s), + .cpuSystem = std::chrono::microseconds(604s), + }, + })); + +} // namespace nix diff --git a/src/libstore-tests/data/build-result/not-deterministic.json b/src/libstore-tests/data/build-result/not-deterministic.json new file mode 100644 index 000000000..c24a15795 --- /dev/null +++ b/src/libstore-tests/data/build-result/not-deterministic.json @@ -0,0 +1,9 @@ +{ + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "NotDeterministic", + "stopTime": 0, + "success": false, + "timesBuilt": 1 +} diff --git a/src/libstore-tests/data/build-result/output-rejected.json b/src/libstore-tests/data/build-result/output-rejected.json new file mode 100644 index 000000000..9494bf4ec --- /dev/null +++ b/src/libstore-tests/data/build-result/output-rejected.json @@ -0,0 +1,9 @@ +{ + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 30, + "status": "OutputRejected", + "stopTime": 50, + "success": false, + "timesBuilt": 3 +} diff --git a/src/libstore-tests/data/build-result/success.json b/src/libstore-tests/data/build-result/success.json new file mode 100644 index 000000000..4baadb547 --- /dev/null +++ b/src/libstore-tests/data/build-result/success.json @@ -0,0 +1,23 @@ +{ + "builtOutputs": { + "bar": { + "dependentRealisations": {}, + "id": "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!bar", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "signatures": [] + }, + "foo": { + "dependentRealisations": {}, + "id": "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!foo", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + 
"signatures": [] + } + }, + "cpuSystem": 604000000, + "cpuUser": 500000000, + "startTime": 30, + "status": "Built", + "stopTime": 50, + "success": true, + "timesBuilt": 3 +} diff --git a/src/libstore-tests/meson.build b/src/libstore-tests/meson.build index 4d464ad89..f76df8bcb 100644 --- a/src/libstore-tests/meson.build +++ b/src/libstore-tests/meson.build @@ -54,6 +54,7 @@ deps_private += gtest subdir('nix-meson-build-support/common') sources = files( + 'build-result.cc', 'common-protocol.cc', 'content-address.cc', 'derivation-advanced-attrs.cc', diff --git a/src/libstore/build-result.cc b/src/libstore/build-result.cc index ecbd27b49..e3d9e9085 100644 --- a/src/libstore/build-result.cc +++ b/src/libstore/build-result.cc @@ -1,4 +1,6 @@ #include "nix/store/build-result.hh" +#include "nix/util/json-utils.hh" +#include namespace nix { @@ -11,4 +13,144 @@ std::strong_ordering BuildResult::Success::operator<=>(const BuildResult::Succes bool BuildResult::Failure::operator==(const BuildResult::Failure &) const noexcept = default; std::strong_ordering BuildResult::Failure::operator<=>(const BuildResult::Failure &) const noexcept = default; +static constexpr std::array, 4> successStatusStrings{{ +#define ENUM_ENTRY(e) {BuildResult::Success::e, #e} + ENUM_ENTRY(Built), + ENUM_ENTRY(Substituted), + ENUM_ENTRY(AlreadyValid), + ENUM_ENTRY(ResolvesToAlreadyValid), +#undef ENUM_ENTRY +}}; + +static std::string_view successStatusToString(BuildResult::Success::Status status) +{ + for (const auto & [enumVal, str] : successStatusStrings) { + if (enumVal == status) + return str; + } + throw Error("unknown success status: %d", static_cast(status)); +} + +static BuildResult::Success::Status successStatusFromString(std::string_view str) +{ + for (const auto & [enumVal, enumStr] : successStatusStrings) { + if (enumStr == str) + return enumVal; + } + throw Error("unknown built result success status '%s'", str); +} + +static constexpr std::array, 12> failureStatusStrings{{ +#define 
ENUM_ENTRY(e) {BuildResult::Failure::e, #e} + ENUM_ENTRY(PermanentFailure), + ENUM_ENTRY(InputRejected), + ENUM_ENTRY(OutputRejected), + ENUM_ENTRY(TransientFailure), + ENUM_ENTRY(CachedFailure), + ENUM_ENTRY(TimedOut), + ENUM_ENTRY(MiscFailure), + ENUM_ENTRY(DependencyFailed), + ENUM_ENTRY(LogLimitExceeded), + ENUM_ENTRY(NotDeterministic), + ENUM_ENTRY(NoSubstituters), + ENUM_ENTRY(HashMismatch), +#undef ENUM_ENTRY +}}; + +static std::string_view failureStatusToString(BuildResult::Failure::Status status) +{ + for (const auto & [enumVal, str] : failureStatusStrings) { + if (enumVal == status) + return str; + } + throw Error("unknown failure status: %d", static_cast(status)); +} + +static BuildResult::Failure::Status failureStatusFromString(std::string_view str) +{ + for (const auto & [enumVal, enumStr] : failureStatusStrings) { + if (enumStr == str) + return enumVal; + } + throw Error("unknown built result failure status '%s'", str); +} + } // namespace nix + +namespace nlohmann { + +using namespace nix; + +void adl_serializer::to_json(json & res, const BuildResult & br) +{ + res = json::object(); + + // Common fields + res["timesBuilt"] = br.timesBuilt; + res["startTime"] = br.startTime; + res["stopTime"] = br.stopTime; + + if (br.cpuUser.has_value()) { + res["cpuUser"] = br.cpuUser->count(); + } + if (br.cpuSystem.has_value()) { + res["cpuSystem"] = br.cpuSystem->count(); + } + + // Handle success or failure variant + std::visit( + overloaded{ + [&](const BuildResult::Success & success) { + res["success"] = true; + res["status"] = successStatusToString(success.status); + res["builtOutputs"] = success.builtOutputs; + }, + [&](const BuildResult::Failure & failure) { + res["success"] = false; + res["status"] = failureStatusToString(failure.status); + res["errorMsg"] = failure.errorMsg; + res["isNonDeterministic"] = failure.isNonDeterministic; + }, + }, + br.inner); +} + +BuildResult adl_serializer::from_json(const json & _json) +{ + auto & json = getObject(_json); + 
+ BuildResult br; + + // Common fields + br.timesBuilt = getUnsigned(valueAt(json, "timesBuilt")); + br.startTime = getUnsigned(valueAt(json, "startTime")); + br.stopTime = getUnsigned(valueAt(json, "stopTime")); + + if (auto cpuUser = optionalValueAt(json, "cpuUser")) { + br.cpuUser = std::chrono::microseconds(getUnsigned(*cpuUser)); + } + if (auto cpuSystem = optionalValueAt(json, "cpuSystem")) { + br.cpuSystem = std::chrono::microseconds(getUnsigned(*cpuSystem)); + } + + // Determine success or failure based on success field + bool success = getBoolean(valueAt(json, "success")); + std::string statusStr = getString(valueAt(json, "status")); + + if (success) { + BuildResult::Success s; + s.status = successStatusFromString(statusStr); + s.builtOutputs = valueAt(json, "builtOutputs"); + br.inner = std::move(s); + } else { + BuildResult::Failure f; + f.status = failureStatusFromString(statusStr); + f.errorMsg = getString(valueAt(json, "errorMsg")); + f.isNonDeterministic = getBoolean(valueAt(json, "isNonDeterministic")); + br.inner = std::move(f); + } + + return br; +} + +} // namespace nlohmann diff --git a/src/libstore/include/nix/store/build-result.hh b/src/libstore/include/nix/store/build-result.hh index 0446c4038..4739232f8 100644 --- a/src/libstore/include/nix/store/build-result.hh +++ b/src/libstore/include/nix/store/build-result.hh @@ -7,6 +7,7 @@ #include "nix/store/derived-path.hh" #include "nix/store/realisation.hh" +#include "nix/util/json-impls.hh" namespace nix { @@ -175,3 +176,5 @@ struct KeyedBuildResult : BuildResult }; } // namespace nix + +JSON_IMPL(nix::BuildResult) From 469123eda19028ea784b78b6d21ae0d4e2c91ab3 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 15 Aug 2024 14:10:13 +0200 Subject: [PATCH 143/213] doc: Check link fragments with lychee --- flake.nix | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/flake.nix b/flake.nix index a70617b74..897889a71 100644 --- a/flake.nix +++ b/flake.nix @@ -320,7 
+320,16 @@ checks = forAllSystems ( system: - (import ./ci/gha/tests { + let + pkgs = nixpkgsFor.${system}.native; + in + { + # https://nixos.org/manual/nixpkgs/stable/index.html#tester-lycheeLinkCheck + linkcheck = pkgs.testers.lycheeLinkCheck { + site = self.packages.${system}.nix-manual + "/share/doc/nix/manual"; + }; + } + // (import ./ci/gha/tests { inherit system; pkgs = nixpkgsFor.${system}.native; nixFlake = self; From ae15d4eaf395f02a6b08e75044d5b5d48e1cb12c Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Tue, 4 Nov 2025 00:18:51 +0100 Subject: [PATCH 144/213] Fix links in the manual --- doc/manual/anchors.jq | 2 +- doc/manual/source/command-ref/nix-channel.md | 2 +- .../source/command-ref/nix-env/upgrade.md | 2 +- doc/manual/source/development/building.md | 4 ++-- doc/manual/source/development/testing.md | 2 +- doc/manual/source/glossary.md | 10 +++++----- .../source/language/advanced-attributes.md | 2 +- doc/manual/source/language/derivations.md | 2 +- doc/manual/source/language/identifiers.md | 2 +- doc/manual/source/language/index.md | 10 +++++----- doc/manual/source/language/string-context.md | 2 +- .../source/language/string-interpolation.md | 2 +- doc/manual/source/language/syntax.md | 6 +++--- .../source/protocols/json/schema/hash-v1.yaml | 2 +- .../source/protocols/nix-archive/index.md | 2 +- doc/manual/source/release-notes/rl-2.18.md | 2 +- doc/manual/source/release-notes/rl-2.19.md | 4 ++-- doc/manual/source/release-notes/rl-2.23.md | 2 +- doc/manual/source/release-notes/rl-2.24.md | 2 +- doc/manual/source/store/building.md | 2 +- doc/manual/source/store/derivation/index.md | 2 +- .../source/store/derivation/outputs/index.md | 2 +- .../store/store-object/content-address.md | 4 ++-- doc/manual/source/store/store-path.md | 2 +- src/libexpr/primops.cc | 20 +++++++++---------- src/libexpr/primops/context.cc | 4 ++-- src/libstore/include/nix/store/globals.hh | 2 +- src/libstore/include/nix/store/local-store.hh | 2 +- 
src/libutil/experimental-features.cc | 2 +- src/nix/flake.md | 2 +- 30 files changed, 53 insertions(+), 53 deletions(-) diff --git a/doc/manual/anchors.jq b/doc/manual/anchors.jq index 72309779c..4ee2bc130 100755 --- a/doc/manual/anchors.jq +++ b/doc/manual/anchors.jq @@ -3,7 +3,7 @@ def transform_anchors_html: - . | gsub($empty_anchor_regex; "") + . | gsub($empty_anchor_regex; "") | gsub($anchor_regex; "" + .text + ""); diff --git a/doc/manual/source/command-ref/nix-channel.md b/doc/manual/source/command-ref/nix-channel.md index ed9cbb41f..3d02a7d40 100644 --- a/doc/manual/source/command-ref/nix-channel.md +++ b/doc/manual/source/command-ref/nix-channel.md @@ -14,7 +14,7 @@ The moving parts of channels are: - The official channels listed at - The user-specific list of [subscribed channels](#subscribed-channels) - The [downloaded channel contents](#channels) -- The [Nix expression search path](@docroot@/command-ref/conf-file.md#conf-nix-path), set with the [`-I` option](#opt-i) or the [`NIX_PATH` environment variable](#env-NIX_PATH) +- The [Nix expression search path](@docroot@/command-ref/conf-file.md#conf-nix-path), set with the [`-I` option](#opt-I) or the [`NIX_PATH` environment variable](#env-NIX_PATH) > **Note** > diff --git a/doc/manual/source/command-ref/nix-env/upgrade.md b/doc/manual/source/command-ref/nix-env/upgrade.md index 2779363c3..bf4c1a8ed 100644 --- a/doc/manual/source/command-ref/nix-env/upgrade.md +++ b/doc/manual/source/command-ref/nix-env/upgrade.md @@ -22,7 +22,7 @@ left untouched; this is not an error. It is also not an error if an element of *args* matches no installed derivations. For a description of how *args* is mapped to a set of store paths, see -[`--install`](#operation---install). If *args* describes multiple +[`--install`](./install.md). If *args* describes multiple store paths with the same symbolic name, only the one with the highest version is installed. 
diff --git a/doc/manual/source/development/building.md b/doc/manual/source/development/building.md index 889d81d80..eb65a7247 100644 --- a/doc/manual/source/development/building.md +++ b/doc/manual/source/development/building.md @@ -66,7 +66,7 @@ You can also build Nix for one of the [supported platforms](#platforms). This section assumes you are using Nix with the [`flakes`] and [`nix-command`] experimental features enabled. [`flakes`]: @docroot@/development/experimental-features.md#xp-feature-flakes -[`nix-command`]: @docroot@/development/experimental-features.md#xp-nix-command +[`nix-command`]: @docroot@/development/experimental-features.md#xp-feature-nix-command To build all dependencies and start a shell in which all environment variables are set up so that those dependencies can be found: @@ -256,7 +256,7 @@ You can use any of the other supported environments in place of `nix-cli-ccacheS ## Editor integration The `clangd` LSP server is installed by default on the `clang`-based `devShell`s. -See [supported compilation environments](#compilation-environments) and instructions how to set up a shell [with flakes](#nix-with-flakes) or in [classic Nix](#classic-nix). +See [supported compilation environments](#compilation-environments) and instructions how to set up a shell [with flakes](#building-nix-with-flakes) or in [classic Nix](#building-nix). To use the LSP with your editor, you will want a `compile_commands.json` file telling `clangd` how we are compiling the code. Meson's configure always produces this inside the build directory. diff --git a/doc/manual/source/development/testing.md b/doc/manual/source/development/testing.md index c0b130155..7c2cbbb5d 100644 --- a/doc/manual/source/development/testing.md +++ b/doc/manual/source/development/testing.md @@ -119,7 +119,7 @@ This will: 3. Stop the program when the test fails, allowing the user to then issue arbitrary commands to GDB. 
-### Characterisation testing { #characaterisation-testing-unit } +### Characterisation testing { #characterisation-testing-unit } See [functional characterisation testing](#characterisation-testing-functional) for a broader discussion of characterisation testing. diff --git a/doc/manual/source/glossary.md b/doc/manual/source/glossary.md index e6a294e7d..502e6d4de 100644 --- a/doc/manual/source/glossary.md +++ b/doc/manual/source/glossary.md @@ -208,7 +208,7 @@ - [impure derivation]{#gloss-impure-derivation} - [An experimental feature](#@docroot@/development/experimental-features.md#xp-feature-impure-derivations) that allows derivations to be explicitly marked as impure, + [An experimental feature](@docroot@/development/experimental-features.md#xp-feature-impure-derivations) that allows derivations to be explicitly marked as impure, so that they are always rebuilt, and their outputs not reused by subsequent calls to realise them. - [Nix database]{#gloss-nix-database} @@ -279,7 +279,7 @@ See [References](@docroot@/store/store-object.md#references) for details. -- [referrer]{#gloss-reference} +- [referrer]{#gloss-referrer} A reversed edge from one [store object] to another. @@ -367,8 +367,8 @@ Nix represents files as [file system objects][file system object], and how they belong together is encoded as [references][reference] between [store objects][store object] that contain these file system objects. - The [Nix language] allows denoting packages in terms of [attribute sets](@docroot@/language/types.md#attribute-set) containing: - - attributes that refer to the files of a package, typically in the form of [derivation outputs](#output), + The [Nix language] allows denoting packages in terms of [attribute sets](@docroot@/language/types.md#type-attrs) containing: + - attributes that refer to the files of a package, typically in the form of [derivation outputs](#gloss-output), - attributes with metadata, such as information about how the package is supposed to be used. 
The exact shape of these attribute sets is up to convention. @@ -383,7 +383,7 @@ [string]: ./language/types.md#type-string [path]: ./language/types.md#type-path - [attribute name]: ./language/types.md#attribute-set + [attribute name]: ./language/types.md#type-attrs - [base directory]{#gloss-base-directory} diff --git a/doc/manual/source/language/advanced-attributes.md b/doc/manual/source/language/advanced-attributes.md index c9d64f060..f0b1a4c73 100644 --- a/doc/manual/source/language/advanced-attributes.md +++ b/doc/manual/source/language/advanced-attributes.md @@ -333,7 +333,7 @@ Here is more information on the `output*` attributes, and what values they may b `outputHashAlgo` can only be `null` when `outputHash` follows the SRI format, because in that case the choice of hash algorithm is determined by `outputHash`. - - [`outputHash`]{#adv-attr-outputHashAlgo}; [`outputHash`]{#adv-attr-outputHashMode} + - [`outputHash`]{#adv-attr-outputHash} This will specify the output hash of the single output of a [fixed-output derivation]. diff --git a/doc/manual/source/language/derivations.md b/doc/manual/source/language/derivations.md index 43eec680b..2403183fc 100644 --- a/doc/manual/source/language/derivations.md +++ b/doc/manual/source/language/derivations.md @@ -16,7 +16,7 @@ It outputs an attribute set, and produces a [store derivation] as a side effect - [`name`]{#attr-name} ([String](@docroot@/language/types.md#type-string)) A symbolic name for the derivation. - See [derivation outputs](@docroot@/store/derivation/index.md#outputs) for what this is affects. + See [derivation outputs](@docroot@/store/derivation/outputs/index.md#outputs) for what this affects.
[store path]: @docroot@/store/store-path.md diff --git a/doc/manual/source/language/identifiers.md b/doc/manual/source/language/identifiers.md index 584a2f861..67bb1eeec 100644 --- a/doc/manual/source/language/identifiers.md +++ b/doc/manual/source/language/identifiers.md @@ -16,7 +16,7 @@ An *identifier* is an [ASCII](https://en.wikipedia.org/wiki/ASCII) character seq # Names -A *name* can be written as an [identifier](#identifier) or a [string literal](./string-literals.md). +A *name* can be written as an [identifier](#identifiers) or a [string literal](./string-literals.md). > **Syntax** > diff --git a/doc/manual/source/language/index.md b/doc/manual/source/language/index.md index 1eb14e96d..116f928dc 100644 --- a/doc/manual/source/language/index.md +++ b/doc/manual/source/language/index.md @@ -137,7 +137,7 @@ This is an incomplete overview of language features, by example. - [Booleans](@docroot@/language/types.md#type-boolean) + [Booleans](@docroot@/language/types.md#type-bool) @@ -245,7 +245,7 @@ This is an incomplete overview of language features, by example. - An [attribute set](@docroot@/language/types.md#attribute-set) with attributes named `x` and `y` + An [attribute set](@docroot@/language/types.md#type-attrs) with attributes named `x` and `y` @@ -285,7 +285,7 @@ This is an incomplete overview of language features, by example. - [Lists](@docroot@/language/types.md#list) with three elements. + [Lists](@docroot@/language/types.md#type-list) with three elements. @@ -369,7 +369,7 @@ This is an incomplete overview of language features, by example. - [Attribute selection](@docroot@/language/types.md#attribute-set) (evaluates to `1`) + [Attribute selection](@docroot@/language/types.md#type-attrs) (evaluates to `1`) @@ -381,7 +381,7 @@ This is an incomplete overview of language features, by example. 
- [Attribute selection](@docroot@/language/types.md#attribute-set) with default (evaluates to `3`) + [Attribute selection](@docroot@/language/types.md#type-attrs) with default (evaluates to `3`) diff --git a/doc/manual/source/language/string-context.md b/doc/manual/source/language/string-context.md index 0d8fcdefa..65c59d865 100644 --- a/doc/manual/source/language/string-context.md +++ b/doc/manual/source/language/string-context.md @@ -111,7 +111,7 @@ It creates an [attribute set] representing the string context, which can be insp [`builtins.hasContext`]: ./builtins.md#builtins-hasContext [`builtins.getContext`]: ./builtins.md#builtins-getContext -[attribute set]: ./types.md#attribute-set +[attribute set]: ./types.md#type-attrs ## Clearing string contexts diff --git a/doc/manual/source/language/string-interpolation.md b/doc/manual/source/language/string-interpolation.md index a503d5f04..8e25d2b63 100644 --- a/doc/manual/source/language/string-interpolation.md +++ b/doc/manual/source/language/string-interpolation.md @@ -6,7 +6,7 @@ Such a construct is called *interpolated string*, and the expression inside is a [string]: ./types.md#type-string [path]: ./types.md#type-path -[attribute set]: ./types.md#attribute-set +[attribute set]: ./types.md#type-attrs > **Syntax** > diff --git a/doc/manual/source/language/syntax.md b/doc/manual/source/language/syntax.md index 85162db74..b127aca14 100644 --- a/doc/manual/source/language/syntax.md +++ b/doc/manual/source/language/syntax.md @@ -51,7 +51,7 @@ See [String literals](string-literals.md). Path literals can also include [string interpolation], besides being [interpolated into other expressions]. - [interpolated into other expressions]: ./string-interpolation.md#interpolated-expressions + [interpolated into other expressions]: ./string-interpolation.md#interpolated-expression At least one slash (`/`) must appear *before* any interpolated expression for the result to be recognized as a path. 
@@ -235,7 +235,7 @@ of object-oriented programming, for example. ## Recursive sets -Recursive sets are like normal [attribute sets](./types.md#attribute-set), but the attributes can refer to each other. +Recursive sets are like normal [attribute sets](./types.md#type-attrs), but the attributes can refer to each other. > *rec-attrset* = `rec {` [ *name* `=` *expr* `;` `]`... `}` @@ -287,7 +287,7 @@ This evaluates to `"foobar"`. ## Inheriting attributes -When defining an [attribute set](./types.md#attribute-set) or in a [let-expression](#let-expressions) it is often convenient to copy variables from the surrounding lexical scope (e.g., when you want to propagate attributes). +When defining an [attribute set](./types.md#type-attrs) or in a [let-expression](#let-expressions) it is often convenient to copy variables from the surrounding lexical scope (e.g., when you want to propagate attributes). This can be shortened using the `inherit` keyword. Example: diff --git a/doc/manual/source/protocols/json/schema/hash-v1.yaml b/doc/manual/source/protocols/json/schema/hash-v1.yaml index 316fb6d73..821546dee 100644 --- a/doc/manual/source/protocols/json/schema/hash-v1.yaml +++ b/doc/manual/source/protocols/json/schema/hash-v1.yaml @@ -51,4 +51,4 @@ additionalProperties: false description: | The hash algorithm used to compute the hash value. - `blake3` is currently experimental and requires the [`blake-hashing`](@docroot@/development/experimental-features.md#xp-feature-blake-hashing) experimental feature. + `blake3` is currently experimental and requires the [`blake-hashing`](@docroot@/development/experimental-features.md#xp-feature-blake3-hashes) experimental feature. 
diff --git a/doc/manual/source/protocols/nix-archive/index.md b/doc/manual/source/protocols/nix-archive/index.md index 4d25f63e2..bd2a8e833 100644 --- a/doc/manual/source/protocols/nix-archive/index.md +++ b/doc/manual/source/protocols/nix-archive/index.md @@ -4,7 +4,7 @@ This is the complete specification of the [Nix Archive] format. The Nix Archive format closely follows the abstract specification of a [file system object] tree, because it is designed to serialize exactly that data structure. -[Nix Archive]: @docroot@/store/file-system-object/content-address.md#nix-archive +[Nix Archive]: @docroot@/store/file-system-object/content-address.md#serial-nix-archive [file system object]: @docroot@/store/file-system-object.md The format of this specification is close to [Extended Backus–Naur form](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form), with the exception of the `str(..)` function / parameterized rule, which length-prefixes and pads strings. diff --git a/doc/manual/source/release-notes/rl-2.18.md b/doc/manual/source/release-notes/rl-2.18.md index eb26fc9e7..71b25f408 100644 --- a/doc/manual/source/release-notes/rl-2.18.md +++ b/doc/manual/source/release-notes/rl-2.18.md @@ -13,7 +13,7 @@ - The `discard-references` feature has been stabilized. This means that the - [unsafeDiscardReferences](@docroot@/development/experimental-features.md#xp-feature-discard-references) + [unsafeDiscardReferences](@docroot@/language/advanced-attributes.md#adv-attr-unsafeDiscardReferences) attribute is no longer guarded by an experimental flag and can be used freely. diff --git a/doc/manual/source/release-notes/rl-2.19.md b/doc/manual/source/release-notes/rl-2.19.md index 06c704324..04f8c9c28 100644 --- a/doc/manual/source/release-notes/rl-2.19.md +++ b/doc/manual/source/release-notes/rl-2.19.md @@ -17,8 +17,8 @@ - `nix-shell` shebang lines now support single-quoted arguments. 
-- `builtins.fetchTree` is now its own experimental feature, [`fetch-tree`](@docroot@/development/experimental-features.md#xp-fetch-tree). - This allows stabilising it independently of the rest of what is encompassed by [`flakes`](@docroot@/development/experimental-features.md#xp-fetch-tree). +- `builtins.fetchTree` is now its own experimental feature, [`fetch-tree`](@docroot@/development/experimental-features.md#xp-feature-fetch-tree). + This allows stabilising it independently of the rest of what is encompassed by [`flakes`](@docroot@/development/experimental-features.md#xp-feature-flakes). - The interface for creating and updating lock files has been overhauled: diff --git a/doc/manual/source/release-notes/rl-2.23.md b/doc/manual/source/release-notes/rl-2.23.md index e6b0e9ffc..b358a0fdc 100644 --- a/doc/manual/source/release-notes/rl-2.23.md +++ b/doc/manual/source/release-notes/rl-2.23.md @@ -14,7 +14,7 @@ - Modify `nix derivation {add,show}` JSON format [#9866](https://github.com/NixOS/nix/issues/9866) [#10722](https://github.com/NixOS/nix/pull/10722) - The JSON format for derivations has been slightly revised to better conform to our [JSON guidelines](@docroot@/development/cli-guideline.md#returning-future-proof-json). + The JSON format for derivations has been slightly revised to better conform to our [JSON guidelines](@docroot@/development/json-guideline.md). In particular, the hash algorithm and content addressing method of content-addressed derivation outputs are now separated into two fields `hashAlgo` and `method`, rather than one field with an arcane `:`-separated format. 
diff --git a/doc/manual/source/release-notes/rl-2.24.md b/doc/manual/source/release-notes/rl-2.24.md index d4af3cb51..e9b46bb22 100644 --- a/doc/manual/source/release-notes/rl-2.24.md +++ b/doc/manual/source/release-notes/rl-2.24.md @@ -93,7 +93,7 @@ - Support unit prefixes in configuration settings [#10668](https://github.com/NixOS/nix/pull/10668) - Configuration settings in Nix now support unit prefixes, allowing for more intuitive and readable configurations. For example, you can now specify [`--min-free 1G`](@docroot@/command-ref/opt-common.md#opt-min-free) to set the minimum free space to 1 gigabyte. + Configuration settings in Nix now support unit prefixes, allowing for more intuitive and readable configurations. For example, you can now specify [`--min-free 1G`](@docroot@/command-ref/conf-file.md#conf-min-free) to set the minimum free space to 1 gigabyte. This enhancement was extracted from [#7851](https://github.com/NixOS/nix/pull/7851) and is also useful for PR [#10661](https://github.com/NixOS/nix/pull/10661). diff --git a/doc/manual/source/store/building.md b/doc/manual/source/store/building.md index dbfe6b5ca..f2d470e99 100644 --- a/doc/manual/source/store/building.md +++ b/doc/manual/source/store/building.md @@ -8,7 +8,7 @@ - Once this is done, the derivation is *normalized*, replacing each input deriving path with its store path, which we now know from realising the input. -## Builder Execution +## Builder Execution {#builder-execution} The [`builder`](./derivation/index.md#builder) is executed as follows: diff --git a/doc/manual/source/store/derivation/index.md b/doc/manual/source/store/derivation/index.md index 61c5335ff..670f3b2bd 100644 --- a/doc/manual/source/store/derivation/index.md +++ b/doc/manual/source/store/derivation/index.md @@ -102,7 +102,7 @@ But rather than somehow scanning all the other fields for inputs, Nix requires t ### System {#system} -The system type on which the [`builder`](#attr-builder) executable is meant to be run. 
+The system type on which the [`builder`](#builder) executable is meant to be run. A necessary condition for Nix to schedule a given derivation on some [Nix instance] is for the "system" of that derivation to match that instance's [`system` configuration option] or [`extra-platforms` configuration option]. diff --git a/doc/manual/source/store/derivation/outputs/index.md b/doc/manual/source/store/derivation/outputs/index.md index 0683f5703..ca2ce6665 100644 --- a/doc/manual/source/store/derivation/outputs/index.md +++ b/doc/manual/source/store/derivation/outputs/index.md @@ -43,7 +43,7 @@ In particular, the specification decides: - if the content is content-addressed, how is it content addressed -- if the content is content-addressed, [what is its content address](./content-address.md#fixed-content-addressing) (and thus what is its [store path]) +- if the content is content-addressed, [what is its content address](./content-address.md#fixed) (and thus what is its [store path]) ## Types of derivations diff --git a/doc/manual/source/store/store-object/content-address.md b/doc/manual/source/store/store-object/content-address.md index 36e841fa3..7834ac510 100644 --- a/doc/manual/source/store/store-object/content-address.md +++ b/doc/manual/source/store/store-object/content-address.md @@ -1,7 +1,7 @@ # Content-Addressing Store Objects Just [like][fso-ca] [File System Objects][File System Object], -[Store Objects][Store Object] can also be [content-addressed](@docroot@/glossary.md#gloss-content-addressed), +[Store Objects][Store Object] can also be [content-addressed](@docroot@/glossary.md#gloss-content-address), unless they are [input-addressed](@docroot@/glossary.md#gloss-input-addressed-store-object). For store objects, the content address we produce will take the form of a [Store Path] rather than regular hash. 
@@ -107,7 +107,7 @@ References (to other store objects and self-references alike) are supported so l > > This method is part of the [`git-hashing`][xp-feature-git-hashing] experimental feature. -This uses the corresponding [Git](../file-system-object/content-address.md#serial-git) method of file system object content addressing. +This uses the corresponding [Git](../file-system-object/content-address.md#git) method of file system object content addressing. References are not supported. diff --git a/doc/manual/source/store/store-path.md b/doc/manual/source/store/store-path.md index beec2389b..4061f3653 100644 --- a/doc/manual/source/store/store-path.md +++ b/doc/manual/source/store/store-path.md @@ -6,7 +6,7 @@ > > A rendered store path -Nix implements references to [store objects](./index.md#store-object) as *store paths*. +Nix implements references to [store objects](./store-object.md) as *store paths*. Think of a store path as an [opaque], [unique identifier]: The only way to obtain store path is by adding or building store objects. diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 96e79fedd..d1aae64fa 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -825,10 +825,10 @@ static RegisterPrimOp primop_genericClosure( - [Int](@docroot@/language/types.md#type-int) - [Float](@docroot@/language/types.md#type-float) - - [Boolean](@docroot@/language/types.md#type-boolean) + - [Boolean](@docroot@/language/types.md#type-bool) - [String](@docroot@/language/types.md#type-string) - [Path](@docroot@/language/types.md#type-path) - - [List](@docroot@/language/types.md#list) + - [List](@docroot@/language/types.md#type-list) The result is produced by calling the `operator` on each `item` that has not been called yet, including newly added items, until no new items are added. Items are compared by their `key` attribute. 
@@ -2103,7 +2103,7 @@ static RegisterPrimOp primop_findFile( builtins.findFile builtins.nixPath "nixpkgs" ``` - A search path is represented as a list of [attribute sets](./types.md#attribute-set) with two attributes: + A search path is represented as a list of [attribute sets](./types.md#type-attrs) with two attributes: - `prefix` is a relative path. - `path` denotes a file system location @@ -2395,7 +2395,7 @@ static RegisterPrimOp primop_outputOf({ returns an input placeholder for the output of the output of `myDrv`. - This primop corresponds to the `^` sigil for [deriving paths](@docroot@/glossary.md#gloss-deriving-paths), e.g. as part of installable syntax on the command line. + This primop corresponds to the `^` sigil for [deriving paths](@docroot@/glossary.md#gloss-deriving-path), e.g. as part of installable syntax on the command line. )", .fun = prim_outputOf, .experimentalFeature = Xp::DynamicDerivations, @@ -4966,7 +4966,7 @@ static RegisterPrimOp primop_compareVersions({ version *s1* is older than version *s2*, `0` if they are the same, and `1` if *s1* is newer than *s2*. The version comparison algorithm is the same as the one used by [`nix-env - -u`](../command-ref/nix-env.md#operation---upgrade). + -u`](../command-ref/nix-env/upgrade.md). )", .fun = prim_compareVersions, }); @@ -4995,7 +4995,7 @@ static RegisterPrimOp primop_splitVersion({ .doc = R"( Split a string representing a version into its components, by the same version splitting logic underlying the version comparison in - [`nix-env -u`](../command-ref/nix-env.md#operation---upgrade). + [`nix-env -u`](../command-ref/nix-env/upgrade.md). )", .fun = prim_splitVersion, }); @@ -5045,9 +5045,9 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) Primitive value. 
It can be returned by - [comparison operators](@docroot@/language/operators.md#Comparison) + [comparison operators](@docroot@/language/operators.md#comparison) and used in - [conditional expressions](@docroot@/language/syntax.md#Conditionals). + [conditional expressions](@docroot@/language/syntax.md#conditionals). The name `true` is not special, and can be shadowed: @@ -5068,9 +5068,9 @@ void EvalState::createBaseEnv(const EvalSettings & evalSettings) Primitive value. It can be returned by - [comparison operators](@docroot@/language/operators.md#Comparison) + [comparison operators](@docroot@/language/operators.md#comparison) and used in - [conditional expressions](@docroot@/language/syntax.md#Conditionals). + [conditional expressions](@docroot@/language/syntax.md#conditionals). The name `false` is not special, and can be shadowed: diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc index 12b8ffdf9..8a9fe42e8 100644 --- a/src/libexpr/primops/context.cc +++ b/src/libexpr/primops/context.cc @@ -79,7 +79,7 @@ static RegisterPrimOp primop_unsafeDiscardOutputDependency( Create a copy of the given string where every [derivation deep](@docroot@/language/string-context.md#string-context-element-derivation-deep) string context element is turned into a - [constant](@docroot@/language/string-context.md#string-context-element-constant) + [constant](@docroot@/language/string-context.md#string-context-constant) string context element. This is the opposite of [`builtins.addDrvOutputDependencies`](#builtins-addDrvOutputDependencies). 
@@ -145,7 +145,7 @@ static RegisterPrimOp primop_addDrvOutputDependencies( .args = {"s"}, .doc = R"( Create a copy of the given string where a single - [constant](@docroot@/language/string-context.md#string-context-element-constant) + [constant](@docroot@/language/string-context.md#string-context-constant) string context element is turned into a [derivation deep](@docroot@/language/string-context.md#string-context-element-derivation-deep) string context element. diff --git a/src/libstore/include/nix/store/globals.hh b/src/libstore/include/nix/store/globals.hh index 8aa82c4a2..5ddfbee30 100644 --- a/src/libstore/include/nix/store/globals.hh +++ b/src/libstore/include/nix/store/globals.hh @@ -189,7 +189,7 @@ public: 0, "cores", R"( - Sets the value of the `NIX_BUILD_CORES` environment variable in the [invocation of the `builder` executable](@docroot@/language/derivations.md#builder-execution) of a derivation. + Sets the value of the `NIX_BUILD_CORES` environment variable in the [invocation of the `builder` executable](@docroot@/store/building.md#builder-execution) of a derivation. The `builder` executable can use this variable to control its own maximum amount of parallelism. 
diff --git a/doc/manual/source/protocols/json/meson.build b/doc/manual/source/protocols/json/meson.build index c56de49c7..c0b8416d7 100644 --- a/doc/manual/source/protocols/json/meson.build +++ b/doc/manual/source/protocols/json/meson.build @@ -13,7 +13,7 @@ schemas = [ 'content-address-v1', 'store-path-v1', 'store-object-info-v1', - 'derivation-v3', + 'derivation-v4', 'deriving-path-v1', 'build-trace-entry-v1', 'build-result-v1', diff --git a/doc/manual/source/protocols/json/schema/derivation-v3.yaml b/doc/manual/source/protocols/json/schema/derivation-v4.yaml similarity index 78% rename from doc/manual/source/protocols/json/schema/derivation-v3.yaml rename to doc/manual/source/protocols/json/schema/derivation-v4.yaml index fa68adcb1..2528f7502 100644 --- a/doc/manual/source/protocols/json/schema/derivation-v3.yaml +++ b/doc/manual/source/protocols/json/schema/derivation-v4.yaml @@ -1,8 +1,8 @@ "$schema": "http://json-schema.org/draft-04/schema" -"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/derivation-v3.json" +"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/derivation-v4.json" title: Derivation description: | - Experimental JSON representation of a Nix derivation (version 3). + Experimental JSON representation of a Nix derivation (version 4). This schema describes the JSON representation of Nix's `Derivation` type. @@ -17,8 +17,7 @@ required: - name - version - outputs - - inputSrcs - - inputDrvs + - inputs - system - builder - args @@ -32,10 +31,10 @@ properties: Used when calculating store paths for the derivation’s outputs. version: - const: 3 - title: Format version (must be 3) + const: 4 + title: Format version (must be 4) description: | - Must be `3`. + Must be `4`. This is a guard that allows us to continue evolving this format. The choice of `3` is fairly arbitrary, but corresponds to this informal version: @@ -47,6 +46,12 @@ properties: - Version 3: Drop store dir from store paths, just include base name. 
+ - Version 4: Two cleanups, batched together to lessen churn: + + - Reorganize inputs into nested structure (`inputs.srcs` and `inputs.drvs`) + + - Use canonical content address JSON format for floating content addressed derivation outputs. + Note that while this format is experimental, the maintenance of versions is best-effort, and not promised to identify every change. outputs: @@ -70,47 +75,56 @@ properties: additionalProperties: "$ref": "#/$defs/output/overall" - inputSrcs: - type: array - title: Input source paths - description: | - List of store paths on which this derivation depends. - - > **Example** - > - > ```json - > "inputSrcs": [ - > "47y241wqdhac3jm5l7nv0x4975mb1975-separate-debug-info.sh", - > "56d0w71pjj9bdr363ym3wj1zkwyqq97j-fix-pop-var-context-error.patch" - > ] - > ``` - items: - $ref: "store-path-v1.yaml" - - inputDrvs: + inputs: type: object - title: Input derivations + title: Derivation inputs description: | - Mapping of derivation paths to lists of output names they provide. - - > **Example** - > - > ```json - > "inputDrvs": { - > "6lkh5yi7nlb7l6dr8fljlli5zfd9hq58-curl-7.73.0.drv": ["dev"], - > "fn3kgnfzl5dzym26j8g907gq3kbm8bfh-unzip-6.0.drv": ["out"] - > } - > ``` - > - > specifies that this derivation depends on the `dev` output of `curl`, and the `out` output of `unzip`. - patternProperties: - "^[0123456789abcdfghijklmnpqrsvwxyz]{32}-.+\\.drv$": - title: Store Path + Input dependencies for the derivation, organized into source paths and derivation dependencies. + required: + - srcs + - drvs + properties: + srcs: + type: array + title: Input source paths description: | - A store path to a derivation, mapped to the outputs of that derivation. - oneOf: - - "$ref": "#/$defs/outputNames" - - "$ref": "#/$defs/dynamicOutputs" + List of store paths on which this derivation depends. 
+ + > **Example** + > + > ```json + > "srcs": [ + > "47y241wqdhac3jm5l7nv0x4975mb1975-separate-debug-info.sh", + > "56d0w71pjj9bdr363ym3wj1zkwyqq97j-fix-pop-var-context-error.patch" + > ] + > ``` + items: + $ref: "store-path-v1.yaml" + drvs: + type: object + title: Input derivations + description: | + Mapping of derivation paths to lists of output names they provide. + + > **Example** + > + > ```json + > "drvs": { + > "6lkh5yi7nlb7l6dr8fljlli5zfd9hq58-curl-7.73.0.drv": ["dev"], + > "fn3kgnfzl5dzym26j8g907gq3kbm8bfh-unzip-6.0.drv": ["out"] + > } + > ``` + > + > specifies that this derivation depends on the `dev` output of `curl`, and the `out` output of `unzip`. + patternProperties: + "^[0123456789abcdfghijklmnpqrsvwxyz]{32}-.+\\.drv$": + title: Store Path + description: | + A store path to a derivation, mapped to the outputs of that derivation. + oneOf: + - "$ref": "#/$defs/outputNames" + - "$ref": "#/$defs/dynamicOutputs" + additionalProperties: false additionalProperties: false system: @@ -189,24 +203,18 @@ properties: The output is content-addressed, and the content-address is fixed in advance. See [Fixed-output content-addressing](@docroot@/store/derivation/outputs/content-address.md#fixed) for more details. - type: object + "$ref": "./content-address-v1.yaml" required: - method - - hashAlgo - hash properties: method: - "$ref": "./content-address-v1.yaml#/$defs/method" description: | Method of content addressing used for this output. - hashAlgo: - title: Hash algorithm - "$ref": "./hash-v1.yaml#/$defs/algorithm" hash: - type: string title: Expected hash value description: | - The expected content hash in base-16. + The expected content hash. 
additionalProperties: false caFloating: diff --git a/src/json-schema-checks/meson.build b/src/json-schema-checks/meson.build index 65a2651b7..fedacedeb 100644 --- a/src/json-schema-checks/meson.build +++ b/src/json-schema-checks/meson.build @@ -70,7 +70,7 @@ schemas += [ # Match overall { 'stem' : 'derivation', - 'schema' : schema_dir / 'derivation-v3.yaml', + 'schema' : schema_dir / 'derivation-v4.yaml', 'files' : [ 'dyn-dep-derivation.json', 'simple-derivation.json', @@ -78,7 +78,7 @@ schemas += [ }, { 'stem' : 'derivation', - 'schema' : schema_dir / 'derivation-v3.yaml#/$defs/output/overall', + 'schema' : schema_dir / 'derivation-v4.yaml#/$defs/output/overall', 'files' : [ 'output-caFixedFlat.json', 'output-caFixedNAR.json', @@ -92,14 +92,14 @@ schemas += [ # Match exact variant { 'stem' : 'derivation', - 'schema' : schema_dir / 'derivation-v3.yaml#/$defs/output/inputAddressed', + 'schema' : schema_dir / 'derivation-v4.yaml#/$defs/output/inputAddressed', 'files' : [ 'output-inputAddressed.json', ], }, { 'stem' : 'derivation', - 'schema' : schema_dir / 'derivation-v3.yaml#/$defs/output/caFixed', + 'schema' : schema_dir / 'derivation-v4.yaml#/$defs/output/caFixed', 'files' : [ 'output-caFixedFlat.json', 'output-caFixedNAR.json', @@ -108,21 +108,21 @@ schemas += [ }, { 'stem' : 'derivation', - 'schema' : schema_dir / 'derivation-v3.yaml#/$defs/output/caFloating', + 'schema' : schema_dir / 'derivation-v4.yaml#/$defs/output/caFloating', 'files' : [ 'output-caFloating.json', ], }, { 'stem' : 'derivation', - 'schema' : schema_dir / 'derivation-v3.yaml#/$defs/output/deferred', + 'schema' : schema_dir / 'derivation-v4.yaml#/$defs/output/deferred', 'files' : [ 'output-deferred.json', ], }, { 'stem' : 'derivation', - 'schema' : schema_dir / 'derivation-v3.yaml#/$defs/output/impure', + 'schema' : schema_dir / 'derivation-v4.yaml#/$defs/output/impure', 'files' : [ 'output-impure.json', ], diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes-defaults.json 
b/src/libstore-tests/data/derivation/ca/advanced-attributes-defaults.json index eb4bd4f3d..781b4cb14 100644 --- a/src/libstore-tests/data/derivation/ca/advanced-attributes-defaults.json +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes-defaults.json @@ -12,8 +12,10 @@ "outputHashMode": "recursive", "system": "my-system" }, - "inputDrvs": {}, - "inputSrcs": [], + "inputs": { + "drvs": {}, + "srcs": [] + }, "name": "advanced-attributes-defaults", "outputs": { "out": { @@ -22,5 +24,5 @@ } }, "system": "my-system", - "version": 3 + "version": 4 } diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs-defaults.json b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs-defaults.json index 3a4a3079b..7437b51ef 100644 --- a/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs-defaults.json +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs-defaults.json @@ -8,8 +8,10 @@ "dev": "/02qcpld1y6xhs5gz9bchpxaw0xdhmsp5dv88lh25r2ss44kh8dxz", "out": "/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9" }, - "inputDrvs": {}, - "inputSrcs": [], + "inputs": { + "drvs": {}, + "srcs": [] + }, "name": "advanced-attributes-structured-attrs-defaults", "outputs": { "dev": { @@ -33,5 +35,5 @@ "system": "my-system" }, "system": "my-system", - "version": 3 + "version": 4 } diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json index b10355af7..2a4e70558 100644 --- a/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json @@ -9,25 +9,27 @@ "dev": "/02qcpld1y6xhs5gz9bchpxaw0xdhmsp5dv88lh25r2ss44kh8dxz", "out": "/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9" }, - "inputDrvs": { - "j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv": { - 
"dynamicOutputs": {}, - "outputs": [ - "dev", - "out" - ] + "inputs": { + "drvs": { + "j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv": { + "dynamicOutputs": {}, + "outputs": [ + "dev", + "out" + ] + }, + "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv": { + "dynamicOutputs": {}, + "outputs": [ + "dev", + "out" + ] + } }, - "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv": { - "dynamicOutputs": {}, - "outputs": [ - "dev", - "out" - ] - } + "srcs": [ + "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv" + ] }, - "inputSrcs": [ - "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv" - ], "name": "advanced-attributes-structured-attrs", "outputs": { "bin": { @@ -101,5 +103,5 @@ "system": "my-system" }, "system": "my-system", - "version": 3 + "version": 4 } diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes.json b/src/libstore-tests/data/derivation/ca/advanced-attributes.json index d66882036..55dbe62e0 100644 --- a/src/libstore-tests/data/derivation/ca/advanced-attributes.json +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes.json @@ -25,25 +25,27 @@ "requiredSystemFeatures": "rainbow uid-range", "system": "my-system" }, - "inputDrvs": { - "j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv": { - "dynamicOutputs": {}, - "outputs": [ - "dev", - "out" - ] + "inputs": { + "drvs": { + "j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv": { + "dynamicOutputs": {}, + "outputs": [ + "dev", + "out" + ] + }, + "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv": { + "dynamicOutputs": {}, + "outputs": [ + "dev", + "out" + ] + } }, - "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv": { - "dynamicOutputs": {}, - "outputs": [ - "dev", - "out" - ] - } + "srcs": [ + "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv" + ] }, - "inputSrcs": [ - "qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv" - ], "name": "advanced-attributes", "outputs": { "out": { @@ -52,5 +54,5 @@ } }, "system": "my-system", - "version": 3 + "version": 4 } diff --git a/src/libstore-tests/data/derivation/ca/self-contained.json 
b/src/libstore-tests/data/derivation/ca/self-contained.json index 331beb7be..c05710140 100644 --- a/src/libstore-tests/data/derivation/ca/self-contained.json +++ b/src/libstore-tests/data/derivation/ca/self-contained.json @@ -10,8 +10,10 @@ "out": "/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9", "system": "x86_64-linux" }, - "inputDrvs": {}, - "inputSrcs": [], + "inputs": { + "drvs": {}, + "srcs": [] + }, "name": "myname", "outputs": { "out": { @@ -20,5 +22,5 @@ } }, "system": "x86_64-linux", - "version": 3 + "version": 4 } diff --git a/src/libstore-tests/data/derivation/dyn-dep-derivation.json b/src/libstore-tests/data/derivation/dyn-dep-derivation.json index 1a9f54c53..1793c5f2d 100644 --- a/src/libstore-tests/data/derivation/dyn-dep-derivation.json +++ b/src/libstore-tests/data/derivation/dyn-dep-derivation.json @@ -7,33 +7,35 @@ "env": { "BIG_BAD": "WOLF" }, - "inputDrvs": { - "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv": { - "dynamicOutputs": { - "cat": { - "dynamicOutputs": {}, - "outputs": [ - "kitten" - ] + "inputs": { + "drvs": { + "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv": { + "dynamicOutputs": { + "cat": { + "dynamicOutputs": {}, + "outputs": [ + "kitten" + ] + }, + "goose": { + "dynamicOutputs": {}, + "outputs": [ + "gosling" + ] + } }, - "goose": { - "dynamicOutputs": {}, - "outputs": [ - "gosling" - ] - } - }, - "outputs": [ - "cat", - "dog" - ] - } + "outputs": [ + "cat", + "dog" + ] + } + }, + "srcs": [ + "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1" + ] }, - "inputSrcs": [ - "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1" - ], "name": "dyn-dep-derivation", "outputs": {}, "system": "wasm-sel4", - "version": 3 + "version": 4 } diff --git a/src/libstore-tests/data/derivation/ia/advanced-attributes-defaults.json b/src/libstore-tests/data/derivation/ia/advanced-attributes-defaults.json index 0fa543f21..898762123 100644 --- a/src/libstore-tests/data/derivation/ia/advanced-attributes-defaults.json +++ 
b/src/libstore-tests/data/derivation/ia/advanced-attributes-defaults.json @@ -10,8 +10,10 @@ "out": "/nix/store/1qsc7svv43m4dw2prh6mvyf7cai5czji-advanced-attributes-defaults", "system": "my-system" }, - "inputDrvs": {}, - "inputSrcs": [], + "inputs": { + "drvs": {}, + "srcs": [] + }, "name": "advanced-attributes-defaults", "outputs": { "out": { @@ -19,5 +21,5 @@ } }, "system": "my-system", - "version": 3 + "version": 4 } diff --git a/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs-defaults.json b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs-defaults.json index e02392ea1..c51095986 100644 --- a/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs-defaults.json +++ b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs-defaults.json @@ -8,8 +8,10 @@ "dev": "/nix/store/8bazivnbipbyi569623skw5zm91z6kc2-advanced-attributes-structured-attrs-defaults-dev", "out": "/nix/store/f8f8nvnx32bxvyxyx2ff7akbvwhwd9dw-advanced-attributes-structured-attrs-defaults" }, - "inputDrvs": {}, - "inputSrcs": [], + "inputs": { + "drvs": {}, + "srcs": [] + }, "name": "advanced-attributes-structured-attrs-defaults", "outputs": { "dev": { @@ -29,5 +31,5 @@ "system": "my-system" }, "system": "my-system", - "version": 3 + "version": 4 } diff --git a/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json index 9230b06b6..e07d1294b 100644 --- a/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json +++ b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json @@ -9,25 +9,27 @@ "dev": "/nix/store/wyfgwsdi8rs851wmy1xfzdxy7y5vrg5l-advanced-attributes-structured-attrs-dev", "out": "/nix/store/7cxy4zx1vqc885r4jl2l64pymqbdmhii-advanced-attributes-structured-attrs" }, - "inputDrvs": { - "afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv": { - "dynamicOutputs": 
{}, - "outputs": [ - "dev", - "out" - ] + "inputs": { + "drvs": { + "afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv": { + "dynamicOutputs": {}, + "outputs": [ + "dev", + "out" + ] + }, + "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv": { + "dynamicOutputs": {}, + "outputs": [ + "dev", + "out" + ] + } }, - "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv": { - "dynamicOutputs": {}, - "outputs": [ - "dev", - "out" - ] - } + "srcs": [ + "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv" + ] }, - "inputSrcs": [ - "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv" - ], "name": "advanced-attributes-structured-attrs", "outputs": { "bin": { @@ -96,5 +98,5 @@ "system": "my-system" }, "system": "my-system", - "version": 3 + "version": 4 } diff --git a/src/libstore-tests/data/derivation/ia/advanced-attributes.json b/src/libstore-tests/data/derivation/ia/advanced-attributes.json index ba5911c91..372b4fbb9 100644 --- a/src/libstore-tests/data/derivation/ia/advanced-attributes.json +++ b/src/libstore-tests/data/derivation/ia/advanced-attributes.json @@ -23,25 +23,27 @@ "requiredSystemFeatures": "rainbow uid-range", "system": "my-system" }, - "inputDrvs": { - "afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv": { - "dynamicOutputs": {}, - "outputs": [ - "dev", - "out" - ] + "inputs": { + "drvs": { + "afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv": { + "dynamicOutputs": {}, + "outputs": [ + "dev", + "out" + ] + }, + "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv": { + "dynamicOutputs": {}, + "outputs": [ + "dev", + "out" + ] + } }, - "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv": { - "dynamicOutputs": {}, - "outputs": [ - "dev", - "out" - ] - } + "srcs": [ + "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv" + ] }, - "inputSrcs": [ - "vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv" - ], "name": "advanced-attributes", "outputs": { "out": { @@ -49,5 +51,5 @@ } }, "system": "my-system", - "version": 3 + "version": 4 } diff --git a/src/libstore-tests/data/derivation/output-caFixedFlat.json b/src/libstore-tests/data/derivation/output-caFixedFlat.json 
index e6a0123f6..9a38608b3 100644 --- a/src/libstore-tests/data/derivation/output-caFixedFlat.json +++ b/src/libstore-tests/data/derivation/output-caFixedFlat.json @@ -1,5 +1,8 @@ { - "hash": "894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f", - "hashAlgo": "sha256", + "hash": { + "algorithm": "sha256", + "format": "base64", + "hash": "iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8=" + }, "method": "flat" } diff --git a/src/libstore-tests/data/derivation/output-caFixedNAR.json b/src/libstore-tests/data/derivation/output-caFixedNAR.json index b57e065a9..767c605a3 100644 --- a/src/libstore-tests/data/derivation/output-caFixedNAR.json +++ b/src/libstore-tests/data/derivation/output-caFixedNAR.json @@ -1,5 +1,8 @@ { - "hash": "894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f", - "hashAlgo": "sha256", + "hash": { + "algorithm": "sha256", + "format": "base64", + "hash": "iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8=" + }, "method": "nar" } diff --git a/src/libstore-tests/data/derivation/output-caFixedText.json b/src/libstore-tests/data/derivation/output-caFixedText.json index 84778509e..a04f1ff2a 100644 --- a/src/libstore-tests/data/derivation/output-caFixedText.json +++ b/src/libstore-tests/data/derivation/output-caFixedText.json @@ -1,5 +1,8 @@ { - "hash": "894517c9163c896ec31a2adbd33c0681fd5f45b2c0ef08a64c92a03fb97f390f", - "hashAlgo": "sha256", + "hash": { + "algorithm": "sha256", + "format": "base64", + "hash": "iUUXyRY8iW7DGirb0zwGgf1fRbLA7wimTJKgP7l/OQ8=" + }, "method": "text" } diff --git a/src/libstore-tests/data/derivation/simple-derivation.json b/src/libstore-tests/data/derivation/simple-derivation.json index 41a049aef..04129a096 100644 --- a/src/libstore-tests/data/derivation/simple-derivation.json +++ b/src/libstore-tests/data/derivation/simple-derivation.json @@ -7,20 +7,22 @@ "env": { "BIG_BAD": "WOLF" }, - "inputDrvs": { - "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv": { - "dynamicOutputs": {}, - "outputs": [ - "cat", - "dog" - ] 
- } + "inputs": { + "drvs": { + "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep2.drv": { + "dynamicOutputs": {}, + "outputs": [ + "cat", + "dog" + ] + } + }, + "srcs": [ + "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1" + ] }, - "inputSrcs": [ - "c015dhfh5l0lp6wxyvdn7bmwhbbr6hr9-dep1" - ], "name": "simple-derivation", "outputs": {}, "system": "wasm-sel4", - "version": 3 + "version": 4 } diff --git a/src/libstore-tests/nix_api_store.cc b/src/libstore-tests/nix_api_store.cc index 228b8069f..bf411053a 100644 --- a/src/libstore-tests/nix_api_store.cc +++ b/src/libstore-tests/nix_api_store.cc @@ -636,7 +636,7 @@ TEST_F(NixApiStoreTestWithRealisedPath, nix_store_realise_output_ordering) auto outj_ph = nix::hashPlaceholder("outj"); std::string drvJson = R"({ - "version": 3, + "version": 4, "name": "multi-output-test", "system": ")" + nix::settings.thisSystem.get() + R"(", @@ -668,8 +668,10 @@ TEST_F(NixApiStoreTestWithRealisedPath, nix_store_realise_output_ordering) "outa": ")" + outa_ph + R"(" }, - "inputDrvs": {}, - "inputSrcs": [], + "inputs": { + "drvs": {}, + "srcs": [] + }, "outputs": { "outd": { "hashAlgo": "sha256", "method": "nar" }, "outf": { "hashAlgo": "sha256", "method": "nar" }, diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index e6ac08fd9..31ca167f9 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -1293,15 +1293,13 @@ void adl_serializer::to_json(json & res, const DerivationOutpu overloaded{ [&](const DerivationOutput::InputAddressed & doi) { res["path"] = doi.path; }, [&](const DerivationOutput::CAFixed & dof) { - /* it would be nice to output the path for user convenience, but - this would require us to know the store dir. */ + res = dof.ca; + // FIXME print refs? + /* it would be nice to output the path for user convenience, but + this would require us to know the store dir. 
*/ #if 0 res["path"] = dof.path(store, drvName, outputName); #endif - res["method"] = std::string{dof.ca.method.render()}; - res["hashAlgo"] = printHashAlgo(dof.ca.hash.algo); - res["hash"] = dof.ca.hash.to_string(HashFormat::Base16, false); - // FIXME print refs? }, [&](const DerivationOutput::CAFloating & dof) { res["method"] = std::string{dof.method.render()}; @@ -1341,15 +1339,12 @@ adl_serializer::from_json(const json & _json, const Experiment }; } - else if (keys == (std::set{"method", "hashAlgo", "hash"})) { - auto [method, hashAlgo] = methodAlgo(); + else if (keys == (std::set{"method", "hash"})) { auto dof = DerivationOutput::CAFixed{ - .ca = - ContentAddress{ - .method = std::move(method), - .hash = Hash::parseNonSRIUnprefixed(getString(valueAt(json, "hash")), hashAlgo), - }, + .ca = static_cast(_json), }; + if (dof.ca.method == ContentAddressMethod::Raw::Text) + xpSettings.require(Xp::DynamicDerivations, "text-hashed derivation output in JSON"); /* We no longer produce this (denormalized) field (for the reasons described above), so we don't need to check it. 
*/ #if 0 @@ -1392,7 +1387,7 @@ void adl_serializer::to_json(json & res, const Derivation & d) res["name"] = d.name; - res["version"] = 3; + res["version"] = 4; { nlohmann::json & outputsObj = res["outputs"]; @@ -1403,13 +1398,16 @@ void adl_serializer::to_json(json & res, const Derivation & d) } { - auto & inputsList = res["inputSrcs"]; - inputsList = nlohmann::json::array(); - for (auto & input : d.inputSrcs) - inputsList.emplace_back(input); - } + auto & inputsObj = res["inputs"]; + inputsObj = nlohmann::json::object(); + + { + auto & inputsList = inputsObj["srcs"]; + inputsList = nlohmann::json::array(); + for (auto & input : d.inputSrcs) + inputsList.emplace_back(input); + } - { auto doInput = [&](this const auto & doInput, const auto & inputNode) -> nlohmann::json { auto value = nlohmann::json::object(); value["outputs"] = inputNode.value; @@ -1421,12 +1419,11 @@ void adl_serializer::to_json(json & res, const Derivation & d) } return value; }; - { - auto & inputDrvsObj = res["inputDrvs"]; - inputDrvsObj = nlohmann::json::object(); - for (auto & [inputDrv, inputNode] : d.inputDrvs.map) { - inputDrvsObj[inputDrv.to_string()] = doInput(inputNode); - } + + auto & inputDrvsObj = inputsObj["drvs"]; + inputDrvsObj = nlohmann::json::object(); + for (auto & [inputDrv, inputNode] : d.inputDrvs.map) { + inputDrvsObj[inputDrv.to_string()] = doInput(inputNode); } } @@ -1449,8 +1446,8 @@ Derivation adl_serializer::from_json(const json & _json, const Exper res.name = getString(valueAt(json, "name")); - if (valueAt(json, "version") != 3) - throw Error("Only derivation format version 3 is currently supported."); + if (valueAt(json, "version") != 4) + throw Error("Only derivation format version 4 is currently supported."); try { auto outputs = getObject(valueAt(json, "outputs")); @@ -1463,32 +1460,39 @@ Derivation adl_serializer::from_json(const json & _json, const Exper } try { - auto inputSrcs = getArray(valueAt(json, "inputSrcs")); - for (auto & input : inputSrcs) - 
res.inputSrcs.insert(input); - } catch (Error & e) { - e.addTrace({}, "while reading key 'inputSrcs'"); - throw; - } + auto inputsObj = getObject(valueAt(json, "inputs")); - try { - auto doInput = [&](this const auto & doInput, const auto & _json) -> DerivedPathMap::ChildNode { - auto & json = getObject(_json); - DerivedPathMap::ChildNode node; - node.value = getStringSet(valueAt(json, "outputs")); - auto drvs = getObject(valueAt(json, "dynamicOutputs")); - for (auto & [outputId, childNode] : drvs) { - xpSettings.require( - Xp::DynamicDerivations, [&] { return fmt("dynamic output '%s' in JSON", outputId); }); - node.childMap[outputId] = doInput(childNode); - } - return node; - }; - auto drvs = getObject(valueAt(json, "inputDrvs")); - for (auto & [inputDrvPath, inputOutputs] : drvs) - res.inputDrvs.map[StorePath{inputDrvPath}] = doInput(inputOutputs); + try { + auto inputSrcs = getArray(valueAt(inputsObj, "srcs")); + for (auto & input : inputSrcs) + res.inputSrcs.insert(input); + } catch (Error & e) { + e.addTrace({}, "while reading key 'srcs'"); + throw; + } + + try { + auto doInput = [&](this const auto & doInput, const auto & _json) -> DerivedPathMap::ChildNode { + auto & json = getObject(_json); + DerivedPathMap::ChildNode node; + node.value = getStringSet(valueAt(json, "outputs")); + auto drvs = getObject(valueAt(json, "dynamicOutputs")); + for (auto & [outputId, childNode] : drvs) { + xpSettings.require( + Xp::DynamicDerivations, [&] { return fmt("dynamic output '%s' in JSON", outputId); }); + node.childMap[outputId] = doInput(childNode); + } + return node; + }; + auto drvs = getObject(valueAt(inputsObj, "drvs")); + for (auto & [inputDrvPath, inputOutputs] : drvs) + res.inputDrvs.map[StorePath{inputDrvPath}] = doInput(inputOutputs); + } catch (Error & e) { + e.addTrace({}, "while reading key 'drvs'"); + throw; + } } catch (Error & e) { - e.addTrace({}, "while reading key 'inputDrvs'"); + e.addTrace({}, "while reading key 'inputs'"); throw; } diff --git 
a/tests/functional/dyn-drv/non-trivial.nix b/tests/functional/dyn-drv/non-trivial.nix index 3c24ac2ee..87f2d9cfe 100644 --- a/tests/functional/dyn-drv/non-trivial.nix +++ b/tests/functional/dyn-drv/non-trivial.nix @@ -51,10 +51,12 @@ builtins.outputOf "$word": "hello, from $word!", "PATH": ${builtins.toJSON path} }, - "inputDrvs": { - $inputDrvs + "inputs": { + "drvs": { + $inputDrvs + }, + "srcs": [] }, - "inputSrcs": [], "name": "build-$word", "outputs": { "out": { @@ -63,7 +65,7 @@ builtins.outputOf } }, "system": "${system}", - "version": 3 + "version": 4 } EOF drvPath=$(echo "$json" | nix derivation add) From caa196e31d00df3fa31d8fc47ae2efb8eb5ac6d4 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 30 Oct 2025 17:44:07 -0400 Subject: [PATCH 170/213] Make the store path info `ca` field structured in JSON The old string format is a holdover from the pre JSON days. It is not friendly to users who need to get the information out of it. Also introduce the sort of versioning we have for derivation for this format too. 
--- doc/manual/source/protocols/json/meson.build | 2 +- ...re-object-info-v1 => store-object-info-v2} | 0 ...info-v1.yaml => store-object-info-v2.yaml} | 31 ++++++++++++++++--- .../protocols/json/store-object-info.md | 12 +++---- src/json-schema-checks/meson.build | 12 +++---- src/libstore-tests/data/nar-info/impure.json | 12 +++++-- src/libstore-tests/data/nar-info/pure.json | 12 +++++-- .../data/path-info/empty_impure.json | 3 +- .../data/path-info/empty_pure.json | 3 +- src/libstore-tests/data/path-info/impure.json | 12 +++++-- src/libstore-tests/data/path-info/pure.json | 12 +++++-- src/libstore/path-info.cc | 24 ++++++++++++-- tests/functional/fixed.sh | 11 ++++++- tests/functional/git-hashing/simple-common.sh | 14 +++++++-- tests/functional/impure-derivations.sh | 2 +- tests/functional/nix-profile.sh | 2 +- tests/functional/signing.sh | 2 +- 17 files changed, 130 insertions(+), 36 deletions(-) rename doc/manual/source/protocols/json/schema/{store-object-info-v1 => store-object-info-v2} (100%) rename doc/manual/source/protocols/json/schema/{store-object-info-v1.yaml => store-object-info-v2.yaml} (91%) diff --git a/doc/manual/source/protocols/json/meson.build b/doc/manual/source/protocols/json/meson.build index c0b8416d7..4ab94c63b 100644 --- a/doc/manual/source/protocols/json/meson.build +++ b/doc/manual/source/protocols/json/meson.build @@ -12,7 +12,7 @@ schemas = [ 'hash-v1', 'content-address-v1', 'store-path-v1', - 'store-object-info-v1', + 'store-object-info-v2', 'derivation-v4', 'deriving-path-v1', 'build-trace-entry-v1', diff --git a/doc/manual/source/protocols/json/schema/store-object-info-v1 b/doc/manual/source/protocols/json/schema/store-object-info-v2 similarity index 100% rename from doc/manual/source/protocols/json/schema/store-object-info-v1 rename to doc/manual/source/protocols/json/schema/store-object-info-v2 diff --git a/doc/manual/source/protocols/json/schema/store-object-info-v1.yaml 
b/doc/manual/source/protocols/json/schema/store-object-info-v2.yaml similarity index 91% rename from doc/manual/source/protocols/json/schema/store-object-info-v1.yaml rename to doc/manual/source/protocols/json/schema/store-object-info-v2.yaml index d79f25043..4f442e0c3 100644 --- a/doc/manual/source/protocols/json/schema/store-object-info-v1.yaml +++ b/doc/manual/source/protocols/json/schema/store-object-info-v2.yaml @@ -1,6 +1,6 @@ -"$schema": "http://json-schema.org/draft-07/schema" -"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/store-object-info-v1.json" -title: Store Object Info +"$schema": "http://json-schema.org/draft-04/schema" +"$id": "https://nix.dev/manual/nix/latest/protocols/json/schema/store-object-info-v2.json" +title: Store Object Info v2 description: | Information about a [store object](@docroot@/store/store-object.md). @@ -41,11 +41,27 @@ $defs: This is the minimal set of fields that describe what a store object contains. type: object required: + - version - narHash - narSize - references - ca properties: + version: + type: integer + const: 2 + title: Format version (must be 2) + description: | + Must be `2`. + This is a guard that allows us to continue evolving this format. + Here is the rough version history: + + - Version 0: `.narinfo` line-oriented format + + - Version 1: Original JSON format, with ugly `"r:sha256"` inherited from `.narinfo` format. + + - Version 2: Use structured JSON type for `ca` + path: type: string title: Store Path @@ -76,7 +92,10 @@ $defs: type: string ca: - type: ["string", "null"] + oneOf: + - type: "null" + const: null + - "$ref": "./content-address-v1.yaml" title: Content Address description: | If the store object is [content-addressed](@docroot@/store/store-object/content-address.md), @@ -91,6 +110,7 @@ $defs: In other words, the same store object in different stores could have different values for these impure fields. 
type: object required: + - version - narHash - narSize - references @@ -101,6 +121,7 @@ $defs: - ultimate - signatures properties: + version: { $ref: "#/$defs/base/properties/version" } path: { $ref: "#/$defs/base/properties/path" } narHash: { $ref: "#/$defs/base/properties/narHash" } narSize: { $ref: "#/$defs/base/properties/narSize" } @@ -164,6 +185,7 @@ $defs: This download information, being specific to how the store object happens to be stored and transferred, is also considered to be non-intrinsic / impure. type: object required: + - version - narHash - narSize - references @@ -179,6 +201,7 @@ $defs: - downloadHash - downloadSize properties: + version: { $ref: "#/$defs/base/properties/version" } path: { $ref: "#/$defs/base/properties/path" } narHash: { $ref: "#/$defs/base/properties/narHash" } narSize: { $ref: "#/$defs/base/properties/narSize" } diff --git a/doc/manual/source/protocols/json/store-object-info.md b/doc/manual/source/protocols/json/store-object-info.md index 4673dd773..6a101ab0f 100644 --- a/doc/manual/source/protocols/json/store-object-info.md +++ b/doc/manual/source/protocols/json/store-object-info.md @@ -1,29 +1,29 @@ -{{#include store-object-info-v1-fixed.md}} +{{#include store-object-info-v2-fixed.md}} ## Examples ### Minimal store object (content-addressed) ```json -{{#include schema/store-object-info-v1/pure.json}} +{{#include schema/store-object-info-v2/pure.json}} ``` ### Store object with impure fields ```json -{{#include schema/store-object-info-v1/impure.json}} +{{#include schema/store-object-info-v2/impure.json}} ``` ### Minimal store object (empty) ```json -{{#include schema/store-object-info-v1/empty_pure.json}} +{{#include schema/store-object-info-v2/empty_pure.json}} ``` ### Store object with all impure fields ```json -{{#include schema/store-object-info-v1/empty_impure.json}} +{{#include schema/store-object-info-v2/empty_impure.json}} ``` ### NAR info (minimal) @@ -41,5 +41,5 @@ diff --git a/src/json-schema-checks/meson.build 
b/src/json-schema-checks/meson.build index fedacedeb..f72affb0b 100644 --- a/src/json-schema-checks/meson.build +++ b/src/json-schema-checks/meson.build @@ -134,7 +134,7 @@ schemas += [ # Match overall { 'stem' : 'store-object-info', - 'schema' : schema_dir / 'store-object-info-v1.yaml', + 'schema' : schema_dir / 'store-object-info-v2.yaml', 'files' : [ 'pure.json', 'impure.json', @@ -144,7 +144,7 @@ schemas += [ }, { 'stem' : 'nar-info', - 'schema' : schema_dir / 'store-object-info-v1.yaml', + 'schema' : schema_dir / 'store-object-info-v2.yaml', 'files' : [ 'pure.json', 'impure.json', @@ -162,7 +162,7 @@ schemas += [ # Match exact variant { 'stem' : 'store-object-info', - 'schema' : schema_dir / 'store-object-info-v1.yaml#/$defs/base', + 'schema' : schema_dir / 'store-object-info-v2.yaml#/$defs/base', 'files' : [ 'pure.json', 'empty_pure.json', @@ -170,7 +170,7 @@ schemas += [ }, { 'stem' : 'store-object-info', - 'schema' : schema_dir / 'store-object-info-v1.yaml#/$defs/impure', + 'schema' : schema_dir / 'store-object-info-v2.yaml#/$defs/impure', 'files' : [ 'impure.json', 'empty_impure.json', @@ -178,14 +178,14 @@ schemas += [ }, { 'stem' : 'nar-info', - 'schema' : schema_dir / 'store-object-info-v1.yaml#/$defs/base', + 'schema' : schema_dir / 'store-object-info-v2.yaml#/$defs/base', 'files' : [ 'pure.json', ], }, { 'stem' : 'nar-info', - 'schema' : schema_dir / 'store-object-info-v1.yaml#/$defs/narInfo', + 'schema' : schema_dir / 'store-object-info-v2.yaml#/$defs/narInfo', 'files' : [ 'impure.json', ], diff --git a/src/libstore-tests/data/nar-info/impure.json b/src/libstore-tests/data/nar-info/impure.json index bb9791a6a..f35ff990b 100644 --- a/src/libstore-tests/data/nar-info/impure.json +++ b/src/libstore-tests/data/nar-info/impure.json @@ -1,5 +1,12 @@ { - "ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh", + "ca": { + "hash": { + "algorithm": "sha256", + "format": "base64", + "hash": "EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM=" + 
}, + "method": "nar" + }, "compression": "xz", "deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", "downloadHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", @@ -16,5 +23,6 @@ "qwer" ], "ultimate": true, - "url": "nar/1w1fff338fvdw53sqgamddn1b2xgds473pv6y13gizdbqjv4i5p3.nar.xz" + "url": "nar/1w1fff338fvdw53sqgamddn1b2xgds473pv6y13gizdbqjv4i5p3.nar.xz", + "version": 2 } diff --git a/src/libstore-tests/data/nar-info/pure.json b/src/libstore-tests/data/nar-info/pure.json index 955baec31..2c5cb3bde 100644 --- a/src/libstore-tests/data/nar-info/pure.json +++ b/src/libstore-tests/data/nar-info/pure.json @@ -1,9 +1,17 @@ { - "ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh", + "ca": { + "hash": { + "algorithm": "sha256", + "format": "base64", + "hash": "EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM=" + }, + "method": "nar" + }, "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", "narSize": 34878, "references": [ "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo" - ] + ], + "version": 2 } diff --git a/src/libstore-tests/data/path-info/empty_impure.json b/src/libstore-tests/data/path-info/empty_impure.json index be982dcef..381acaa03 100644 --- a/src/libstore-tests/data/path-info/empty_impure.json +++ b/src/libstore-tests/data/path-info/empty_impure.json @@ -6,5 +6,6 @@ "references": [], "registrationTime": null, "signatures": [], - "ultimate": false + "ultimate": false, + "version": 2 } diff --git a/src/libstore-tests/data/path-info/empty_pure.json b/src/libstore-tests/data/path-info/empty_pure.json index 10d9f508a..6d3fa646b 100644 --- a/src/libstore-tests/data/path-info/empty_pure.json +++ b/src/libstore-tests/data/path-info/empty_pure.json @@ -2,5 +2,6 @@ "ca": null, "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", "narSize": 0, - "references": [] + "references": [], + "version": 2 } diff --git a/src/libstore-tests/data/path-info/impure.json 
b/src/libstore-tests/data/path-info/impure.json index 0c452cc49..141b38a16 100644 --- a/src/libstore-tests/data/path-info/impure.json +++ b/src/libstore-tests/data/path-info/impure.json @@ -1,5 +1,12 @@ { - "ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh", + "ca": { + "hash": { + "algorithm": "sha256", + "format": "base64", + "hash": "EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM=" + }, + "method": "nar" + }, "deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", "narSize": 34878, @@ -12,5 +19,6 @@ "asdf", "qwer" ], - "ultimate": true + "ultimate": true, + "version": 2 } diff --git a/src/libstore-tests/data/path-info/pure.json b/src/libstore-tests/data/path-info/pure.json index 955baec31..2c5cb3bde 100644 --- a/src/libstore-tests/data/path-info/pure.json +++ b/src/libstore-tests/data/path-info/pure.json @@ -1,9 +1,17 @@ { - "ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh", + "ca": { + "hash": { + "algorithm": "sha256", + "format": "base64", + "hash": "EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM=" + }, + "method": "nar" + }, "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", "narSize": 34878, "references": [ "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo" - ] + ], + "version": 2 } diff --git a/src/libstore/path-info.cc b/src/libstore/path-info.cc index 09a78a4ad..c535d08f4 100644 --- a/src/libstore/path-info.cc +++ b/src/libstore/path-info.cc @@ -156,6 +156,8 @@ UnkeyedValidPathInfo::toJSON(const StoreDirConfig & store, bool includeImpureInf auto jsonObject = json::object(); + jsonObject["version"] = 2; + jsonObject["narHash"] = narHash.to_string(hashFormat, true); jsonObject["narSize"] = narSize; @@ -165,7 +167,7 @@ UnkeyedValidPathInfo::toJSON(const StoreDirConfig & store, bool includeImpureInf jsonRefs.emplace_back(store.printStorePath(ref)); } - jsonObject["ca"] = ca ? 
(std::optional{renderContentAddress(*ca)}) : std::nullopt; + jsonObject["ca"] = ca; if (includeImpureInfo) { jsonObject["deriver"] = deriver ? (std::optional{store.printStorePath(*deriver)}) : std::nullopt; @@ -189,6 +191,16 @@ UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig & store }; auto & json = getObject(_json); + + // Check version (optional for backward compatibility) + nlohmann::json::number_unsigned_t version = 1; + if (json.contains("version")) { + version = getUnsigned(valueAt(json, "version")); + if (version != 1 && version != 2) { + throw Error("Unsupported path info JSON format version %d, expected 1 through 2", version); + } + } + res.narHash = Hash::parseAny(getString(valueAt(json, "narHash")), std::nullopt); res.narSize = getUnsigned(valueAt(json, "narSize")); @@ -205,7 +217,15 @@ UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig & store // missing is for back-compat. if (auto * rawCa0 = optionalValueAt(json, "ca")) if (auto * rawCa = getNullable(*rawCa0)) - res.ca = ContentAddress::parse(getString(*rawCa)); + switch (version) { + case 1: + // old string format also used in SQLite DB and .narinfo + res.ca = ContentAddress::parse(getString(*rawCa)); + break; + case 2 ... std::numeric_limits::max(): + res.ca = *rawCa; + break; + } if (auto * rawDeriver0 = optionalValueAt(json, "deriver")) if (auto * rawDeriver = getNullable(*rawDeriver0)) diff --git a/tests/functional/fixed.sh b/tests/functional/fixed.sh index edf6f88d4..7861392ec 100755 --- a/tests/functional/fixed.sh +++ b/tests/functional/fixed.sh @@ -14,7 +14,16 @@ nix-build fixed.nix -A bad --no-out-link && fail "should fail" # Building with the bad hash should produce the "good" output path as # a side-effect. 
[[ -e $path ]] -nix path-info --json "$path" | grep fixed:md5:2qk15sxzzjlnpjk9brn7j8ppcd +nix path-info --json "$path" | jq -e \ + --arg hash "$(nix hash convert --to base64 "md5:8ddd8be4b179a529afa5f2ffae4b9858")" \ + '.[].ca == { + method: "flat", + hash: { + algorithm: "md5", + format: "base64", + hash: $hash + }, + }' echo 'testing good...' nix-build fixed.nix -A good --no-out-link diff --git a/tests/functional/git-hashing/simple-common.sh b/tests/functional/git-hashing/simple-common.sh index 08b5c0e71..eaa0a9529 100644 --- a/tests/functional/git-hashing/simple-common.sh +++ b/tests/functional/git-hashing/simple-common.sh @@ -47,9 +47,17 @@ try2 () { hashFromGit=$(git -C "$repo" rev-parse "HEAD:$hashPath") [[ "$hashFromGit" == "$expected" ]] - local caFromNix - caFromNix=$(nix path-info --json "$path" | jq -r ".[] | .ca") - [[ "fixed:git:$hashAlgo:$(nix hash convert --to nix32 "$hashAlgo:$hashFromGit")" = "$caFromNix" ]] + nix path-info --json "$path" | jq -e \ + --arg algo "$hashAlgo" \ + --arg hash "$(nix hash convert --to base64 "$hashAlgo:$hashFromGit")" \ + '.[].ca == { + method: "git", + hash: { + algorithm: $algo, + format: "base64", + hash: $hash + }, + }' } test0 () { diff --git a/tests/functional/impure-derivations.sh b/tests/functional/impure-derivations.sh index e0b7c3eea..211abccb0 100755 --- a/tests/functional/impure-derivations.sh +++ b/tests/functional/impure-derivations.sh @@ -30,7 +30,7 @@ path1_stuff=$(echo "$json" | jq -r .[].outputs.stuff) [[ $(< "$path1"/n) = 0 ]] [[ $(< "$path1_stuff"/bla) = 0 ]] -[[ $(nix path-info --json "$path1" | jq .[].ca) =~ fixed:r:sha256: ]] +nix path-info --json "$path1" | jq -e '.[].ca | .method == "nar" and .hash.algorithm == "sha256"' path2=$(nix build -L --no-link --json --file ./impure-derivations.nix impure | jq -r .[].outputs.out) [[ $(< "$path2"/n) = 1 ]] diff --git a/tests/functional/nix-profile.sh b/tests/functional/nix-profile.sh index 922162d4b..494b24ddb 100755 --- a/tests/functional/nix-profile.sh 
+++ b/tests/functional/nix-profile.sh @@ -166,7 +166,7 @@ printf 4.0 > "$flake1Dir"/version printf Utrecht > "$flake1Dir"/who nix profile add "$flake1Dir" [[ $("$TEST_HOME"/.nix-profile/bin/hello) = "Hello Utrecht" ]] -[[ $(nix path-info --json "$(realpath "$TEST_HOME"/.nix-profile/bin/hello)" | jq -r .[].ca) =~ fixed:r:sha256: ]] +nix path-info --json "$(realpath "$TEST_HOME"/.nix-profile/bin/hello)" | jq -e '.[].ca | .method == "nar" and .hash.algorithm == "sha256"' # Override the outputs. nix profile remove simple flake1 diff --git a/tests/functional/signing.sh b/tests/functional/signing.sh index 2893efec7..1bcaf2f53 100755 --- a/tests/functional/signing.sh +++ b/tests/functional/signing.sh @@ -58,7 +58,7 @@ nix store verify -r "$outPath2" --sigs-needed 1 --trusted-public-keys "$pk1" # Build something content-addressed. outPathCA=$(IMPURE_VAR1=foo IMPURE_VAR2=bar nix-build ./fixed.nix -A good.0 --no-out-link) -nix path-info --json "$outPathCA" | jq -e '.[] | .ca | startswith("fixed:md5:")' +nix path-info --json "$outPathCA" | jq -e '.[].ca | .method == "flat" and .hash.algorithm == "md5"' # Content-addressed paths don't need signatures, so they verify # regardless of --sigs-needed. 
From 8cc3ede0fac902c66a9563a9924c54eda66d48c9 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 31 Oct 2025 00:43:45 -0400 Subject: [PATCH 171/213] Add change-log entry for derivation format changes --- doc/manual/rl-next/json-format-changes.md | 47 +++++++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 doc/manual/rl-next/json-format-changes.md diff --git a/doc/manual/rl-next/json-format-changes.md b/doc/manual/rl-next/json-format-changes.md new file mode 100644 index 000000000..c5518ee1b --- /dev/null +++ b/doc/manual/rl-next/json-format-changes.md @@ -0,0 +1,47 @@ +--- +synopsis: "JSON format changes for store path info and derivations" +prs: [] +issues: [] +--- + +JSON formats for store path info and derivations have been updated with new versions and structured fields. + +## Store Path Info JSON (Version 2) + +The store path info JSON format has been updated from version 1 to version 2: + +- **Added `version` field**: + + All store path info JSON now includes `"version": 2`. + +- **Structured `ca` field**: + + Content address is now a structured JSON object instead of a string: + + - Old: `"ca": "fixed:r:sha256:1abc..."` + - New: `"ca": {"method": "nar", "hash": {"algorithm": "sha256", "format": "base64", "hash": "EMIJ+giQ..."}}` + - Still `null` values for input-addressed store objects + +Version 1 format is still accepted when reading for backward compatibility. + +**Affected command**: `nix path-info --json` + +## Derivation JSON (Version 4) + +The derivation JSON format has been updated from version 3 to version 4: + +- **Restructured inputs**: + + Inputs are now nested under an `inputs` object: + + - Old: `"inputSrcs": [...], "inputDrvs": {...}` + - New: `"inputs": {"srcs": [...], "drvs": {...}}` + +- **Consistent content addresses**: + + Floating content-addressed outputs now use structured JSON format. + This is the same format as `ca` in store path info (after the new version).
+ +Version 3 and earlier formats are *not* accepted when reading. + +**Affected command**: `nix derivation`, namely its `show` and `add` sub-commands. From ca787bc3e052d8cb35debbeb321c0a59f8ecbbc0 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 6 Nov 2025 20:42:50 +0100 Subject: [PATCH 172/213] tests: add error tests for builtins.genericClosure Covers error conditions for: - Invalid argument types (not an attrset) - Missing required attributes (startSet, operator) - Type mismatches (startSet/operator not correct type) - Element validation (elements not attrsets, missing key attribute) - Key comparison errors (incompatible types, uncomparable types) - Operator return value validation (not a list) --- ...val-fail-genericClosure-element-missing-key.err.exp | 10 ++++++++++ .../eval-fail-genericClosure-element-missing-key.nix | 4 ++++ ...val-fail-genericClosure-element-not-attrset.err.exp | 10 ++++++++++ .../eval-fail-genericClosure-element-not-attrset.nix | 4 ++++ ...fail-genericClosure-keys-incompatible-types.err.exp | 10 ++++++++++ ...val-fail-genericClosure-keys-incompatible-types.nix | 7 +++++++ .../eval-fail-genericClosure-keys-uncomparable.err.exp | 10 ++++++++++ .../eval-fail-genericClosure-keys-uncomparable.nix | 7 +++++++ .../eval-fail-genericClosure-missing-operator.err.exp | 10 ++++++++++ .../lang/eval-fail-genericClosure-missing-operator.nix | 3 +++ .../eval-fail-genericClosure-missing-startSet.err.exp | 10 ++++++++++ .../lang/eval-fail-genericClosure-missing-startSet.nix | 3 +++ .../lang/eval-fail-genericClosure-not-attrset.err.exp | 10 ++++++++++ .../lang/eval-fail-genericClosure-not-attrset.nix | 1 + ...l-fail-genericClosure-operator-not-function.err.exp | 10 ++++++++++ .../eval-fail-genericClosure-operator-not-function.nix | 4 ++++ .../eval-fail-genericClosure-operator-not-list.err.exp | 10 ++++++++++ .../eval-fail-genericClosure-operator-not-list.nix | 4 ++++ .../eval-fail-genericClosure-startSet-not-list.err.exp | 10 ++++++++++
.../eval-fail-genericClosure-startSet-not-list.nix | 4 ++++ 20 files changed, 141 insertions(+) create mode 100644 tests/functional/lang/eval-fail-genericClosure-element-missing-key.err.exp create mode 100644 tests/functional/lang/eval-fail-genericClosure-element-missing-key.nix create mode 100644 tests/functional/lang/eval-fail-genericClosure-element-not-attrset.err.exp create mode 100644 tests/functional/lang/eval-fail-genericClosure-element-not-attrset.nix create mode 100644 tests/functional/lang/eval-fail-genericClosure-keys-incompatible-types.err.exp create mode 100644 tests/functional/lang/eval-fail-genericClosure-keys-incompatible-types.nix create mode 100644 tests/functional/lang/eval-fail-genericClosure-keys-uncomparable.err.exp create mode 100644 tests/functional/lang/eval-fail-genericClosure-keys-uncomparable.nix create mode 100644 tests/functional/lang/eval-fail-genericClosure-missing-operator.err.exp create mode 100644 tests/functional/lang/eval-fail-genericClosure-missing-operator.nix create mode 100644 tests/functional/lang/eval-fail-genericClosure-missing-startSet.err.exp create mode 100644 tests/functional/lang/eval-fail-genericClosure-missing-startSet.nix create mode 100644 tests/functional/lang/eval-fail-genericClosure-not-attrset.err.exp create mode 100644 tests/functional/lang/eval-fail-genericClosure-not-attrset.nix create mode 100644 tests/functional/lang/eval-fail-genericClosure-operator-not-function.err.exp create mode 100644 tests/functional/lang/eval-fail-genericClosure-operator-not-function.nix create mode 100644 tests/functional/lang/eval-fail-genericClosure-operator-not-list.err.exp create mode 100644 tests/functional/lang/eval-fail-genericClosure-operator-not-list.nix create mode 100644 tests/functional/lang/eval-fail-genericClosure-startSet-not-list.err.exp create mode 100644 tests/functional/lang/eval-fail-genericClosure-startSet-not-list.nix diff --git a/tests/functional/lang/eval-fail-genericClosure-element-missing-key.err.exp 
b/tests/functional/lang/eval-fail-genericClosure-element-missing-key.err.exp new file mode 100644 index 000000000..f1a9e386c --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-element-missing-key.err.exp @@ -0,0 +1,10 @@ +error: + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-element-missing-key.nix:1:1: + 1| builtins.genericClosure { + | ^ + 2| startSet = [ { nokey = 1; } ]; + + … in one of the attrsets generated by (or initially passed to) builtins.genericClosure + + error: attribute 'key' missing diff --git a/tests/functional/lang/eval-fail-genericClosure-element-missing-key.nix b/tests/functional/lang/eval-fail-genericClosure-element-missing-key.nix new file mode 100644 index 000000000..e39e4043b --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-element-missing-key.nix @@ -0,0 +1,4 @@ +builtins.genericClosure { + startSet = [ { nokey = 1; } ]; + operator = x: [ ]; +} diff --git a/tests/functional/lang/eval-fail-genericClosure-element-not-attrset.err.exp b/tests/functional/lang/eval-fail-genericClosure-element-not-attrset.err.exp new file mode 100644 index 000000000..24a3e434b --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-element-not-attrset.err.exp @@ -0,0 +1,10 @@ +error: + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-element-not-attrset.nix:1:1: + 1| builtins.genericClosure { + | ^ + 2| startSet = [ "not an attrset" ]; + + … while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure + + error: expected a set but found a string: "not an attrset" diff --git a/tests/functional/lang/eval-fail-genericClosure-element-not-attrset.nix b/tests/functional/lang/eval-fail-genericClosure-element-not-attrset.nix new file mode 100644 index 000000000..6850be1c2 --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-element-not-attrset.nix @@ -0,0 +1,4 @@ +builtins.genericClosure { + startSet = 
[ "not an attrset" ]; + operator = x: [ ]; +} diff --git a/tests/functional/lang/eval-fail-genericClosure-keys-incompatible-types.err.exp b/tests/functional/lang/eval-fail-genericClosure-keys-incompatible-types.err.exp new file mode 100644 index 000000000..677e6a156 --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-keys-incompatible-types.err.exp @@ -0,0 +1,10 @@ +error: + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-keys-incompatible-types.nix:1:1: + 1| builtins.genericClosure { + | ^ + 2| startSet = [ + + … while comparing the `key` attributes of two genericClosure elements + + error: cannot compare a string with an integer diff --git a/tests/functional/lang/eval-fail-genericClosure-keys-incompatible-types.nix b/tests/functional/lang/eval-fail-genericClosure-keys-incompatible-types.nix new file mode 100644 index 000000000..3335416fd --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-keys-incompatible-types.nix @@ -0,0 +1,7 @@ +builtins.genericClosure { + startSet = [ + { key = 1; } + { key = "string"; } + ]; + operator = x: [ ]; +} diff --git a/tests/functional/lang/eval-fail-genericClosure-keys-uncomparable.err.exp b/tests/functional/lang/eval-fail-genericClosure-keys-uncomparable.err.exp new file mode 100644 index 000000000..d216f77ea --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-keys-uncomparable.err.exp @@ -0,0 +1,10 @@ +error: + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-keys-uncomparable.nix:1:1: + 1| builtins.genericClosure { + | ^ + 2| startSet = [ + + … while comparing the `key` attributes of two genericClosure elements + + error: cannot compare a set with a set; values of that type are incomparable diff --git a/tests/functional/lang/eval-fail-genericClosure-keys-uncomparable.nix b/tests/functional/lang/eval-fail-genericClosure-keys-uncomparable.nix new file mode 100644 index 000000000..6a1915b6a --- /dev/null +++ 
b/tests/functional/lang/eval-fail-genericClosure-keys-uncomparable.nix @@ -0,0 +1,7 @@ +builtins.genericClosure { + startSet = [ + { key = { }; } + { key = { }; } + ]; + operator = x: [ ]; +} diff --git a/tests/functional/lang/eval-fail-genericClosure-missing-operator.err.exp b/tests/functional/lang/eval-fail-genericClosure-missing-operator.err.exp new file mode 100644 index 000000000..0dce0ffd9 --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-missing-operator.err.exp @@ -0,0 +1,10 @@ +error: + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-missing-operator.nix:1:1: + 1| builtins.genericClosure { + | ^ + 2| startSet = [ { key = 1; } ]; + + … in the attrset passed as argument to builtins.genericClosure + + error: attribute 'operator' missing diff --git a/tests/functional/lang/eval-fail-genericClosure-missing-operator.nix b/tests/functional/lang/eval-fail-genericClosure-missing-operator.nix new file mode 100644 index 000000000..0b7c63f6d --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-missing-operator.nix @@ -0,0 +1,3 @@ +builtins.genericClosure { + startSet = [ { key = 1; } ]; +} diff --git a/tests/functional/lang/eval-fail-genericClosure-missing-startSet.err.exp b/tests/functional/lang/eval-fail-genericClosure-missing-startSet.err.exp new file mode 100644 index 000000000..b68c6542a --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-missing-startSet.err.exp @@ -0,0 +1,10 @@ +error: + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-missing-startSet.nix:1:1: + 1| builtins.genericClosure { + | ^ + 2| operator = x: [ ]; + + … in the attrset passed as argument to builtins.genericClosure + + error: attribute 'startSet' missing diff --git a/tests/functional/lang/eval-fail-genericClosure-missing-startSet.nix b/tests/functional/lang/eval-fail-genericClosure-missing-startSet.nix new file mode 100644 index 000000000..b62802986 --- /dev/null +++ 
b/tests/functional/lang/eval-fail-genericClosure-missing-startSet.nix @@ -0,0 +1,3 @@ +builtins.genericClosure { + operator = x: [ ]; +} diff --git a/tests/functional/lang/eval-fail-genericClosure-not-attrset.err.exp b/tests/functional/lang/eval-fail-genericClosure-not-attrset.err.exp new file mode 100644 index 000000000..fd3360310 --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-not-attrset.err.exp @@ -0,0 +1,10 @@ +error: + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-not-attrset.nix:1:1: + 1| builtins.genericClosure "not an attrset" + | ^ + 2| + + … while evaluating the first argument passed to builtins.genericClosure + + error: expected a set but found a string: "not an attrset" diff --git a/tests/functional/lang/eval-fail-genericClosure-not-attrset.nix b/tests/functional/lang/eval-fail-genericClosure-not-attrset.nix new file mode 100644 index 000000000..3998c3432 --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-not-attrset.nix @@ -0,0 +1 @@ +builtins.genericClosure "not an attrset" diff --git a/tests/functional/lang/eval-fail-genericClosure-operator-not-function.err.exp b/tests/functional/lang/eval-fail-genericClosure-operator-not-function.err.exp new file mode 100644 index 000000000..d3c5a627a --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-operator-not-function.err.exp @@ -0,0 +1,10 @@ +error: + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-operator-not-function.nix:1:1: + 1| builtins.genericClosure { + | ^ + 2| startSet = [ { key = 1; } ]; + + … while evaluating the 'operator' attribute passed as argument to builtins.genericClosure + + error: expected a function but found a string: "not a function" diff --git a/tests/functional/lang/eval-fail-genericClosure-operator-not-function.nix b/tests/functional/lang/eval-fail-genericClosure-operator-not-function.nix new file mode 100644 index 000000000..425cd427d --- /dev/null +++ 
b/tests/functional/lang/eval-fail-genericClosure-operator-not-function.nix @@ -0,0 +1,4 @@ +builtins.genericClosure { + startSet = [ { key = 1; } ]; + operator = "not a function"; +} diff --git a/tests/functional/lang/eval-fail-genericClosure-operator-not-list.err.exp b/tests/functional/lang/eval-fail-genericClosure-operator-not-list.err.exp new file mode 100644 index 000000000..c48c27bfe --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-operator-not-list.err.exp @@ -0,0 +1,10 @@ +error: + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-operator-not-list.nix:1:1: + 1| builtins.genericClosure { + | ^ + 2| startSet = [ { key = 1; } ]; + + … while evaluating the return value of the `operator` passed to builtins.genericClosure + + error: expected a list but found a string: "not a list" diff --git a/tests/functional/lang/eval-fail-genericClosure-operator-not-list.nix b/tests/functional/lang/eval-fail-genericClosure-operator-not-list.nix new file mode 100644 index 000000000..26f97c51c --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-operator-not-list.nix @@ -0,0 +1,4 @@ +builtins.genericClosure { + startSet = [ { key = 1; } ]; + operator = x: "not a list"; +} diff --git a/tests/functional/lang/eval-fail-genericClosure-startSet-not-list.err.exp b/tests/functional/lang/eval-fail-genericClosure-startSet-not-list.err.exp new file mode 100644 index 000000000..e711a23f5 --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-startSet-not-list.err.exp @@ -0,0 +1,10 @@ +error: + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-startSet-not-list.nix:1:1: + 1| builtins.genericClosure { + | ^ + 2| startSet = "not a list"; + + … while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure + + error: expected a list but found a string: "not a list" diff --git a/tests/functional/lang/eval-fail-genericClosure-startSet-not-list.nix 
b/tests/functional/lang/eval-fail-genericClosure-startSet-not-list.nix new file mode 100644 index 000000000..834c82f65 --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-startSet-not-list.nix @@ -0,0 +1,4 @@ +builtins.genericClosure { + startSet = "not a list"; + operator = x: [ ]; +} From d262efc240c82745eee0b5d6ce5154472ce0cea0 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 6 Nov 2025 21:30:40 +0100 Subject: [PATCH 173/213] libexpr: improve error messages for builtins.genericClosure Show which element(s) are involved at each error point: - When an element is missing the "key" attribute, show the element - When an element is not an attribute set, show the element - When comparing keys fails, show both elements being compared - When calling operator fails, show which element was being processed This provides concrete context using ValuePrinter with errorPrintOptions. Note: errorPrintOptions uses maxDepth=10 by default, which may print quite deeply nested structures in error messages. This could potentially be overwhelming, but follows the existing default for error contexts. 
--- src/libexpr-tests/error_traces.cc | 59 ++++++++--- src/libexpr/primops.cc | 100 +++++++++++++----- ...nericClosure-deeply-nested-element.err.exp | 18 ++++ ...l-genericClosure-deeply-nested-element.nix | 35 ++++++ ...genericClosure-element-missing-key.err.exp | 2 +- ...genericClosure-element-not-attrset.err.exp | 2 +- ...ricClosure-keys-incompatible-types.err.exp | 6 +- ...l-genericClosure-keys-uncomparable.err.exp | 6 +- ...l-genericClosure-operator-not-list.err.exp | 2 + tests/functional/lang/infinite-nesting.nix | 4 + 10 files changed, 188 insertions(+), 46 deletions(-) create mode 100644 tests/functional/lang/eval-fail-genericClosure-deeply-nested-element.err.exp create mode 100644 tests/functional/lang/eval-fail-genericClosure-deeply-nested-element.nix create mode 100644 tests/functional/lang/infinite-nesting.nix diff --git a/src/libexpr-tests/error_traces.cc b/src/libexpr-tests/error_traces.cc index 7e7b5eb84..974e4d281 100644 --- a/src/libexpr-tests/error_traces.cc +++ b/src/libexpr-tests/error_traces.cc @@ -165,35 +165,48 @@ TEST_F(ErrorTraceTest, genericClosure) HintFmt("expected a function but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), HintFmt("while evaluating the 'operator' attribute passed as argument to builtins.genericClosure")); - ASSERT_TRACE2( + ASSERT_TRACE3( "genericClosure { startSet = [{ key = 1;}]; operator = item: true; }", TypeError, HintFmt("expected a list but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), - HintFmt("while evaluating the return value of the `operator` passed to builtins.genericClosure")); + HintFmt("while evaluating the return value of the `operator` passed to builtins.genericClosure"), + HintFmt( + "while calling %s on genericClosure element %s", + "operator", + Uncolored("{ key = " ANSI_CYAN "1" ANSI_NORMAL "; }"))); - ASSERT_TRACE2( + ASSERT_TRACE3( "genericClosure { startSet = [{ key = 1;}]; operator = item: [ true ]; }", TypeError, HintFmt("expected a set but found 
%s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), - HintFmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); + HintFmt(""), + HintFmt("in genericClosure element %s", Uncolored(ANSI_CYAN "true" ANSI_NORMAL))); - ASSERT_TRACE2( + ASSERT_TRACE3( "genericClosure { startSet = [{ key = 1;}]; operator = item: [ {} ]; }", TypeError, HintFmt("attribute '%s' missing", "key"), - HintFmt("in one of the attrsets generated by (or initially passed to) builtins.genericClosure")); + HintFmt(""), + HintFmt("in genericClosure element %s", Uncolored("{ }"))); - ASSERT_TRACE2( + ASSERT_TRACE3( "genericClosure { startSet = [{ key = 1;}]; operator = item: [{ key = ''a''; }]; }", EvalError, - HintFmt("cannot compare %s with %s", "a string", "an integer"), - HintFmt("while comparing the `key` attributes of two genericClosure elements")); + HintFmt( + "cannot compare %s with %s; values are %s and %s", + "a string", + "an integer", + Uncolored(ANSI_MAGENTA "\"a\"" ANSI_NORMAL), + Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), + HintFmt("with element %s", Uncolored("{ key = " ANSI_CYAN "1" ANSI_NORMAL "; }")), + HintFmt("while comparing element %s", Uncolored("{ key = " ANSI_MAGENTA "\"a\"" ANSI_NORMAL "; }"))); - ASSERT_TRACE2( + ASSERT_TRACE3( "genericClosure { startSet = [ true ]; operator = item: [{ key = ''a''; }]; }", TypeError, HintFmt("expected a set but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), - HintFmt("while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure")); + HintFmt(""), + HintFmt("in genericClosure element %s", Uncolored(ANSI_CYAN "true" ANSI_NORMAL))); } TEST_F(ErrorTraceTest, replaceStrings) @@ -1050,17 +1063,35 @@ TEST_F(ErrorTraceTest, bitXor) TEST_F(ErrorTraceTest, lessThan) { - ASSERT_TRACE1("lessThan 1 \"foo\"", EvalError, HintFmt("cannot compare %s with %s", "an integer", "a string")); + ASSERT_TRACE1( + "lessThan 1 \"foo\"", + 
EvalError, + HintFmt( + "cannot compare %s with %s; values are %s and %s", + "an integer", + "a string", + Uncolored(ANSI_CYAN "1" ANSI_NORMAL), + Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL))); ASSERT_TRACE1( "lessThan {} {}", EvalError, - HintFmt("cannot compare %s with %s; values of that type are incomparable", "a set", "a set")); + HintFmt( + "cannot compare %s with %s; values of that type are incomparable (values are %s and %s)", + "a set", + "a set", + Uncolored("{ }"), + Uncolored("{ }"))); ASSERT_TRACE2( "lessThan [ 1 2 ] [ \"foo\" ]", EvalError, - HintFmt("cannot compare %s with %s", "an integer", "a string"), + HintFmt( + "cannot compare %s with %s; values are %s and %s", + "an integer", + "a string", + Uncolored(ANSI_CYAN "1" ANSI_NORMAL), + Uncolored(ANSI_MAGENTA "\"foo\"" ANSI_NORMAL)), HintFmt("while comparing two list elements")); } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index d1aae64fa..0bd03b232 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -681,7 +681,14 @@ struct CompareValues if (v1->type() == nInt && v2->type() == nFloat) return v1->integer().value < v2->fpoint(); if (v1->type() != v2->type()) - state.error("cannot compare %s with %s", showType(*v1), showType(*v2)).debugThrow(); + state + .error( + "cannot compare %s with %s; values are %s and %s", + showType(*v1), + showType(*v2), + ValuePrinter(state, *v1, errorPrintOptions), + ValuePrinter(state, *v2, errorPrintOptions)) + .debugThrow(); // Allow selecting a subset of enum values #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wswitch-enum" @@ -711,7 +718,11 @@ struct CompareValues default: state .error( - "cannot compare %s with %s; values of that type are incomparable", showType(*v1), showType(*v2)) + "cannot compare %s with %s; values of that type are incomparable (values are %s and %s)", + showType(*v1), + showType(*v2), + ValuePrinter(state, *v1, errorPrintOptions), + ValuePrinter(state, *v2, errorPrintOptions)) .debugThrow(); 
#pragma GCC diagnostic pop } @@ -757,42 +768,79 @@ static void prim_genericClosure(EvalState & state, const PosIdx pos, Value ** ar `workSet', adding the result to `workSet', continuing until no new elements are found. */ ValueList res; - // `doneKeys' doesn't need to be a GC root, because its values are - // reachable from res. - auto cmp = CompareValues(state, noPos, "while comparing the `key` attributes of two genericClosure elements"); - std::set doneKeys(cmp); + // Track which element each key came from + auto cmp = CompareValues(state, noPos, ""); + std::map keyToElem(cmp); while (!workSet.empty()) { Value * e = *(workSet.begin()); workSet.pop_front(); - state.forceAttrs( - *e, - noPos, - "while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure"); + try { + state.forceAttrs(*e, noPos, ""); + } catch (Error & err) { + err.addTrace(nullptr, "in genericClosure element %s", ValuePrinter(state, *e, errorPrintOptions)); + throw; + } - auto key = state.getAttr( - state.s.key, - e->attrs(), - "in one of the attrsets generated by (or initially passed to) builtins.genericClosure"); + const Attr * key; + try { + key = state.getAttr(state.s.key, e->attrs(), ""); + } catch (Error & err) { + err.addTrace(nullptr, "in genericClosure element %s", ValuePrinter(state, *e, errorPrintOptions)); + throw; + } state.forceValue(*key->value, noPos); - if (!doneKeys.insert(key->value).second) - continue; + try { + auto [it, inserted] = keyToElem.insert({key->value, e}); + if (!inserted) + continue; + } catch (Error & err) { + // Try to find which element we're comparing against + Value * otherElem = nullptr; + for (auto & [otherKey, elem] : keyToElem) { + try { + cmp(key->value, otherKey); + } catch (Error &) { + // Found the element we're comparing against + otherElem = elem; + break; + } + } + if (otherElem) { + // Traces are printed in reverse order; pre-swap them. 
+ err.addTrace(nullptr, "with element %s", ValuePrinter(state, *otherElem, errorPrintOptions)); + err.addTrace(nullptr, "while comparing element %s", ValuePrinter(state, *e, errorPrintOptions)); + } else { + // Couldn't find the specific element, just show current + err.addTrace(nullptr, "while checking key of element %s", ValuePrinter(state, *e, errorPrintOptions)); + } + throw; + } res.push_back(e); /* Call the `operator' function with `e' as argument. */ Value newElements; - state.callFunction(*op->value, {&e, 1}, newElements, noPos); - state.forceList( - newElements, - noPos, - "while evaluating the return value of the `operator` passed to builtins.genericClosure"); + try { + state.callFunction(*op->value, {&e, 1}, newElements, noPos); + state.forceList( + newElements, + noPos, + "while evaluating the return value of the `operator` passed to builtins.genericClosure"); - /* Add the values returned by the operator to the work set. */ - for (auto elem : newElements.listView()) { - state.forceValue(*elem, noPos); // "while evaluating one one of the elements returned by the `operator` - // passed to builtins.genericClosure"); - workSet.push_back(elem); + /* Add the values returned by the operator to the work set. 
*/ + for (auto elem : newElements.listView()) { + state.forceValue(*elem, noPos); // "while evaluating one one of the elements returned by the `operator` + // passed to builtins.genericClosure"); + workSet.push_back(elem); + } + } catch (Error & err) { + err.addTrace( + nullptr, + "while calling %s on genericClosure element %s", + state.symbols[state.s.operator_], + ValuePrinter(state, *e, errorPrintOptions)); + throw; } } diff --git a/tests/functional/lang/eval-fail-genericClosure-deeply-nested-element.err.exp b/tests/functional/lang/eval-fail-genericClosure-deeply-nested-element.err.exp new file mode 100644 index 000000000..a5567cbfc --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-deeply-nested-element.err.exp @@ -0,0 +1,18 @@ +error: + … while calling the 'seq' builtin + at /pwd/lang/eval-fail-genericClosure-deeply-nested-element.nix:25:1: + 24| in + 25| builtins.seq finiteVal ( + | ^ + 26| builtins.genericClosure { + + … while calling the 'genericClosure' builtin + at /pwd/lang/eval-fail-genericClosure-deeply-nested-element.nix:26:3: + 25| builtins.seq finiteVal ( + 26| builtins.genericClosure { + | ^ + 27| startSet = [ + + … in genericClosure element { finite = { a0 = { a1 = { a2 = { a3 = { a4 = { a5 = { a6 = { a7 = { a8 = { ... 
}; }; }; }; }; }; }; }; }; }; «1 attribute elided» } + + error: attribute 'key' missing diff --git a/tests/functional/lang/eval-fail-genericClosure-deeply-nested-element.nix b/tests/functional/lang/eval-fail-genericClosure-deeply-nested-element.nix new file mode 100644 index 000000000..abc0591bb --- /dev/null +++ b/tests/functional/lang/eval-fail-genericClosure-deeply-nested-element.nix @@ -0,0 +1,35 @@ +let + finite = { + a0 = { + a1 = { + a2 = { + a3 = { + a4 = { + a5 = { + a6 = { + a7 = { + a8 = { + a9 = "deep"; + }; + }; + }; + }; + }; + }; + }; + }; + }; + }; + finiteVal = builtins.deepSeq finite finite; +in +builtins.seq finiteVal ( + builtins.genericClosure { + startSet = [ + { + infinite = import ./infinite-nesting.nix; + finite = finiteVal; + } + ]; + operator = x: [ (import ./infinite-nesting.nix) ]; + } +) diff --git a/tests/functional/lang/eval-fail-genericClosure-element-missing-key.err.exp b/tests/functional/lang/eval-fail-genericClosure-element-missing-key.err.exp index f1a9e386c..3ba2a7ea8 100644 --- a/tests/functional/lang/eval-fail-genericClosure-element-missing-key.err.exp +++ b/tests/functional/lang/eval-fail-genericClosure-element-missing-key.err.exp @@ -5,6 +5,6 @@ error: | ^ 2| startSet = [ { nokey = 1; } ]; - … in one of the attrsets generated by (or initially passed to) builtins.genericClosure + … in genericClosure element { nokey = 1; } error: attribute 'key' missing diff --git a/tests/functional/lang/eval-fail-genericClosure-element-not-attrset.err.exp b/tests/functional/lang/eval-fail-genericClosure-element-not-attrset.err.exp index 24a3e434b..b469f6043 100644 --- a/tests/functional/lang/eval-fail-genericClosure-element-not-attrset.err.exp +++ b/tests/functional/lang/eval-fail-genericClosure-element-not-attrset.err.exp @@ -5,6 +5,6 @@ error: | ^ 2| startSet = [ "not an attrset" ]; - … while evaluating one of the elements generated by (or initially passed to) builtins.genericClosure + … in genericClosure element "not an attrset" error: 
expected a set but found a string: "not an attrset" diff --git a/tests/functional/lang/eval-fail-genericClosure-keys-incompatible-types.err.exp b/tests/functional/lang/eval-fail-genericClosure-keys-incompatible-types.err.exp index 677e6a156..04b458a48 100644 --- a/tests/functional/lang/eval-fail-genericClosure-keys-incompatible-types.err.exp +++ b/tests/functional/lang/eval-fail-genericClosure-keys-incompatible-types.err.exp @@ -5,6 +5,8 @@ error: | ^ 2| startSet = [ - … while comparing the `key` attributes of two genericClosure elements + … while comparing element { key = "string"; } - error: cannot compare a string with an integer + … with element { key = 1; } + + error: cannot compare a string with an integer; values are "string" and 1 diff --git a/tests/functional/lang/eval-fail-genericClosure-keys-uncomparable.err.exp b/tests/functional/lang/eval-fail-genericClosure-keys-uncomparable.err.exp index d216f77ea..97e2bed02 100644 --- a/tests/functional/lang/eval-fail-genericClosure-keys-uncomparable.err.exp +++ b/tests/functional/lang/eval-fail-genericClosure-keys-uncomparable.err.exp @@ -5,6 +5,8 @@ error: | ^ 2| startSet = [ - … while comparing the `key` attributes of two genericClosure elements + … while comparing element { key = { }; } - error: cannot compare a set with a set; values of that type are incomparable + … with element { key = { }; } + + error: cannot compare a set with a set; values of that type are incomparable (values are { } and { }) diff --git a/tests/functional/lang/eval-fail-genericClosure-operator-not-list.err.exp b/tests/functional/lang/eval-fail-genericClosure-operator-not-list.err.exp index c48c27bfe..49d478033 100644 --- a/tests/functional/lang/eval-fail-genericClosure-operator-not-list.err.exp +++ b/tests/functional/lang/eval-fail-genericClosure-operator-not-list.err.exp @@ -5,6 +5,8 @@ error: | ^ 2| startSet = [ { key = 1; } ]; + … while calling operator on genericClosure element { key = 1; } + … while evaluating the return value of the 
`operator` passed to builtins.genericClosure error: expected a list but found a string: "not a list" diff --git a/tests/functional/lang/infinite-nesting.nix b/tests/functional/lang/infinite-nesting.nix new file mode 100644 index 000000000..1f937e63d --- /dev/null +++ b/tests/functional/lang/infinite-nesting.nix @@ -0,0 +1,4 @@ +let + mkInfinite = i: { "a${toString i}" = mkInfinite (i + 1); }; +in +mkInfinite 0 From 3ee8e45f8e28c90547e438b47f0d0bcda7fe6237 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Fri, 7 Nov 2025 00:27:39 +0100 Subject: [PATCH 174/213] tests: Replace fragile genericClosure unit tests We now have functional tests for these. The unit tests added negligible value while imposing a much higher maintenance cost. The maintenance cost is high: - No automatic accept option - They broke 5+ times during this session due to implementation changes (trace count, ordering) - They require understanding ANSI escape codes, Uncolored() wrappers, trace reversal - They test empty traces HintFmt("") from withTrace(pos, "") - pure implementation detail - They're fragile: adding any trace anywhere breaks the exact count assertions The additional value over functional tests is minimal: - Functional tests already verify the error message - Functional tests already show trace order and content (as users see it, helps review) - Unit tests verify "exactly 3 traces, not 2 or 4" - but users don't count traces - Unit tests verify empty traces exist - but users never see them The white-box testing catches the wrong things: - It catches "you added helpful context" as a failure - It doesn't catch "the context is confusing" (which functional tests would show) - It enforces implementation details that should be allowed to evolve --- src/libexpr-tests/error_traces.cc | 70 ------------------------------- 1 file changed, 70 deletions(-) diff --git a/src/libexpr-tests/error_traces.cc b/src/libexpr-tests/error_traces.cc index 974e4d281..e722cc484 100644 --- 
a/src/libexpr-tests/error_traces.cc +++ b/src/libexpr-tests/error_traces.cc @@ -139,76 +139,6 @@ TEST_F(ErrorTraceTest, NestedThrows) #define ASSERT_DERIVATION_TRACE3(args, type, message, context1, context2) \ ASSERT_TRACE4(args, type, message, context1, context2, DERIVATION_TRACE_HINTFMT("foo")) -TEST_F(ErrorTraceTest, genericClosure) -{ - ASSERT_TRACE2( - "genericClosure 1", - TypeError, - HintFmt("expected a set but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the first argument passed to builtins.genericClosure")); - - ASSERT_TRACE2( - "genericClosure {}", - TypeError, - HintFmt("attribute '%s' missing", "startSet"), - HintFmt("in the attrset passed as argument to builtins.genericClosure")); - - ASSERT_TRACE2( - "genericClosure { startSet = 1; }", - TypeError, - HintFmt("expected a list but found %s: %s", "an integer", Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("while evaluating the 'startSet' attribute passed as argument to builtins.genericClosure")); - - ASSERT_TRACE2( - "genericClosure { startSet = [{ key = 1;}]; operator = true; }", - TypeError, - HintFmt("expected a function but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), - HintFmt("while evaluating the 'operator' attribute passed as argument to builtins.genericClosure")); - - ASSERT_TRACE3( - "genericClosure { startSet = [{ key = 1;}]; operator = item: true; }", - TypeError, - HintFmt("expected a list but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), - HintFmt("while evaluating the return value of the `operator` passed to builtins.genericClosure"), - HintFmt( - "while calling %s on genericClosure element %s", - "operator", - Uncolored("{ key = " ANSI_CYAN "1" ANSI_NORMAL "; }"))); - - ASSERT_TRACE3( - "genericClosure { startSet = [{ key = 1;}]; operator = item: [ true ]; }", - TypeError, - HintFmt("expected a set but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), - HintFmt(""), 
- HintFmt("in genericClosure element %s", Uncolored(ANSI_CYAN "true" ANSI_NORMAL))); - - ASSERT_TRACE3( - "genericClosure { startSet = [{ key = 1;}]; operator = item: [ {} ]; }", - TypeError, - HintFmt("attribute '%s' missing", "key"), - HintFmt(""), - HintFmt("in genericClosure element %s", Uncolored("{ }"))); - - ASSERT_TRACE3( - "genericClosure { startSet = [{ key = 1;}]; operator = item: [{ key = ''a''; }]; }", - EvalError, - HintFmt( - "cannot compare %s with %s; values are %s and %s", - "a string", - "an integer", - Uncolored(ANSI_MAGENTA "\"a\"" ANSI_NORMAL), - Uncolored(ANSI_CYAN "1" ANSI_NORMAL)), - HintFmt("with element %s", Uncolored("{ key = " ANSI_CYAN "1" ANSI_NORMAL "; }")), - HintFmt("while comparing element %s", Uncolored("{ key = " ANSI_MAGENTA "\"a\"" ANSI_NORMAL "; }"))); - - ASSERT_TRACE3( - "genericClosure { startSet = [ true ]; operator = item: [{ key = ''a''; }]; }", - TypeError, - HintFmt("expected a set but found %s: %s", "a Boolean", Uncolored(ANSI_CYAN "true" ANSI_NORMAL)), - HintFmt(""), - HintFmt("in genericClosure element %s", Uncolored(ANSI_CYAN "true" ANSI_NORMAL))); -} - TEST_F(ErrorTraceTest, replaceStrings) { ASSERT_TRACE2( From 9c04c629e5e39c7ec55bbaf3590bf3b553faa2c2 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 6 Nov 2025 16:51:19 -0500 Subject: [PATCH 175/213] `UnkeyedValidPathInfo::fromJSON` Remove support for older version It turns out this code path is only used for unit tests (to ensure our JSON formats are possible to parse by other code, elsewhere). No user-facing functionality consumes this format. Therefore, let's drop the old version parsing support. 
--- doc/manual/rl-next/json-format-changes.md | 2 +- src/libstore/derivation-options.cc | 9 ------- src/libstore/path-info.cc | 30 ++++++++-------------- src/libutil/include/nix/util/json-utils.hh | 9 +++++++ 4 files changed, 20 insertions(+), 30 deletions(-) diff --git a/doc/manual/rl-next/json-format-changes.md b/doc/manual/rl-next/json-format-changes.md index c5518ee1b..ef442c27e 100644 --- a/doc/manual/rl-next/json-format-changes.md +++ b/doc/manual/rl-next/json-format-changes.md @@ -22,7 +22,7 @@ The store path info JSON format has been updated from version 1 to version 2: - New: `"ca": {"method": "nar", "hash": {"algorithm": "sha256", "format": "base64", "hash": "EMIJ+giQ..."}}` - Still `null` values for input-addressed store objects -Version 1 format is still accepted when reading for backward compatibility. +Nix currently only produces, and doesn't consume this format. **Affected command**: `nix path-info --json` diff --git a/src/libstore/derivation-options.cc b/src/libstore/derivation-options.cc index 75313841c..265f28e80 100644 --- a/src/libstore/derivation-options.cc +++ b/src/libstore/derivation-options.cc @@ -423,15 +423,6 @@ void adl_serializer::to_json(json & json, const DerivationOpt json["allowSubstitutes"] = o.allowSubstitutes; } -template -static inline std::optional ptrToOwned(const json * ptr) -{ - if (ptr) - return std::optional{*ptr}; - else - return std::nullopt; -} - DerivationOptions::OutputChecks adl_serializer::from_json(const json & json_) { auto & json = getObject(json_); diff --git a/src/libstore/path-info.cc b/src/libstore/path-info.cc index c535d08f4..7d8bf4911 100644 --- a/src/libstore/path-info.cc +++ b/src/libstore/path-info.cc @@ -192,13 +192,10 @@ UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig & store auto & json = getObject(_json); - // Check version (optional for backward compatibility) - nlohmann::json::number_unsigned_t version = 1; - if (json.contains("version")) { - version = 
getUnsigned(valueAt(json, "version")); - if (version != 1 && version != 2) { - throw Error("Unsupported path info JSON format version %d, expected 1 through 2", version); - } + { + auto version = getUnsigned(valueAt(json, "version")); + if (version != 2) + throw Error("Unsupported path info JSON format version %d, only version 2 is currently supported", version); } res.narHash = Hash::parseAny(getString(valueAt(json, "narHash")), std::nullopt); @@ -213,19 +210,12 @@ UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig & store throw; } - // New format as this as nullable but mandatory field; handling - // missing is for back-compat. - if (auto * rawCa0 = optionalValueAt(json, "ca")) - if (auto * rawCa = getNullable(*rawCa0)) - switch (version) { - case 1: - // old string format also used in SQLite DB and .narinfo - res.ca = ContentAddress::parse(getString(*rawCa)); - break; - case 2 ... std::numeric_limits::max(): - res.ca = *rawCa; - break; - } + try { + res.ca = ptrToOwned(getNullable(valueAt(json, "ca"))); + } catch (Error & e) { + e.addTrace({}, "while reading key 'ca'"); + throw; + } if (auto * rawDeriver0 = optionalValueAt(json, "deriver")) if (auto * rawDeriver = getNullable(*rawDeriver0)) diff --git a/src/libutil/include/nix/util/json-utils.hh b/src/libutil/include/nix/util/json-utils.hh index 7a3fe4f36..ec513ca25 100644 --- a/src/libutil/include/nix/util/json-utils.hh +++ b/src/libutil/include/nix/util/json-utils.hh @@ -114,4 +114,13 @@ struct adl_serializer> } }; +template +static inline std::optional ptrToOwned(const json * ptr) +{ + if (ptr) + return std::optional{*ptr}; + else + return std::nullopt; +} + } // namespace nlohmann From 80b1d7b87a80667fb2c7c58b6bf43c73aec5de15 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 6 Nov 2025 16:50:15 -0500 Subject: [PATCH 176/213] Better version error for JSON derivation decoding It now says which (other) version was encountered instead --- src/libstore/derivations.cc | 14 +++++++++++--- 
1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc index 31ca167f9..f96109a6c 100644 --- a/src/libstore/derivations.cc +++ b/src/libstore/derivations.cc @@ -1381,13 +1381,15 @@ adl_serializer::from_json(const json & _json, const Experiment } } +static unsigned constexpr expectedJsonVersionDerivation = 4; + void adl_serializer::to_json(json & res, const Derivation & d) { res = nlohmann::json::object(); res["name"] = d.name; - res["version"] = 4; + res["version"] = expectedJsonVersionDerivation; { nlohmann::json & outputsObj = res["outputs"]; @@ -1446,8 +1448,14 @@ Derivation adl_serializer::from_json(const json & _json, const Exper res.name = getString(valueAt(json, "name")); - if (valueAt(json, "version") != 4) - throw Error("Only derivation format version 4 is currently supported."); + { + auto version = getUnsigned(valueAt(json, "version")); + if (valueAt(json, "version") != expectedJsonVersionDerivation) + throw Error( + "Unsupported derivation JSON format version %d, only format version %d is currently supported.", + version, + expectedJsonVersionDerivation); + } try { auto outputs = getObject(valueAt(json, "outputs")); From 4f1c8f62c38c5f9325eaa342cc0e625d45703a35 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 6 Nov 2025 17:07:15 -0500 Subject: [PATCH 177/213] Further cleans up store object info JSON v2 Since we haven't released v2 yet (2.32 has v1) we can just update this in-place and avoid version churn. Note that as a nice side effect of using the standard `Hash` JSON impl, we don't need this `hashFormat` parameter anymore. 
--- doc/manual/rl-next/json-format-changes.md | 8 +++++ .../json/schema/store-object-info-v2.yaml | 4 +-- src/libstore-tests/data/nar-info/impure.json | 12 +++++-- src/libstore-tests/data/nar-info/pure.json | 6 +++- .../data/path-info/empty_impure.json | 6 +++- .../data/path-info/empty_pure.json | 6 +++- src/libstore-tests/data/path-info/impure.json | 6 +++- src/libstore-tests/data/path-info/pure.json | 6 +++- src/libstore-tests/nar-info.cc | 36 +++++++++---------- src/libstore-tests/path-info.cc | 2 +- src/libstore/include/nix/store/nar-info.hh | 2 +- src/libstore/include/nix/store/path-info.hh | 2 +- src/libstore/nar-info.cc | 22 ++++++------ src/libstore/path-info.cc | 7 ++-- src/nix/path-info.cc | 2 +- tests/functional/path-info.sh | 12 +++++-- 16 files changed, 91 insertions(+), 48 deletions(-) diff --git a/doc/manual/rl-next/json-format-changes.md b/doc/manual/rl-next/json-format-changes.md index ef442c27e..bd7e11243 100644 --- a/doc/manual/rl-next/json-format-changes.md +++ b/doc/manual/rl-next/json-format-changes.md @@ -22,6 +22,14 @@ The store path info JSON format has been updated from version 1 to version 2: - New: `"ca": {"method": "nar", "hash": {"algorithm": "sha256", "format": "base64", "hash": "EMIJ+giQ..."}}` - Still `null` values for input-addressed store objects +- **Structured hash fields**: + + Hash values (`narHash` and `downloadHash`) are now structured JSON objects instead of strings: + + - Old: `"narHash": "sha256:FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="` + - New: `"narHash": {"algorithm": "sha256", "format": "base64", "hash": "FePFYIlM..."}` + - Same structure applies to `downloadHash` in NAR info contexts + Nix currently only produces, and doesn't consume this format. 
**Affected command**: `nix path-info --json` diff --git a/doc/manual/source/protocols/json/schema/store-object-info-v2.yaml b/doc/manual/source/protocols/json/schema/store-object-info-v2.yaml index 4f442e0c3..44d9e5eae 100644 --- a/doc/manual/source/protocols/json/schema/store-object-info-v2.yaml +++ b/doc/manual/source/protocols/json/schema/store-object-info-v2.yaml @@ -71,7 +71,7 @@ $defs: Note: This field may not be present in all contexts, such as when the path is used as the key and the the store object info the value in map. narHash: - type: string + "$ref": "./hash-v1.yaml" title: NAR Hash description: | Hash of the [file system object](@docroot@/store/file-system-object.md) part of the store object when serialized as a [Nix Archive](@docroot@/store/file-system-object/content-address.md#serial-nix-archive). @@ -229,7 +229,7 @@ $defs: > This is an impure "`.narinfo`" field that may not be included in certain contexts. downloadHash: - type: string + "$ref": "./hash-v1.yaml" title: Download Hash description: | A digest for the compressed archive itself, as opposed to the data contained within. 
diff --git a/src/libstore-tests/data/nar-info/impure.json b/src/libstore-tests/data/nar-info/impure.json index f35ff990b..13cfa8639 100644 --- a/src/libstore-tests/data/nar-info/impure.json +++ b/src/libstore-tests/data/nar-info/impure.json @@ -9,9 +9,17 @@ }, "compression": "xz", "deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", - "downloadHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "downloadHash": { + "algorithm": "sha256", + "format": "base64", + "hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=" + }, "downloadSize": 4029176, - "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narHash": { + "algorithm": "sha256", + "format": "base64", + "hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=" + }, "narSize": 34878, "references": [ "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", diff --git a/src/libstore-tests/data/nar-info/pure.json b/src/libstore-tests/data/nar-info/pure.json index 2c5cb3bde..470f92da9 100644 --- a/src/libstore-tests/data/nar-info/pure.json +++ b/src/libstore-tests/data/nar-info/pure.json @@ -7,7 +7,11 @@ }, "method": "nar" }, - "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narHash": { + "algorithm": "sha256", + "format": "base64", + "hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=" + }, "narSize": 34878, "references": [ "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", diff --git a/src/libstore-tests/data/path-info/empty_impure.json b/src/libstore-tests/data/path-info/empty_impure.json index 381acaa03..2fcd2078c 100644 --- a/src/libstore-tests/data/path-info/empty_impure.json +++ b/src/libstore-tests/data/path-info/empty_impure.json @@ -1,7 +1,11 @@ { "ca": null, "deriver": null, - "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narHash": { + "algorithm": "sha256", + "format": "base64", + "hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=" + }, "narSize": 0, "references": [], "registrationTime": null, diff --git 
a/src/libstore-tests/data/path-info/empty_pure.json b/src/libstore-tests/data/path-info/empty_pure.json index 6d3fa646b..365e2f646 100644 --- a/src/libstore-tests/data/path-info/empty_pure.json +++ b/src/libstore-tests/data/path-info/empty_pure.json @@ -1,6 +1,10 @@ { "ca": null, - "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narHash": { + "algorithm": "sha256", + "format": "base64", + "hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=" + }, "narSize": 0, "references": [], "version": 2 diff --git a/src/libstore-tests/data/path-info/impure.json b/src/libstore-tests/data/path-info/impure.json index 141b38a16..5e9944e5a 100644 --- a/src/libstore-tests/data/path-info/impure.json +++ b/src/libstore-tests/data/path-info/impure.json @@ -8,7 +8,11 @@ "method": "nar" }, "deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", - "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narHash": { + "algorithm": "sha256", + "format": "base64", + "hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=" + }, "narSize": 34878, "references": [ "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", diff --git a/src/libstore-tests/data/path-info/pure.json b/src/libstore-tests/data/path-info/pure.json index 2c5cb3bde..470f92da9 100644 --- a/src/libstore-tests/data/path-info/pure.json +++ b/src/libstore-tests/data/path-info/pure.json @@ -7,7 +7,11 @@ }, "method": "nar" }, - "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=", + "narHash": { + "algorithm": "sha256", + "format": "base64", + "hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=" + }, "narSize": 34878, "references": [ "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", diff --git a/src/libstore-tests/nar-info.cc b/src/libstore-tests/nar-info.cc index 751c5e305..41faa9274 100644 --- a/src/libstore-tests/nar-info.cc +++ b/src/libstore-tests/nar-info.cc @@ -59,24 +59,24 @@ static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo) return info; } -#define 
JSON_TEST(STEM, PURE) \ - TEST_F(NarInfoTest, NarInfo_##STEM##_from_json) \ - { \ - readTest(#STEM, [&](const auto & encoded_) { \ - auto encoded = json::parse(encoded_); \ - auto expected = makeNarInfo(*store, PURE); \ - NarInfo got = NarInfo::fromJSON(*store, expected.path, encoded); \ - ASSERT_EQ(got, expected); \ - }); \ - } \ - \ - TEST_F(NarInfoTest, NarInfo_##STEM##_to_json) \ - { \ - writeTest( \ - #STEM, \ - [&]() -> json { return makeNarInfo(*store, PURE).toJSON(*store, PURE, HashFormat::SRI); }, \ - [](const auto & file) { return json::parse(readFile(file)); }, \ - [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ +#define JSON_TEST(STEM, PURE) \ + TEST_F(NarInfoTest, NarInfo_##STEM##_from_json) \ + { \ + readTest(#STEM, [&](const auto & encoded_) { \ + auto encoded = json::parse(encoded_); \ + auto expected = makeNarInfo(*store, PURE); \ + NarInfo got = NarInfo::fromJSON(*store, expected.path, encoded); \ + ASSERT_EQ(got, expected); \ + }); \ + } \ + \ + TEST_F(NarInfoTest, NarInfo_##STEM##_to_json) \ + { \ + writeTest( \ + #STEM, \ + [&]() -> json { return makeNarInfo(*store, PURE).toJSON(*store, PURE); }, \ + [](const auto & file) { return json::parse(readFile(file)); }, \ + [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ } JSON_TEST(pure, false) diff --git a/src/libstore-tests/path-info.cc b/src/libstore-tests/path-info.cc index 63310c1c3..a40b26149 100644 --- a/src/libstore-tests/path-info.cc +++ b/src/libstore-tests/path-info.cc @@ -80,7 +80,7 @@ static UnkeyedValidPathInfo makeFull(const Store & store, bool includeImpureInfo { \ writeTest( \ #STEM, \ - [&]() -> json { return OBJ.toJSON(*store, PURE, HashFormat::SRI); }, \ + [&]() -> json { return OBJ.toJSON(*store, PURE); }, \ [](const auto & file) { return json::parse(readFile(file)); }, \ [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ } diff --git 
a/src/libstore/include/nix/store/nar-info.hh b/src/libstore/include/nix/store/nar-info.hh index 1684837c6..34606a89b 100644 --- a/src/libstore/include/nix/store/nar-info.hh +++ b/src/libstore/include/nix/store/nar-info.hh @@ -42,7 +42,7 @@ struct NarInfo : ValidPathInfo std::string to_string(const StoreDirConfig & store) const; - nlohmann::json toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const override; + nlohmann::json toJSON(const StoreDirConfig & store, bool includeImpureInfo) const override; static NarInfo fromJSON(const StoreDirConfig & store, const StorePath & path, const nlohmann::json & json); }; diff --git a/src/libstore/include/nix/store/path-info.hh b/src/libstore/include/nix/store/path-info.hh index cbc5abdb4..0f00a14b7 100644 --- a/src/libstore/include/nix/store/path-info.hh +++ b/src/libstore/include/nix/store/path-info.hh @@ -117,7 +117,7 @@ struct UnkeyedValidPathInfo * @param includeImpureInfo If true, variable elements such as the * registration time are included. 
*/ - virtual nlohmann::json toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const; + virtual nlohmann::json toJSON(const StoreDirConfig & store, bool includeImpureInfo) const; static UnkeyedValidPathInfo fromJSON(const StoreDirConfig & store, const nlohmann::json & json); }; diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc index 6f1abb273..4d4fb7de2 100644 --- a/src/libstore/nar-info.cc +++ b/src/libstore/nar-info.cc @@ -130,11 +130,11 @@ std::string NarInfo::to_string(const StoreDirConfig & store) const return res; } -nlohmann::json NarInfo::toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const +nlohmann::json NarInfo::toJSON(const StoreDirConfig & store, bool includeImpureInfo) const { using nlohmann::json; - auto jsonObject = ValidPathInfo::toJSON(store, includeImpureInfo, hashFormat); + auto jsonObject = ValidPathInfo::toJSON(store, includeImpureInfo); if (includeImpureInfo) { if (!url.empty()) @@ -142,7 +142,7 @@ nlohmann::json NarInfo::toJSON(const StoreDirConfig & store, bool includeImpureI if (!compression.empty()) jsonObject["compression"] = compression; if (fileHash) - jsonObject["downloadHash"] = fileHash->to_string(hashFormat, true); + jsonObject["downloadHash"] = *fileHash; if (fileSize) jsonObject["downloadSize"] = fileSize; } @@ -161,17 +161,17 @@ NarInfo NarInfo::fromJSON(const StoreDirConfig & store, const StorePath & path, auto & obj = getObject(json); - if (json.contains("url")) - res.url = getString(valueAt(obj, "url")); + if (auto * url = get(obj, "url")) + res.url = getString(*url); - if (json.contains("compression")) - res.compression = getString(valueAt(obj, "compression")); + if (auto * compression = get(obj, "compression")) + res.compression = getString(*compression); - if (json.contains("downloadHash")) - res.fileHash = Hash::parseAny(getString(valueAt(obj, "downloadHash")), std::nullopt); + if (auto * downloadHash = get(obj, "downloadHash")) + 
res.fileHash = *downloadHash; - if (json.contains("downloadSize")) - res.fileSize = getUnsigned(valueAt(obj, "downloadSize")); + if (auto * downloadSize = get(obj, "downloadSize")) + res.fileSize = getUnsigned(*downloadSize); return res; } diff --git a/src/libstore/path-info.cc b/src/libstore/path-info.cc index 7d8bf4911..811c397a4 100644 --- a/src/libstore/path-info.cc +++ b/src/libstore/path-info.cc @@ -149,8 +149,7 @@ ValidPathInfo ValidPathInfo::makeFromCA( return res; } -nlohmann::json -UnkeyedValidPathInfo::toJSON(const StoreDirConfig & store, bool includeImpureInfo, HashFormat hashFormat) const +nlohmann::json UnkeyedValidPathInfo::toJSON(const StoreDirConfig & store, bool includeImpureInfo) const { using nlohmann::json; @@ -158,7 +157,7 @@ UnkeyedValidPathInfo::toJSON(const StoreDirConfig & store, bool includeImpureInf jsonObject["version"] = 2; - jsonObject["narHash"] = narHash.to_string(hashFormat, true); + jsonObject["narHash"] = narHash; jsonObject["narSize"] = narSize; { @@ -198,7 +197,7 @@ UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig & store throw Error("Unsupported path info JSON format version %d, only version 2 is currently supported", version); } - res.narHash = Hash::parseAny(getString(valueAt(json, "narHash")), std::nullopt); + res.narHash = valueAt(json, "narHash"); res.narSize = getUnsigned(valueAt(json, "narSize")); try { diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc index 146b775e5..697b73e5c 100644 --- a/src/nix/path-info.cc +++ b/src/nix/path-info.cc @@ -51,7 +51,7 @@ static json pathInfoToJSON(Store & store, const StorePathSet & storePaths, bool // know the name yet until we've read the NAR info. 
printedStorePath = store.printStorePath(info->path); - jsonObject = info->toJSON(store, true, HashFormat::SRI); + jsonObject = info->toJSON(store, true); if (showClosureSize) { StorePathSet closure; diff --git a/tests/functional/path-info.sh b/tests/functional/path-info.sh index 463ac6214..70ad1a7aa 100755 --- a/tests/functional/path-info.sh +++ b/tests/functional/path-info.sh @@ -17,8 +17,16 @@ diff --unified --color=always \ jq --sort-keys 'map_values(.narHash)') \ <(jq --sort-keys <<-EOF { - "$foo": "sha256-QvtAMbUl/uvi+LCObmqOhvNOapHdA2raiI4xG5zI5pA=", - "$bar": "sha256-9fhYGu9fqxcQC2Kc81qh2RMo1QcLBUBo8U+pPn+jthQ=", + "$foo": { + "algorithm": "sha256", + "format": "base64", + "hash": "QvtAMbUl/uvi+LCObmqOhvNOapHdA2raiI4xG5zI5pA=" + }, + "$bar": { + "algorithm": "sha256", + "format": "base64", + "hash": "9fhYGu9fqxcQC2Kc81qh2RMo1QcLBUBo8U+pPn+jthQ=" + }, "$baz": null } EOF From c5f348db959b87a4b61806830989d5e927921c00 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Fri, 7 Nov 2025 00:16:03 -0500 Subject: [PATCH 178/213] Test output checks referring to other outputs `allowedReferences` and friends can, in addition to supporting store paths (and placeholders, but because those will be rewritten to store paths), they also support to refering to other outputs in the derivation by name. We update the tests in order to cover for that. (While we are at it, also introduce some scratch variables for paths and placeholders to make the C++ literalsf for this test more concise.) 
--- .../advanced-attributes-structured-attrs.json | 6 +- .../derivation/ca/advanced-attributes.json | 4 +- .../data/derivation/ca/all_set.json | 6 +- .../ca/structuredAttrs_all_set.json | 6 +- .../advanced-attributes-structured-attrs.json | 18 +++--- .../derivation/ia/advanced-attributes.json | 8 +-- .../data/derivation/ia/all_set.json | 6 +- .../ia/structuredAttrs_all_set.json | 6 +- .../derivation-advanced-attrs.cc | 63 ++++++++++++------- .../advanced-attributes-structured-attrs.nix | 10 ++- .../derivation/advanced-attributes.nix | 10 ++- .../advanced-attributes-structured-attrs.drv | 2 +- .../derivation/ca/advanced-attributes.drv | 2 +- .../advanced-attributes-structured-attrs.drv | 2 +- .../derivation/ia/advanced-attributes.drv | 2 +- 15 files changed, 95 insertions(+), 56 deletions(-) diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json index 2a4e70558..95122ad41 100644 --- a/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json +++ b/src/libstore-tests/data/derivation/ca/advanced-attributes-structured-attrs.json @@ -69,7 +69,8 @@ "outputChecks": { "bin": { "disallowedReferences": [ - "/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g" + "/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g", + "dev" ], "disallowedRequisites": [ "/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8" @@ -84,7 +85,8 @@ "/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9" ], "allowedRequisites": [ - "/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z" + "/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z", + "bin" ] } }, diff --git a/src/libstore-tests/data/derivation/ca/advanced-attributes.json b/src/libstore-tests/data/derivation/ca/advanced-attributes.json index 55dbe62e0..6b77459bc 100644 --- a/src/libstore-tests/data/derivation/ca/advanced-attributes.json +++ 
b/src/libstore-tests/data/derivation/ca/advanced-attributes.json @@ -11,9 +11,9 @@ "__sandboxProfile": "sandcastle", "allowSubstitutes": "", "allowedReferences": "/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9", - "allowedRequisites": "/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z", + "allowedRequisites": "/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z bin", "builder": "/bin/bash", - "disallowedReferences": "/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g", + "disallowedReferences": "/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g dev", "disallowedRequisites": "/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8", "exportReferencesGraph": "refs1 /164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9 refs2 /nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv", "impureEnvVars": "UNICORN", diff --git a/src/libstore-tests/data/derivation/ca/all_set.json b/src/libstore-tests/data/derivation/ca/all_set.json index e06eada01..198356c64 100644 --- a/src/libstore-tests/data/derivation/ca/all_set.json +++ b/src/libstore-tests/data/derivation/ca/all_set.json @@ -23,10 +23,12 @@ "/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9" ], "allowedRequisites": [ - "/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z" + "/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z", + "bin" ], "disallowedReferences": [ - "/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g" + "/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g", + "dev" ], "disallowedRequisites": [ "/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8" diff --git a/src/libstore-tests/data/derivation/ca/structuredAttrs_all_set.json b/src/libstore-tests/data/derivation/ca/structuredAttrs_all_set.json index 2a321897c..f566c48dd 100644 --- a/src/libstore-tests/data/derivation/ca/structuredAttrs_all_set.json +++ b/src/libstore-tests/data/derivation/ca/structuredAttrs_all_set.json @@ -23,7 +23,8 @@ "allowedReferences": null, "allowedRequisites": null, "disallowedReferences": [ - 
"/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g" + "/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g", + "dev" ], "disallowedRequisites": [ "/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8" @@ -46,7 +47,8 @@ "/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9" ], "allowedRequisites": [ - "/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z" + "/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z", + "bin" ], "disallowedReferences": [], "disallowedRequisites": [], diff --git a/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json index e07d1294b..bbd68e087 100644 --- a/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json +++ b/src/libstore-tests/data/derivation/ia/advanced-attributes-structured-attrs.json @@ -5,9 +5,9 @@ ], "builder": "/bin/bash", "env": { - "bin": "/nix/store/33qms3h55wlaspzba3brlzlrm8m2239g-advanced-attributes-structured-attrs-bin", - "dev": "/nix/store/wyfgwsdi8rs851wmy1xfzdxy7y5vrg5l-advanced-attributes-structured-attrs-dev", - "out": "/nix/store/7cxy4zx1vqc885r4jl2l64pymqbdmhii-advanced-attributes-structured-attrs" + "bin": "/nix/store/cnpasdljgkhnwaf78cf3qygcp4qbki1c-advanced-attributes-structured-attrs-bin", + "dev": "/nix/store/ijq6mwpa9jbnpnl33qldfqihrr38kprx-advanced-attributes-structured-attrs-dev", + "out": "/nix/store/h1vh648d3p088kdimy0r8ngpfx7c3nzw-advanced-attributes-structured-attrs" }, "inputs": { "drvs": { @@ -33,13 +33,13 @@ "name": "advanced-attributes-structured-attrs", "outputs": { "bin": { - "path": "33qms3h55wlaspzba3brlzlrm8m2239g-advanced-attributes-structured-attrs-bin" + "path": "cnpasdljgkhnwaf78cf3qygcp4qbki1c-advanced-attributes-structured-attrs-bin" }, "dev": { - "path": "wyfgwsdi8rs851wmy1xfzdxy7y5vrg5l-advanced-attributes-structured-attrs-dev" + "path": "ijq6mwpa9jbnpnl33qldfqihrr38kprx-advanced-attributes-structured-attrs-dev" }, "out": { - "path": 
"7cxy4zx1vqc885r4jl2l64pymqbdmhii-advanced-attributes-structured-attrs" + "path": "h1vh648d3p088kdimy0r8ngpfx7c3nzw-advanced-attributes-structured-attrs" } }, "structuredAttrs": { @@ -66,7 +66,8 @@ "outputChecks": { "bin": { "disallowedReferences": [ - "/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar" + "/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar", + "dev" ], "disallowedRequisites": [ "/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev" @@ -81,7 +82,8 @@ "/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo" ], "allowedRequisites": [ - "/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev" + "/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev", + "bin" ] } }, diff --git a/src/libstore-tests/data/derivation/ia/advanced-attributes.json b/src/libstore-tests/data/derivation/ia/advanced-attributes.json index 372b4fbb9..e2de9431b 100644 --- a/src/libstore-tests/data/derivation/ia/advanced-attributes.json +++ b/src/libstore-tests/data/derivation/ia/advanced-attributes.json @@ -11,14 +11,14 @@ "__sandboxProfile": "sandcastle", "allowSubstitutes": "", "allowedReferences": "/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo", - "allowedRequisites": "/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev", + "allowedRequisites": "/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev bin", "builder": "/bin/bash", - "disallowedReferences": "/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar", + "disallowedReferences": "/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar dev", "disallowedRequisites": "/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev", "exportReferencesGraph": "refs1 /nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo refs2 /nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv", "impureEnvVars": "UNICORN", "name": "advanced-attributes", - "out": "/nix/store/wyhpwd748pns4k7svh48wdrc8kvjk0ra-advanced-attributes", + "out": "/nix/store/ymqmybkq5j4nd1xplw6ccdpbjnfi017v-advanced-attributes", "preferLocalBuild": "1", "requiredSystemFeatures": "rainbow uid-range", "system": "my-system" 
@@ -47,7 +47,7 @@ "name": "advanced-attributes", "outputs": { "out": { - "path": "wyhpwd748pns4k7svh48wdrc8kvjk0ra-advanced-attributes" + "path": "ymqmybkq5j4nd1xplw6ccdpbjnfi017v-advanced-attributes" } }, "system": "my-system", diff --git a/src/libstore-tests/data/derivation/ia/all_set.json b/src/libstore-tests/data/derivation/ia/all_set.json index 62b6cdf97..8731ca3a2 100644 --- a/src/libstore-tests/data/derivation/ia/all_set.json +++ b/src/libstore-tests/data/derivation/ia/all_set.json @@ -23,10 +23,12 @@ "/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo" ], "allowedRequisites": [ - "/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev" + "/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev", + "bin" ], "disallowedReferences": [ - "/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar" + "/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar", + "dev" ], "disallowedRequisites": [ "/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev" diff --git a/src/libstore-tests/data/derivation/ia/structuredAttrs_all_set.json b/src/libstore-tests/data/derivation/ia/structuredAttrs_all_set.json index 0fa383589..67fa634cf 100644 --- a/src/libstore-tests/data/derivation/ia/structuredAttrs_all_set.json +++ b/src/libstore-tests/data/derivation/ia/structuredAttrs_all_set.json @@ -23,7 +23,8 @@ "allowedReferences": null, "allowedRequisites": null, "disallowedReferences": [ - "/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar" + "/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar", + "dev" ], "disallowedRequisites": [ "/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev" @@ -46,7 +47,8 @@ "/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo" ], "allowedRequisites": [ - "/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev" + "/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev", + "bin" ], "disallowedReferences": [], "disallowedRequisites": [], diff --git a/src/libstore-tests/derivation-advanced-attrs.cc b/src/libstore-tests/derivation-advanced-attrs.cc index 41538cdcc..f44e96cdd 100644 --- 
a/src/libstore-tests/derivation-advanced-attrs.cc +++ b/src/libstore-tests/derivation-advanced-attrs.cc @@ -127,6 +127,21 @@ TEST_ATERM_JSON(advancedAttributes_structuredAttrs_defaults, "advanced-attribute #undef TEST_ATERM_JSON +/** + * Since these are both repeated and sensative opaque values, it makes + * sense to give them names in this file. + */ +static std::string pathFoo = "/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo", + pathFooDev = "/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev", + pathBar = "/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar", + pathBarDev = "/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev", + pathBarDrvIA = "/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv", + pathBarDrvCA = "/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv", + placeholderFoo = "/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9", + placeholderFooDev = "/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z", + placeholderBar = "/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g", + placeholderBarDev = "/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8"; + using ExportReferencesMap = decltype(DerivationOptions::exportReferencesGraph); static const DerivationOptions advancedAttributes_defaults = { @@ -216,16 +231,16 @@ DerivationOptions advancedAttributes_ia = { .outputChecks = DerivationOptions::OutputChecks{ .ignoreSelfRefs = true, - .allowedReferences = StringSet{"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo"}, - .disallowedReferences = StringSet{"/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar"}, - .allowedRequisites = StringSet{"/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev"}, - .disallowedRequisites = StringSet{"/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev"}, + .allowedReferences = StringSet{pathFoo}, + .disallowedReferences = StringSet{pathBar, "dev"}, + .allowedRequisites = StringSet{pathFooDev, "bin"}, + .disallowedRequisites = StringSet{pathBarDev}, }, .unsafeDiscardReferences = {}, .passAsFile = {}, 
.exportReferencesGraph{ - {"refs1", {"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo"}}, - {"refs2", {"/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"}}, + {"refs1", {pathFoo}}, + {"refs2", {pathBarDrvIA}}, }, .additionalSandboxProfile = "sandcastle", .noChroot = true, @@ -246,16 +261,16 @@ DerivationOptions advancedAttributes_ca = { .outputChecks = DerivationOptions::OutputChecks{ .ignoreSelfRefs = true, - .allowedReferences = StringSet{"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9"}, - .disallowedReferences = StringSet{"/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g"}, - .allowedRequisites = StringSet{"/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z"}, - .disallowedRequisites = StringSet{"/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8"}, + .allowedReferences = StringSet{placeholderFoo}, + .disallowedReferences = StringSet{placeholderBar, "dev"}, + .allowedRequisites = StringSet{placeholderFooDev, "bin"}, + .disallowedRequisites = StringSet{placeholderBarDev}, }, .unsafeDiscardReferences = {}, .passAsFile = {}, .exportReferencesGraph{ - {"refs1", {"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9"}}, - {"refs2", {"/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"}}, + {"refs1", {placeholderFoo}}, + {"refs2", {pathBarDrvCA}}, }, .additionalSandboxProfile = "sandcastle", .noChroot = true, @@ -375,13 +390,13 @@ DerivationOptions advancedAttributes_structuredAttrs_ia = { std::map{ {"out", DerivationOptions::OutputChecks{ - .allowedReferences = StringSet{"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo"}, - .allowedRequisites = StringSet{"/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev"}, + .allowedReferences = StringSet{pathFoo}, + .allowedRequisites = StringSet{pathFooDev, "bin"}, }}, {"bin", DerivationOptions::OutputChecks{ - .disallowedReferences = StringSet{"/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar"}, - .disallowedRequisites = StringSet{"/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev"}, + 
.disallowedReferences = StringSet{pathBar, "dev"}, + .disallowedRequisites = StringSet{pathBarDev}, }}, {"dev", DerivationOptions::OutputChecks{ @@ -393,8 +408,8 @@ DerivationOptions advancedAttributes_structuredAttrs_ia = { .passAsFile = {}, .exportReferencesGraph = { - {"refs1", {"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo"}}, - {"refs2", {"/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"}}, + {"refs1", {pathFoo}}, + {"refs2", {pathBarDrvIA}}, }, .additionalSandboxProfile = "sandcastle", .noChroot = true, @@ -417,13 +432,13 @@ DerivationOptions advancedAttributes_structuredAttrs_ca = { std::map{ {"out", DerivationOptions::OutputChecks{ - .allowedReferences = StringSet{"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9"}, - .allowedRequisites = StringSet{"/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z"}, + .allowedReferences = StringSet{placeholderFoo}, + .allowedRequisites = StringSet{placeholderFooDev, "bin"}, }}, {"bin", DerivationOptions::OutputChecks{ - .disallowedReferences = StringSet{"/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g"}, - .disallowedRequisites = StringSet{"/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8"}, + .disallowedReferences = StringSet{placeholderBar, "dev"}, + .disallowedRequisites = StringSet{placeholderBarDev}, }}, {"dev", DerivationOptions::OutputChecks{ @@ -435,8 +450,8 @@ DerivationOptions advancedAttributes_structuredAttrs_ca = { .passAsFile = {}, .exportReferencesGraph = { - {"refs1", {"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9"}}, - {"refs2", {"/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"}}, + {"refs1", {placeholderFoo}}, + {"refs2", {pathBarDrvCA}}, }, .additionalSandboxProfile = "sandcastle", .noChroot = true, diff --git a/tests/functional/derivation/advanced-attributes-structured-attrs.nix b/tests/functional/derivation/advanced-attributes-structured-attrs.nix index 46f619272..b11041303 100644 --- a/tests/functional/derivation/advanced-attributes-structured-attrs.nix +++ 
b/tests/functional/derivation/advanced-attributes-structured-attrs.nix @@ -66,10 +66,16 @@ derivation' { outputChecks = { out = { allowedReferences = [ foo ]; - allowedRequisites = [ foo.dev ]; + allowedRequisites = [ + foo.dev + "bin" + ]; }; bin = { - disallowedReferences = [ bar ]; + disallowedReferences = [ + bar + "dev" + ]; disallowedRequisites = [ bar.dev ]; }; dev = { diff --git a/tests/functional/derivation/advanced-attributes.nix b/tests/functional/derivation/advanced-attributes.nix index dd0c09e22..19a80f15d 100644 --- a/tests/functional/derivation/advanced-attributes.nix +++ b/tests/functional/derivation/advanced-attributes.nix @@ -58,8 +58,14 @@ derivation' { impureEnvVars = [ "UNICORN" ]; __darwinAllowLocalNetworking = true; allowedReferences = [ foo ]; - allowedRequisites = [ foo.dev ]; - disallowedReferences = [ bar ]; + allowedRequisites = [ + foo.dev + "bin" + ]; + disallowedReferences = [ + bar + "dev" + ]; disallowedRequisites = [ bar.dev ]; requiredSystemFeatures = [ "rainbow" diff --git a/tests/functional/derivation/ca/advanced-attributes-structured-attrs.drv b/tests/functional/derivation/ca/advanced-attributes-structured-attrs.drv index cd02c2f86..eeaba88e6 100644 --- a/tests/functional/derivation/ca/advanced-attributes-structured-attrs.drv +++ b/tests/functional/derivation/ca/advanced-attributes-structured-attrs.drv @@ -1 +1 @@ -Derive([("bin","","r:sha256",""),("dev","","r:sha256",""),("out","","r:sha256","")],[("/nix/store/j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv",["dev","out"]),("/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv",["dev","out"])],["/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"],"my-system","/bin/bash",["-c","echo hello > 
$out"],[("__json","{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"exportReferencesGraph\":{\"refs1\":[\"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9\"],\"refs2\":[\"/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv\"]},\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g\"],\"disallowedRequisites\":[\"/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9\"],\"allowedRequisites\":[\"/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z\"]}},\"outputHashAlgo\":\"sha256\",\"outputHashMode\":\"recursive\",\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}"),("bin","/04f3da1kmbr67m3gzxikmsl4vjz5zf777sv6m14ahv22r65aac9m"),("dev","/02qcpld1y6xhs5gz9bchpxaw0xdhmsp5dv88lh25r2ss44kh8dxz"),("out","/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9")]) \ No newline at end of file +Derive([("bin","","r:sha256",""),("dev","","r:sha256",""),("out","","r:sha256","")],[("/nix/store/j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv",["dev","out"]),("/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv",["dev","out"])],["/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"],"my-system","/bin/bash",["-c","echo hello > 
$out"],[("__json","{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"exportReferencesGraph\":{\"refs1\":[\"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9\"],\"refs2\":[\"/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv\"]},\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g\",\"dev\"],\"disallowedRequisites\":[\"/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9\"],\"allowedRequisites\":[\"/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z\",\"bin\"]}},\"outputHashAlgo\":\"sha256\",\"outputHashMode\":\"recursive\",\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}"),("bin","/04f3da1kmbr67m3gzxikmsl4vjz5zf777sv6m14ahv22r65aac9m"),("dev","/02qcpld1y6xhs5gz9bchpxaw0xdhmsp5dv88lh25r2ss44kh8dxz"),("out","/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9")]) \ No newline at end of file diff --git a/tests/functional/derivation/ca/advanced-attributes.drv b/tests/functional/derivation/ca/advanced-attributes.drv index 068cb593e..ee5968cdc 100644 --- a/tests/functional/derivation/ca/advanced-attributes.drv +++ b/tests/functional/derivation/ca/advanced-attributes.drv @@ -1 +1 @@ -Derive([("out","","r:sha256","")],[("/nix/store/j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv",["dev","out"]),("/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv",["dev","out"])],["/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"],"my-system","/bin/bash",["-c","echo hello > 
$out"],[("__darwinAllowLocalNetworking","1"),("__impureHostDeps","/usr/bin/ditto"),("__noChroot","1"),("__sandboxProfile","sandcastle"),("allowSubstitutes",""),("allowedReferences","/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9"),("allowedRequisites","/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z"),("builder","/bin/bash"),("disallowedReferences","/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g"),("disallowedRequisites","/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8"),("exportReferencesGraph","refs1 /164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9 refs2 /nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"),("impureEnvVars","UNICORN"),("name","advanced-attributes"),("out","/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9"),("outputHashAlgo","sha256"),("outputHashMode","recursive"),("preferLocalBuild","1"),("requiredSystemFeatures","rainbow uid-range"),("system","my-system")]) \ No newline at end of file +Derive([("out","","r:sha256","")],[("/nix/store/j56sf12rxpcv5swr14vsjn5cwm6bj03h-foo.drv",["dev","out"]),("/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv",["dev","out"])],["/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"],"my-system","/bin/bash",["-c","echo hello > $out"],[("__darwinAllowLocalNetworking","1"),("__impureHostDeps","/usr/bin/ditto"),("__noChroot","1"),("__sandboxProfile","sandcastle"),("allowSubstitutes",""),("allowedReferences","/164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9"),("allowedRequisites","/0nr45p69vn6izw9446wsh9bng9nndhvn19kpsm4n96a5mycw0s4z bin"),("builder","/bin/bash"),("disallowedReferences","/0nyw57wm2iicnm9rglvjmbci3ikmcp823czdqdzdcgsnnwqps71g dev"),("disallowedRequisites","/07f301yqyz8c6wf6bbbavb2q39j4n8kmcly1s09xadyhgy6x2wr8"),("exportReferencesGraph","refs1 /164j69y6zir9z0339n8pjigg3rckinlr77bxsavzizdaaljb7nh9 refs2 
/nix/store/qnml92yh97a6fbrs2m5qg5cqlc8vni58-bar.drv"),("impureEnvVars","UNICORN"),("name","advanced-attributes"),("out","/1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9"),("outputHashAlgo","sha256"),("outputHashMode","recursive"),("preferLocalBuild","1"),("requiredSystemFeatures","rainbow uid-range"),("system","my-system")]) \ No newline at end of file diff --git a/tests/functional/derivation/ia/advanced-attributes-structured-attrs.drv b/tests/functional/derivation/ia/advanced-attributes-structured-attrs.drv index 1dfcac42d..0aa82e636 100644 --- a/tests/functional/derivation/ia/advanced-attributes-structured-attrs.drv +++ b/tests/functional/derivation/ia/advanced-attributes-structured-attrs.drv @@ -1 +1 @@ -Derive([("bin","/nix/store/33qms3h55wlaspzba3brlzlrm8m2239g-advanced-attributes-structured-attrs-bin","",""),("dev","/nix/store/wyfgwsdi8rs851wmy1xfzdxy7y5vrg5l-advanced-attributes-structured-attrs-dev","",""),("out","/nix/store/7cxy4zx1vqc885r4jl2l64pymqbdmhii-advanced-attributes-structured-attrs","","")],[("/nix/store/afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv",["dev","out"]),("/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv",["dev","out"])],["/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"],"my-system","/bin/bash",["-c","echo hello > 
$out"],[("__json","{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"exportReferencesGraph\":{\"refs1\":[\"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo\"],\"refs2\":[\"/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv\"]},\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar\"],\"disallowedRequisites\":[\"/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo\"],\"allowedRequisites\":[\"/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev\"]}},\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}"),("bin","/nix/store/33qms3h55wlaspzba3brlzlrm8m2239g-advanced-attributes-structured-attrs-bin"),("dev","/nix/store/wyfgwsdi8rs851wmy1xfzdxy7y5vrg5l-advanced-attributes-structured-attrs-dev"),("out","/nix/store/7cxy4zx1vqc885r4jl2l64pymqbdmhii-advanced-attributes-structured-attrs")]) \ No newline at end of file +Derive([("bin","/nix/store/cnpasdljgkhnwaf78cf3qygcp4qbki1c-advanced-attributes-structured-attrs-bin","",""),("dev","/nix/store/ijq6mwpa9jbnpnl33qldfqihrr38kprx-advanced-attributes-structured-attrs-dev","",""),("out","/nix/store/h1vh648d3p088kdimy0r8ngpfx7c3nzw-advanced-attributes-structured-attrs","","")],[("/nix/store/afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv",["dev","out"]),("/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv",["dev","out"])],["/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"],"my-system","/bin/bash",["-c","echo hello > 
$out"],[("__json","{\"__darwinAllowLocalNetworking\":true,\"__impureHostDeps\":[\"/usr/bin/ditto\"],\"__noChroot\":true,\"__sandboxProfile\":\"sandcastle\",\"allowSubstitutes\":false,\"builder\":\"/bin/bash\",\"exportReferencesGraph\":{\"refs1\":[\"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo\"],\"refs2\":[\"/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv\"]},\"impureEnvVars\":[\"UNICORN\"],\"name\":\"advanced-attributes-structured-attrs\",\"outputChecks\":{\"bin\":{\"disallowedReferences\":[\"/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar\",\"dev\"],\"disallowedRequisites\":[\"/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev\"]},\"dev\":{\"maxClosureSize\":5909,\"maxSize\":789},\"out\":{\"allowedReferences\":[\"/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo\"],\"allowedRequisites\":[\"/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev\",\"bin\"]}},\"outputs\":[\"out\",\"bin\",\"dev\"],\"preferLocalBuild\":true,\"requiredSystemFeatures\":[\"rainbow\",\"uid-range\"],\"system\":\"my-system\"}"),("bin","/nix/store/cnpasdljgkhnwaf78cf3qygcp4qbki1c-advanced-attributes-structured-attrs-bin"),("dev","/nix/store/ijq6mwpa9jbnpnl33qldfqihrr38kprx-advanced-attributes-structured-attrs-dev"),("out","/nix/store/h1vh648d3p088kdimy0r8ngpfx7c3nzw-advanced-attributes-structured-attrs")]) \ No newline at end of file diff --git a/tests/functional/derivation/ia/advanced-attributes.drv b/tests/functional/derivation/ia/advanced-attributes.drv index c71a88886..4bc7320f5 100644 --- a/tests/functional/derivation/ia/advanced-attributes.drv +++ b/tests/functional/derivation/ia/advanced-attributes.drv @@ -1 +1 @@ -Derive([("out","/nix/store/wyhpwd748pns4k7svh48wdrc8kvjk0ra-advanced-attributes","","")],[("/nix/store/afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv",["dev","out"]),("/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv",["dev","out"])],["/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"],"my-system","/bin/bash",["-c","echo hello > 
$out"],[("__darwinAllowLocalNetworking","1"),("__impureHostDeps","/usr/bin/ditto"),("__noChroot","1"),("__sandboxProfile","sandcastle"),("allowSubstitutes",""),("allowedReferences","/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo"),("allowedRequisites","/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev"),("builder","/bin/bash"),("disallowedReferences","/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar"),("disallowedRequisites","/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev"),("exportReferencesGraph","refs1 /nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo refs2 /nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"),("impureEnvVars","UNICORN"),("name","advanced-attributes"),("out","/nix/store/wyhpwd748pns4k7svh48wdrc8kvjk0ra-advanced-attributes"),("preferLocalBuild","1"),("requiredSystemFeatures","rainbow uid-range"),("system","my-system")]) \ No newline at end of file +Derive([("out","/nix/store/ymqmybkq5j4nd1xplw6ccdpbjnfi017v-advanced-attributes","","")],[("/nix/store/afc3vbjbzql750v2lp8gxgaxsajphzih-foo.drv",["dev","out"]),("/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv",["dev","out"])],["/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"],"my-system","/bin/bash",["-c","echo hello > $out"],[("__darwinAllowLocalNetworking","1"),("__impureHostDeps","/usr/bin/ditto"),("__noChroot","1"),("__sandboxProfile","sandcastle"),("allowSubstitutes",""),("allowedReferences","/nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo"),("allowedRequisites","/nix/store/z0rjzy29v9k5qa4nqpykrbzirj7sd43v-foo-dev bin"),("builder","/bin/bash"),("disallowedReferences","/nix/store/r5cff30838majxk5mp3ip2diffi8vpaj-bar dev"),("disallowedRequisites","/nix/store/9b61w26b4avv870dw0ymb6rw4r1hzpws-bar-dev"),("exportReferencesGraph","refs1 /nix/store/p0hax2lzvjpfc2gwkk62xdglz0fcqfzn-foo refs2 
/nix/store/vj2i49jm2868j2fmqvxm70vlzmzvgv14-bar.drv"),("impureEnvVars","UNICORN"),("name","advanced-attributes"),("out","/nix/store/ymqmybkq5j4nd1xplw6ccdpbjnfi017v-advanced-attributes"),("preferLocalBuild","1"),("requiredSystemFeatures","rainbow uid-range"),("system","my-system")]) \ No newline at end of file From 2f3ec16793b4ad029254be1ae25e7824ec0c1a71 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=B6rg=20Thalheim?= Date: Fri, 7 Nov 2025 20:27:49 +0100 Subject: [PATCH 179/213] build: Disable libstdc++ TBB backend to avoid unnecessary dependency boost::concurrent_flat_map (used in libutil and libstore) includes the C++17 header. GCC's libstdc++ implements parallel algorithms using Intel TBB as the backend, which creates a link-time dependency on libtbb even though we don't actually use any parallel algorithms. Disable the TBB backend for libstdc++ by setting _GLIBCXX_USE_TBB_PAR_BACKEND=0. This makes parallel algorithms fall back to serial execution, which is acceptable since we don't use them anyway. This only affects libstdc++ (GCC's standard library); other standard libraries like libc++ (LLVM) are unaffected. --- nix-meson-build-support/common/meson.build | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/nix-meson-build-support/common/meson.build b/nix-meson-build-support/common/meson.build index 1405974d2..5fcf557e7 100644 --- a/nix-meson-build-support/common/meson.build +++ b/nix-meson-build-support/common/meson.build @@ -42,6 +42,26 @@ if cxx.get_id() == 'clang' add_project_arguments('-fpch-instantiate-templates', language : 'cpp') endif +# Detect if we're using libstdc++ (GCC's standard library) +# libstdc++ uses Intel TBB as backend for C++17 parallel algorithms when is included. +# boost::concurrent_flat_map includes , which would require linking against TBB. +# Since we don't actually use parallel algorithms, disable the TBB backend to avoid the dependency. +# TBB is a dependency of blake3 and leaking into our build environment. 
+is_using_libstdcxx = cxx.compiles( + ''' + #include + #ifndef __GLIBCXX__ + #error "not libstdc++" + #endif + int main() { return 0; } +''', + name : 'using libstdc++', +) + +if is_using_libstdcxx + add_project_arguments('-D_GLIBCXX_USE_TBB_PAR_BACKEND=0', language : 'cpp') +endif + # Darwin ld doesn't like "X.Y.ZpreABCD+W" nix_soversion = meson.project_version().split('+')[0].split('pre')[0] From e2040aecacdf81dde9aad59a6bbc66162a04f668 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 6 Nov 2025 01:25:22 +0100 Subject: [PATCH 180/213] meson.build: Make schema checks optional --- meson.build | 7 ++++++- meson.options | 14 ++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/meson.build b/meson.build index f3158ea6d..c072a4821 100644 --- a/meson.build +++ b/meson.build @@ -60,4 +60,9 @@ if get_option('unit-tests') subproject('libflake-tests') endif subproject('nix-functional-tests') -subproject('json-schema-checks') +if get_option('json-schema-checks') + subproject('json-schema-checks') +endif +if get_option('kaitai-struct-checks') + subproject('kaitai-struct-checks') +endif diff --git a/meson.options b/meson.options index d2c9fa40c..2739b0c71 100644 --- a/meson.options +++ b/meson.options @@ -27,3 +27,17 @@ option( value : false, description : 'Build benchmarks (requires gbenchmark)', ) + +option( + 'kaitai-struct-checks', + type : 'boolean', + value : true, + description : 'Check the Kaitai Struct specifications (requires Kaitai Struct)', +) + +option( + 'json-schema-checks', + type : 'boolean', + value : true, + description : 'Check JSON schema validity of schemas and examples (requires jv)', +) From 1fa235b77c9cab1ab8dfdd97187ca2b6bf9dbf3d Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 6 Nov 2025 01:29:25 +0100 Subject: [PATCH 181/213] devShells: Infer inputs from input closure boundary --- packaging/dev-shell.nix | 228 +++++++++++++++++++++++++++++++++++----- 1 file changed, 203 insertions(+), 25 deletions(-) diff 
--git a/packaging/dev-shell.nix b/packaging/dev-shell.nix index 153e7a3eb..d50cab759 100644 --- a/packaging/dev-shell.nix +++ b/packaging/dev-shell.nix @@ -3,10 +3,118 @@ devFlake, }: +let + # Some helper functions + + /** + Compute a filtered closure of build inputs. + + Specifically, `buildInputsClosure cond startSet` computes the closure formed + by recursive application of `p: filter cond p.buildInputs ++ filter cond p.propagatedBuildInputs` + to `startSet`. + + Example: + ```nix + builtInputsClosure isInternal [ pkg1 pkg2 ] + => [ pkg1 pkg3 pkg2 pkg10 ] + ``` + + Note: order tbd + + Note: `startSet` is *NOT* filtered. + */ + buildInputsClosureCond = + cond: startSet: + let + closure = builtins.genericClosure { + startSet = map (d: { + key = d.drvPath; + value = d; + }) startSet; + operator = + d: + let + r = + map + (d': { + key = d'.drvPath; + value = d'; + }) + ( + lib.filter cond d.value.buildInputs or [ ] ++ lib.filter cond d.value.propagatedBuildInputs or [ ] + ); + in + r; + }; + in + map (item: item.value) closure; + + /** + `[ pkg1 pkg2 ]` -> `{ "...-pkg2.drv" = null; "...-pkg1.drv" = null }` + + Note: fairly arbitrary order (hash based). Use for efficient set membership test only. + */ + byDrvPath = + l: + lib.listToAttrs ( + map (c: { + name = + # Just a lookup key + builtins.unsafeDiscardStringContext c.drvPath; + value = null; + }) l + ); + + /** + Stable dedup. + + Unlike `listToAttrs` -> `attrValues`, this preserves the input ordering, + which is more predictable ("deterministic") than e.g. sorting store paths, + whose hashes affect the ordering on every change. + */ + # TODO: add to Nixpkgs lib, refer from uniqueStrings + dedupByString = + key: l: + let + r = + lib.foldl' + ( + a@{ list, set }: + elem: + let + k = builtins.unsafeDiscardStringContext (key elem); + in + if set ? ${k} then + a + else + let + # Note: O(n²) copying. Use linkedLists to concat them in one go at the end. 
+ # https://github.com/NixOS/nixpkgs/pull/452088 + newList = [ elem ] ++ list; + newSet = set // { + ${k} = null; + }; + in + builtins.seq newList builtins.seq newSet { + list = newList; + set = newSet; + } + ) + { + list = [ ]; + set = { }; + } + l; + in + r.list; + +in + { pkgs }: +# TODO: don't use nix-util for this? pkgs.nixComponents2.nix-util.overrideAttrs ( - attrs: + finalAttrs: prevAttrs: let stdenv = pkgs.nixDependencies2.stdenv; @@ -21,13 +129,89 @@ pkgs.nixComponents2.nix-util.overrideAttrs ( "-D${prefix}:${rest}"; havePerl = stdenv.buildPlatform == stdenv.hostPlatform && stdenv.hostPlatform.isUnix; ignoreCrossFile = flags: builtins.filter (flag: !(lib.strings.hasInfix "cross-file" flag)) flags; + + activeComponents = buildInputsClosureCond isInternal ( + lib.attrValues (finalAttrs.passthru.config.getComponents allComponents) + ); + + allComponents = lib.filterAttrs (k: v: lib.isDerivation v) pkgs.nixComponents2; + internalDrvs = byDrvPath ( + # Drop the attr names (not present in buildInputs anyway) + lib.attrValues allComponents + ++ lib.concatMap (c: lib.attrValues c.tests or { }) (lib.attrValues allComponents) + ); + + isInternal = + dep: internalDrvs ? ${builtins.unsafeDiscardStringContext dep.drvPath or "_non-existent_"}; + in { - pname = "shell-for-" + attrs.pname; + pname = "shell-for-nix"; + + passthru = { + inherit activeComponents; + + # We use this attribute to store non-derivation values like functions and + # perhaps other things that are primarily for overriding and not the shell. 
+ config = { + # Default getComponents + getComponents = + c: + builtins.removeAttrs c ( + lib.optionals (!havePerl) [ "nix-perl-bindings" ] + ++ lib.optionals (!buildCanExecuteHost) [ "nix-manual" ] + ); + }; + + /** + Produce a devShell for a given set of nix components + + Example: + + ```nix + shell.withActiveComponents (c: { + inherit (c) nix-util; + }) + ``` + */ + withActiveComponents = + f2: + finalAttrs.finalPackage.overrideAttrs ( + finalAttrs: prevAttrs: { + passthru = prevAttrs.passthru // { + config = prevAttrs.passthru.config // { + getComponents = f2; + }; + }; + } + ); + + small = + (finalAttrs.finalPackage.withActiveComponents (c: { + inherit (c) + nix-cli + nix-util-tests + nix-store-tests + nix-expr-tests + nix-fetchers-tests + nix-flake-tests + nix-functional-tests + # Currently required + nix-perl-bindings + ; + })).overrideAttrs + (o: { + mesonFlags = o.mesonFlags ++ [ + # TODO: infer from activeComponents or vice versa + "-Dkaitai-struct-checks=false" + "-Djson-schema-checks=false" + ]; + }); + }; # Remove the version suffix to avoid unnecessary attempts to substitute in nix develop version = lib.fileContents ../.version; - name = attrs.pname; + name = finalAttrs.pname; installFlags = "sysconfdir=$(out)/etc"; shellHook = '' @@ -98,17 +282,9 @@ pkgs.nixComponents2.nix-util.overrideAttrs ( nativeBuildInputs = let inputs = - attrs.nativeBuildInputs or [ ] - ++ pkgs.nixComponents2.nix-util.nativeBuildInputs - ++ pkgs.nixComponents2.nix-store.nativeBuildInputs - ++ pkgs.nixComponents2.nix-fetchers.nativeBuildInputs - ++ pkgs.nixComponents2.nix-expr.nativeBuildInputs - ++ lib.optionals havePerl pkgs.nixComponents2.nix-perl-bindings.nativeBuildInputs - ++ lib.optionals buildCanExecuteHost pkgs.nixComponents2.nix-manual.externalNativeBuildInputs - ++ pkgs.nixComponents2.nix-internal-api-docs.nativeBuildInputs - ++ pkgs.nixComponents2.nix-external-api-docs.nativeBuildInputs - ++ pkgs.nixComponents2.nix-functional-tests.externalNativeBuildInputs - ++ 
pkgs.nixComponents2.nix-json-schema-checks.externalNativeBuildInputs + dedupByString (v: "${v}") ( + lib.filter (x: !isInternal x) (lib.lists.concatMap (c: c.nativeBuildInputs) activeComponents) + ) ++ lib.optional ( !buildCanExecuteHost # Hack around https://github.com/nixos/nixpkgs/commit/bf7ad8cfbfa102a90463433e2c5027573b462479 @@ -117,9 +293,7 @@ pkgs.nixComponents2.nix-util.overrideAttrs ( && lib.meta.availableOn stdenv.buildPlatform (stdenv.hostPlatform.emulator pkgs.buildPackages) ) pkgs.buildPackages.mesonEmulatorHook ++ [ - pkgs.buildPackages.cmake pkgs.buildPackages.gnused - pkgs.buildPackages.changelog-d modular.pre-commit.settings.package (pkgs.writeScriptBin "pre-commit-hooks-install" modular.pre-commit.settings.installationScript) pkgs.buildPackages.nixfmt-rfc-style @@ -136,18 +310,22 @@ pkgs.nixComponents2.nix-util.overrideAttrs ( # from making its way into NIX_CFLAGS_COMPILE. lib.filter (p: !lib.hasInfix "separate-debug-info" p) inputs; + propagatedNativeBuildInputs = dedupByString (v: "${v}") ( + lib.filter (x: !isInternal x) ( + lib.lists.concatMap (c: c.propagatedNativeBuildInputs) activeComponents + ) + ); + buildInputs = [ pkgs.gbenchmark ] - ++ attrs.buildInputs or [ ] - ++ pkgs.nixComponents2.nix-util.buildInputs - ++ pkgs.nixComponents2.nix-store.buildInputs - ++ pkgs.nixComponents2.nix-store-tests.externalBuildInputs - ++ pkgs.nixComponents2.nix-fetchers.buildInputs - ++ pkgs.nixComponents2.nix-expr.buildInputs - ++ pkgs.nixComponents2.nix-expr.externalPropagatedBuildInputs - ++ pkgs.nixComponents2.nix-cmd.buildInputs - ++ lib.optionals havePerl pkgs.nixComponents2.nix-perl-bindings.externalBuildInputs + ++ dedupByString (v: "${v}") ( + lib.filter (x: !isInternal x) (lib.lists.concatMap (c: c.buildInputs) activeComponents) + ) ++ lib.optional havePerl pkgs.perl; + + propagatedBuildInputs = dedupByString (v: "${v}") ( + lib.filter (x: !isInternal x) (lib.lists.concatMap (c: c.propagatedBuildInputs) activeComponents) + ); } ) From 
cb5b0c30aa5733b40cc70089fe3829cb1046c352 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Thu, 6 Nov 2025 01:52:38 +0100 Subject: [PATCH 182/213] Drop external*Inputs from packages Get rid of some manual package set resolution in favor of splicing again, too. Co-authored-by: John Ericson --- doc/manual/package.nix | 8 ++---- src/json-schema-checks/package.nix | 8 ++---- src/kaitai-struct-checks/package.nix | 24 ++++++---------- src/libexpr/package.nix | 5 ---- src/libstore-tests/package.nix | 12 +++----- src/perl/package.nix | 5 ---- tests/functional/package.nix | 43 ++++++++++++++++------------ 7 files changed, 41 insertions(+), 64 deletions(-) diff --git a/doc/manual/package.nix b/doc/manual/package.nix index 81061b7a1..05368e34e 100644 --- a/doc/manual/package.nix +++ b/doc/manual/package.nix @@ -58,8 +58,8 @@ mkMesonDerivation (finalAttrs: { "man" ]; - # Hack for sake of the dev shell - passthru.externalNativeBuildInputs = [ + nativeBuildInputs = [ + nix-cli meson ninja (lib.getBin lowdown-unsandboxed) @@ -78,10 +78,6 @@ mkMesonDerivation (finalAttrs: { changelog-d ]; - nativeBuildInputs = finalAttrs.passthru.externalNativeBuildInputs ++ [ - nix-cli - ]; - preConfigure = '' chmod u+w ./.version echo ${finalAttrs.version} > ./.version diff --git a/src/json-schema-checks/package.nix b/src/json-schema-checks/package.nix index 5365fe75e..609d396be 100644 --- a/src/json-schema-checks/package.nix +++ b/src/json-schema-checks/package.nix @@ -34,15 +34,11 @@ mkMesonDerivation (finalAttrs: { outputs = [ "out" ]; - passthru.externalNativeBuildInputs = [ - jsonschema - ]; - nativeBuildInputs = [ meson ninja - ] - ++ finalAttrs.passthru.externalNativeBuildInputs; + jsonschema + ]; doCheck = true; diff --git a/src/kaitai-struct-checks/package.nix b/src/kaitai-struct-checks/package.nix index 97d56aabd..a466441d3 100644 --- a/src/kaitai-struct-checks/package.nix +++ b/src/kaitai-struct-checks/package.nix @@ -37,7 +37,15 @@ mkMesonDerivation (finalAttrs: { outputs = [ "out" 
]; - passthru.externalNativeBuildInputs = [ + buildInputs = [ + gtest + kaitai-struct-cpp-stl-runtime + ]; + + nativeBuildInputs = [ + meson + ninja + pkg-config # This can go away when we bump up to 25.11 (kaitai-struct-compiler.overrideAttrs (finalAttrs: { version = "0.11"; @@ -48,20 +56,6 @@ mkMesonDerivation (finalAttrs: { })) ]; - passthru.externalBuildInputs = [ - gtest - kaitai-struct-cpp-stl-runtime - ]; - - buildInputs = finalAttrs.passthru.externalBuildInputs; - - nativeBuildInputs = [ - meson - ninja - pkg-config - ] - ++ finalAttrs.passthru.externalNativeBuildInputs; - doCheck = true; mesonCheckFlags = [ "--print-errorlogs" ]; diff --git a/src/libexpr/package.nix b/src/libexpr/package.nix index a67a8cc49..d0aef34e9 100644 --- a/src/libexpr/package.nix +++ b/src/libexpr/package.nix @@ -70,11 +70,6 @@ mkMesonLibrary (finalAttrs: { nix-util nix-store nix-fetchers - ] - ++ finalAttrs.passthru.externalPropagatedBuildInputs; - - # Hack for sake of the dev shell - passthru.externalPropagatedBuildInputs = [ boost nlohmann_json ] diff --git a/src/libstore-tests/package.nix b/src/libstore-tests/package.nix index 90e6af519..ac547aca3 100644 --- a/src/libstore-tests/package.nix +++ b/src/libstore-tests/package.nix @@ -42,20 +42,16 @@ mkMesonExecutable (finalAttrs: { (fileset.fileFilter (file: file.hasExt "hh") ./.) 
]; - # Hack for sake of the dev shell - passthru.externalBuildInputs = [ + buildInputs = [ sqlite rapidcheck gtest - ] - ++ lib.optionals withBenchmarks [ - gbenchmark - ]; - - buildInputs = finalAttrs.passthru.externalBuildInputs ++ [ nix-store nix-store-c nix-store-test-support + ] + ++ lib.optionals withBenchmarks [ + gbenchmark ]; mesonFlags = [ diff --git a/src/perl/package.nix b/src/perl/package.nix index 10d84de77..96a41ae47 100644 --- a/src/perl/package.nix +++ b/src/perl/package.nix @@ -45,11 +45,6 @@ perl.pkgs.toPerlModule ( buildInputs = [ nix-store - ] - ++ finalAttrs.passthru.externalBuildInputs; - - # Hack for sake of the dev shell - passthru.externalBuildInputs = [ bzip2 libsodium ]; diff --git a/tests/functional/package.nix b/tests/functional/package.nix index 6830a9e58..b3b314a50 100644 --- a/tests/functional/package.nix +++ b/tests/functional/package.nix @@ -2,7 +2,16 @@ lib, stdenv, mkMesonDerivation, - buildPackages, + + meson, + ninja, + pkg-config, + + jq, + git, + mercurial, + unixtools, + util-linux, nix-store, nix-expr, @@ -37,17 +46,20 @@ mkMesonDerivation ( ./. ]; - # Hack for sake of the dev shell. Need to "manually splice" since - # this isn't a specially-recognized list of dependencies. - passthru.externalNativeBuildInputs = [ - buildPackages.meson - buildPackages.ninja - buildPackages.pkg-config + nativeBuildInputs = [ + meson + ninja + pkg-config - buildPackages.jq - buildPackages.git - buildPackages.mercurial - buildPackages.unixtools.script + jq + git + mercurial + unixtools.script + + # Explicitly splice the hostHost variant to fix LLVM tests. The nix-cli + # has to be in PATH, but must come from the host context where it's built + # with libc++. + (nix-cli.__spliced.hostHost or nix-cli) ] ++ lib.optionals stdenv.hostPlatform.isLinux [ # For various sandboxing tests that needs a statically-linked shell, @@ -56,14 +68,7 @@ mkMesonDerivation ( # For Overlay FS tests need `mount`, `umount`, and `unshare`. 
# For `script` command (ensuring a TTY) # TODO use `unixtools` to be precise over which executables instead? - buildPackages.util-linux - ]; - - nativeBuildInputs = finalAttrs.passthru.externalNativeBuildInputs ++ [ - # Explicitly splice the hostHost variant to fix LLVM tests. The nix-cli - # has to be in PATH, but must come from the host context where it's built - # with libc++. - (nix-cli.__spliced.hostHost or nix-cli) + util-linux ]; buildInputs = [ From d6fc64ac3876dd144a7ca4604c0a371d2c43f624 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sat, 8 Nov 2025 00:07:50 +0300 Subject: [PATCH 183/213] libfetchers-tests: Add InputFromAttrsTest for #14429 Previous commit fixed an issue. This commit adds a test to validate that. --- src/libfetchers-tests/input.cc | 61 +++++++++++++++++++++++++++++++ src/libfetchers-tests/meson.build | 1 + 2 files changed, 62 insertions(+) create mode 100644 src/libfetchers-tests/input.cc diff --git a/src/libfetchers-tests/input.cc b/src/libfetchers-tests/input.cc new file mode 100644 index 000000000..faff55f2c --- /dev/null +++ b/src/libfetchers-tests/input.cc @@ -0,0 +1,61 @@ +#include "nix/fetchers/fetch-settings.hh" +#include "nix/fetchers/attrs.hh" +#include "nix/fetchers/fetchers.hh" + +#include + +#include + +namespace nix { + +using fetchers::Attr; + +struct InputFromAttrsTestCase +{ + fetchers::Attrs attrs; + std::string expectedUrl; + std::string description; + fetchers::Attrs expectedAttrs = attrs; +}; + +class InputFromAttrsTest : public ::testing::WithParamInterface, public ::testing::Test +{}; + +TEST_P(InputFromAttrsTest, attrsAreCorrectAndRoundTrips) +{ + fetchers::Settings fetchSettings; + + const auto & testCase = GetParam(); + + auto input = fetchers::Input::fromAttrs(fetchSettings, fetchers::Attrs(testCase.attrs)); + + EXPECT_EQ(input.toAttrs(), testCase.expectedAttrs); + EXPECT_EQ(input.toURLString(), testCase.expectedUrl); + + auto input2 = fetchers::Input::fromAttrs(fetchSettings, input.toAttrs()); + 
EXPECT_EQ(input, input2); + EXPECT_EQ(input.toAttrs(), input2.toAttrs()); +} + +INSTANTIATE_TEST_SUITE_P( + InputFromAttrs, + InputFromAttrsTest, + ::testing::Values( + // Test for issue #14429. + InputFromAttrsTestCase{ + .attrs = + { + {"url", Attr("git+ssh://git@github.com/NixOS/nixpkgs")}, + {"type", Attr("git")}, + }, + .expectedUrl = "git+ssh://git@github.com/NixOS/nixpkgs", + .description = "strips_git_plus_prefix", + .expectedAttrs = + { + {"url", Attr("ssh://git@github.com/NixOS/nixpkgs")}, + {"type", Attr("git")}, + }, + }), + [](const ::testing::TestParamInfo & info) { return info.param.description; }); + +} // namespace nix diff --git a/src/libfetchers-tests/meson.build b/src/libfetchers-tests/meson.build index a18f64d79..6bccdb05c 100644 --- a/src/libfetchers-tests/meson.build +++ b/src/libfetchers-tests/meson.build @@ -42,6 +42,7 @@ sources = files( 'access-tokens.cc', 'git-utils.cc', 'git.cc', + 'input.cc', 'nix_api_fetchers.cc', 'public-key.cc', ) From 0c53c88367c2af8e56fc32d14afe19104d515b0d Mon Sep 17 00:00:00 2001 From: Marcel Date: Thu, 30 Oct 2025 00:16:31 +0100 Subject: [PATCH 184/213] progress-bar: use dynamic size units --- src/libmain/progress-bar.cc | 67 +++++++++++++++++++++++++--- src/libutil-tests/util.cc | 53 ++++++++++++++++++++++ src/libutil/include/nix/util/util.hh | 36 +++++++++++++++ src/libutil/util.cc | 63 ++++++++++++++++++++++---- 4 files changed, 205 insertions(+), 14 deletions(-) diff --git a/src/libmain/progress-bar.cc b/src/libmain/progress-bar.cc index edec8460d..6cefae6be 100644 --- a/src/libmain/progress-bar.cc +++ b/src/libmain/progress-bar.cc @@ -467,8 +467,6 @@ public: std::string getStatus(State & state) { - auto MiB = 1024.0 * 1024.0; - std::string res; auto renderActivity = @@ -516,6 +514,65 @@ public: return s; }; + auto renderSizeActivity = [&](ActivityType type, const std::string & itemFmt = "%s") { + auto & act = state.activitiesByType[type]; + uint64_t done = act.done, expected = act.done, running = 0, failed 
= act.failed; + for (auto & j : act.its) { + done += j.second->done; + expected += j.second->expected; + running += j.second->running; + failed += j.second->failed; + } + + expected = std::max(expected, act.expected); + + std::optional commonUnit; + std::string s; + + if (running || done || expected || failed) { + if (running) + if (expected != 0) { + commonUnit = getCommonSizeUnit({(int64_t) running, (int64_t) done, (int64_t) expected}); + s = + fmt(ANSI_BLUE "%s" ANSI_NORMAL "/" ANSI_GREEN "%s" ANSI_NORMAL "/%s", + commonUnit ? renderSizeWithoutUnit(running, *commonUnit) : renderSize(running), + commonUnit ? renderSizeWithoutUnit(done, *commonUnit) : renderSize(done), + commonUnit ? renderSizeWithoutUnit(expected, *commonUnit) : renderSize(expected)); + } else { + commonUnit = getCommonSizeUnit({(int64_t) running, (int64_t) done}); + s = + fmt(ANSI_BLUE "%s" ANSI_NORMAL "/" ANSI_GREEN "%s" ANSI_NORMAL, + commonUnit ? renderSizeWithoutUnit(running, *commonUnit) : renderSize(running), + commonUnit ? renderSizeWithoutUnit(done, *commonUnit) : renderSize(done)); + } + else if (expected != done) + if (expected != 0) { + commonUnit = getCommonSizeUnit({(int64_t) done, (int64_t) expected}); + s = + fmt(ANSI_GREEN "%s" ANSI_NORMAL "/%s", + commonUnit ? renderSizeWithoutUnit(done, *commonUnit) : renderSize(done), + commonUnit ? renderSizeWithoutUnit(expected, *commonUnit) : renderSize(expected)); + } else { + commonUnit = getSizeUnit(done); + s = fmt(ANSI_GREEN "%s" ANSI_NORMAL, renderSizeWithoutUnit(done, *commonUnit)); + } + else { + commonUnit = getSizeUnit(done); + s = fmt(done ? 
ANSI_GREEN "%s" ANSI_NORMAL : "%s", renderSizeWithoutUnit(done, *commonUnit)); + } + + if (commonUnit) + s = fmt("%s %siB", s, getSizeUnitSuffix(*commonUnit)); + + s = fmt(itemFmt, s); + + if (failed) + s += fmt(" (" ANSI_RED "%s failed" ANSI_NORMAL ")", renderSize(failed)); + } + + return s; + }; + auto showActivity = [&](ActivityType type, const std::string & itemFmt, const std::string & numberFmt = "%d", double unit = 1) { auto s = renderActivity(type, itemFmt, numberFmt, unit); @@ -529,7 +586,7 @@ public: showActivity(actBuilds, "%s built"); auto s1 = renderActivity(actCopyPaths, "%s copied"); - auto s2 = renderActivity(actCopyPath, "%s MiB", "%.1f", MiB); + auto s2 = renderSizeActivity(actCopyPath); if (!s1.empty() || !s2.empty()) { if (!res.empty()) @@ -545,12 +602,12 @@ public: } } - showActivity(actFileTransfer, "%s MiB DL", "%.1f", MiB); + renderSizeActivity(actFileTransfer, "%s DL"); { auto s = renderActivity(actOptimiseStore, "%s paths optimised"); if (s != "") { - s += fmt(", %.1f MiB / %d inodes freed", state.bytesLinked / MiB, state.filesLinked); + s += fmt(", %s / %d inodes freed", renderSize(state.bytesLinked), state.filesLinked); if (!res.empty()) res += ", "; res += s; diff --git a/src/libutil-tests/util.cc b/src/libutil-tests/util.cc index 32114d9da..a299cd978 100644 --- a/src/libutil-tests/util.cc +++ b/src/libutil-tests/util.cc @@ -146,6 +146,59 @@ TEST(string2Int, trivialConversions) ASSERT_EQ(string2Int("-100"), -100); } +/* ---------------------------------------------------------------------------- + * getSizeUnit + * --------------------------------------------------------------------------*/ + +TEST(getSizeUnit, misc) +{ + ASSERT_EQ(getSizeUnit(0), SizeUnit::Base); + ASSERT_EQ(getSizeUnit(100), SizeUnit::Base); + ASSERT_EQ(getSizeUnit(100), SizeUnit::Base); + ASSERT_EQ(getSizeUnit(972), SizeUnit::Base); + ASSERT_EQ(getSizeUnit(973), SizeUnit::Base); // FIXME: should round down + ASSERT_EQ(getSizeUnit(1024), SizeUnit::Base); + 
ASSERT_EQ(getSizeUnit(-1024), SizeUnit::Base); + ASSERT_EQ(getSizeUnit(1024 * 1024), SizeUnit::Kilo); + ASSERT_EQ(getSizeUnit(1100 * 1024), SizeUnit::Mega); + ASSERT_EQ(getSizeUnit(2ULL * 1024 * 1024 * 1024), SizeUnit::Giga); + ASSERT_EQ(getSizeUnit(2100ULL * 1024 * 1024 * 1024), SizeUnit::Tera); +} + +/* ---------------------------------------------------------------------------- + * getCommonSizeUnit + * --------------------------------------------------------------------------*/ + +TEST(getCommonSizeUnit, misc) +{ + ASSERT_EQ(getCommonSizeUnit({0}), SizeUnit::Base); + ASSERT_EQ(getCommonSizeUnit({0, 100}), SizeUnit::Base); + ASSERT_EQ(getCommonSizeUnit({100, 0}), SizeUnit::Base); + ASSERT_EQ(getCommonSizeUnit({100, 1024 * 1024}), std::nullopt); + ASSERT_EQ(getCommonSizeUnit({1024 * 1024, 100}), std::nullopt); + ASSERT_EQ(getCommonSizeUnit({1024 * 1024, 1024 * 1024}), SizeUnit::Kilo); + ASSERT_EQ(getCommonSizeUnit({2100ULL * 1024 * 1024 * 1024, 2100ULL * 1024 * 1024 * 1024}), SizeUnit::Tera); +} + +/* ---------------------------------------------------------------------------- + * renderSizeWithoutUnit + * --------------------------------------------------------------------------*/ + +TEST(renderSizeWithoutUnit, misc) +{ + ASSERT_EQ(renderSizeWithoutUnit(0, SizeUnit::Base, true), " 0.0"); + ASSERT_EQ(renderSizeWithoutUnit(100, SizeUnit::Base, true), " 0.1"); + ASSERT_EQ(renderSizeWithoutUnit(100, SizeUnit::Base), "0.1"); + ASSERT_EQ(renderSizeWithoutUnit(972, SizeUnit::Base, true), " 0.9"); + ASSERT_EQ(renderSizeWithoutUnit(973, SizeUnit::Base, true), " 1.0"); // FIXME: should round down + ASSERT_EQ(renderSizeWithoutUnit(1024, SizeUnit::Base, true), " 1.0"); + ASSERT_EQ(renderSizeWithoutUnit(-1024, SizeUnit::Base, true), " -1.0"); + ASSERT_EQ(renderSizeWithoutUnit(1024 * 1024, SizeUnit::Kilo, true), "1024.0"); + ASSERT_EQ(renderSizeWithoutUnit(1100 * 1024, SizeUnit::Mega, true), " 1.1"); + ASSERT_EQ(renderSizeWithoutUnit(2ULL * 1024 * 1024 * 1024, SizeUnit::Giga, 
true), " 2.0"); + ASSERT_EQ(renderSizeWithoutUnit(2100ULL * 1024 * 1024 * 1024, SizeUnit::Tera, true), " 2.1"); +} + /* ---------------------------------------------------------------------------- * renderSize * --------------------------------------------------------------------------*/ diff --git a/src/libutil/include/nix/util/util.hh b/src/libutil/include/nix/util/util.hh index 1234937b4..ffec8f1a4 100644 --- a/src/libutil/include/nix/util/util.hh +++ b/src/libutil/include/nix/util/util.hh @@ -99,6 +99,42 @@ N string2IntWithUnitPrefix(std::string_view s) throw UsageError("'%s' is not an integer", s); } +// Base also uses 'K', because it should also displayed as KiB => 100 Bytes => 0.1 KiB +#define NIX_UTIL_SIZE_UNITS \ + NIX_UTIL_DEFINE_SIZE_UNIT(Base, 'K') \ + NIX_UTIL_DEFINE_SIZE_UNIT(Kilo, 'K') \ + NIX_UTIL_DEFINE_SIZE_UNIT(Mega, 'M') \ + NIX_UTIL_DEFINE_SIZE_UNIT(Giga, 'G') \ + NIX_UTIL_DEFINE_SIZE_UNIT(Tera, 'T') \ + NIX_UTIL_DEFINE_SIZE_UNIT(Peta, 'P') \ + NIX_UTIL_DEFINE_SIZE_UNIT(Exa, 'E') \ + NIX_UTIL_DEFINE_SIZE_UNIT(Zetta, 'Z') \ + NIX_UTIL_DEFINE_SIZE_UNIT(Yotta, 'Y') + +enum class SizeUnit { +#define NIX_UTIL_DEFINE_SIZE_UNIT(name, suffix) name, + NIX_UTIL_SIZE_UNITS +#undef NIX_UTIL_DEFINE_SIZE_UNIT +}; + +constexpr inline auto sizeUnits = std::to_array({ +#define NIX_UTIL_DEFINE_SIZE_UNIT(name, suffix) SizeUnit::name, + NIX_UTIL_SIZE_UNITS +#undef NIX_UTIL_DEFINE_SIZE_UNIT +}); + +SizeUnit getSizeUnit(int64_t value); + +/** + * Returns the unit if all values would be rendered using the same unit + * otherwise returns `std::nullopt`. + */ +std::optional getCommonSizeUnit(std::initializer_list values); + +std::string renderSizeWithoutUnit(int64_t value, SizeUnit unit, bool align = false); + +char getSizeUnitSuffix(SizeUnit unit); + /** * Pretty-print a byte value, e.g. 12433615056 is rendered as `11.6 * GiB`. 
If `align` is set, the number will be right-justified by diff --git a/src/libutil/util.cc b/src/libutil/util.cc index f14bc63ac..d75aa4d67 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -132,17 +132,62 @@ std::optional string2Float(const std::string_view s) template std::optional string2Float(const std::string_view s); template std::optional string2Float(const std::string_view s); +static const int64_t conversionNumber = 1024; + +SizeUnit getSizeUnit(int64_t value) +{ + auto unit = sizeUnits.begin(); + uint64_t absValue = std::abs(value); + while (absValue > conversionNumber && unit < sizeUnits.end()) { + unit++; + absValue /= conversionNumber; + } + return *unit; +} + +std::optional getCommonSizeUnit(std::initializer_list values) +{ + assert(values.size() > 0); + + auto it = values.begin(); + SizeUnit unit = getSizeUnit(*it); + it++; + + for (; it != values.end(); it++) { + if (unit != getSizeUnit(*it)) { + return std::nullopt; + } + } + + return unit; +} + +std::string renderSizeWithoutUnit(int64_t value, SizeUnit unit, bool align) +{ + // bytes should also displayed as KiB => 100 Bytes => 0.1 KiB + auto power = std::max>(1, std::to_underlying(unit)); + double denominator = std::pow(conversionNumber, power); + double result = (double) value / denominator; + return fmt(align ? "%6.1f" : "%.1f", result); +} + +char getSizeUnitSuffix(SizeUnit unit) +{ + switch (unit) { +#define NIX_UTIL_DEFINE_SIZE_UNIT(name, suffix) \ + case SizeUnit::name: \ + return suffix; + NIX_UTIL_SIZE_UNITS +#undef NIX_UTIL_DEFINE_SIZE_UNIT + } + + assert(false); +} + std::string renderSize(int64_t value, bool align) { - static const std::array prefixes{{'K', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y'}}; - size_t power = 0; - double abs_value = std::abs(value); - while (abs_value > 1024 && power < prefixes.size()) { - ++power; - abs_value /= 1024; - } - double res = (double) value / std::pow(1024.0, power); - return fmt(align ? "%6.1f %ciB" : "%.1f %ciB", power == 0 ? 
res / 1024 : res, prefixes.at(power)); + SizeUnit unit = getSizeUnit(value); + return fmt("%s %ciB", renderSizeWithoutUnit(value, unit, align), getSizeUnitSuffix(unit)); } bool hasPrefix(std::string_view s, std::string_view prefix) From 98e61c6da960341d498bd566fc604b3ad218f41e Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 9 Nov 2025 16:52:51 +0300 Subject: [PATCH 185/213] tests/functional: Output an actually correct command to accept test changes I've run into this quite a few times when working with characterization test infra. It would print an invalid command: _NIX_TEST_ACCEPT=1 meson test main/lang Which you'd then proceed to run and it would fail. This commit makes it be honest about the command you need to run: _NIX_TEST_ACCEPT=1 meson test --suite main lang --- tests/functional/characterisation/framework.sh | 2 +- tests/functional/common/test-root.sh | 3 ++- tests/functional/meson.build | 3 ++- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/tests/functional/characterisation/framework.sh b/tests/functional/characterisation/framework.sh index d2c2155db..6490d4ff5 100644 --- a/tests/functional/characterisation/framework.sh +++ b/tests/functional/characterisation/framework.sh @@ -71,7 +71,7 @@ function characterisationTestExit() { echo >&2 '' echo >&2 'You can rerun this test with:' echo >&2 '' - echo >&2 " _NIX_TEST_ACCEPT=1 meson test ${TEST_NAME}" + echo >&2 " _NIX_TEST_ACCEPT=1 meson test --suite ${TEST_SUITE_NAME} ${TEST_NAME}" echo >&2 '' echo >&2 'to regenerate the files containing the expected output,' echo >&2 'and then view the git diff to decide whether a change is' diff --git a/tests/functional/common/test-root.sh b/tests/functional/common/test-root.sh index b50a06267..db17132f4 100644 --- a/tests/functional/common/test-root.sh +++ b/tests/functional/common/test-root.sh @@ -1,4 +1,5 @@ # shellcheck shell=bash -TEST_ROOT=$(realpath "${TMPDIR:-/tmp}/nix-test")/${TEST_NAME:-default/tests\/functional//} 
+TEST_SUBDIR="${TEST_SUITE_NAME:-default}/${TEST_NAME:-tests/functional/}" +TEST_ROOT=$(realpath "${TMPDIR:-/tmp}/nix-test")/"$TEST_SUBDIR" export TEST_ROOT diff --git a/tests/functional/meson.build b/tests/functional/meson.build index 6f649c836..d917d91c3 100644 --- a/tests/functional/meson.build +++ b/tests/functional/meson.build @@ -263,7 +263,8 @@ foreach suite : suites 'ASAN_OPTIONS' : asan_options, '_NIX_TEST_SOURCE_DIR' : meson.current_source_dir(), '_NIX_TEST_BUILD_DIR' : meson.current_build_dir(), - 'TEST_NAME' : suite_name / name, + 'TEST_SUITE_NAME' : suite_name, + 'TEST_NAME' : name, 'NIX_REMOTE' : '', 'PS4' : '+(${BASH_SOURCE[0]-$0}:$LINENO) ', }, From f71599234607ee8b1df328d96605ed91c5489033 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Sun, 9 Nov 2025 15:13:15 +0100 Subject: [PATCH 186/213] Change channel URLs to channels.nixos.org subdomain Update all channel URLs from https://nixos.org/channels/ to https://channels.nixos.org/ to use the more reliable subdomain. The nixos.org domain apex lacks IPv6 support due to DNS hoster limitations. Using the subdomain allows better CDN distribution and improved reliability. 
Updated files: - Installation scripts (multi-user and tarball installers) - Channel URL resolution in eval-settings.cc - Documentation and examples - Docker image default channel URL - Release notes (added note about URL change) Fixes #14517 --- doc/manual/rl-next/channels-subdomain.md | 9 +++++++++ doc/manual/source/command-ref/nix-channel.md | 6 +++--- doc/manual/source/release-notes/rl-2.0.md | 2 +- docker.nix | 2 +- scripts/install-multi-user.sh | 2 +- scripts/install-nix-from-tarball.sh | 2 +- src/libexpr/eval-settings.cc | 2 +- src/libexpr/primops.cc | 2 +- 8 files changed, 18 insertions(+), 9 deletions(-) create mode 100644 doc/manual/rl-next/channels-subdomain.md diff --git a/doc/manual/rl-next/channels-subdomain.md b/doc/manual/rl-next/channels-subdomain.md new file mode 100644 index 000000000..38dc7bb33 --- /dev/null +++ b/doc/manual/rl-next/channels-subdomain.md @@ -0,0 +1,9 @@ +--- +synopsis: Channel URLs migrated to channels.nixos.org subdomain +prs: [14518] +issues: [14517] +--- + +Channel URLs have been updated from `https://nixos.org/channels/` to `https://channels.nixos.org/` throughout Nix. + +The subdomain provides better reliability with IPv6 support and improved CDN distribution. The old domain apex (`nixos.org/channels/`) currently redirects to the new location but may be deprecated in the future. diff --git a/doc/manual/source/command-ref/nix-channel.md b/doc/manual/source/command-ref/nix-channel.md index 3d02a7d40..865f43ccc 100644 --- a/doc/manual/source/command-ref/nix-channel.md +++ b/doc/manual/source/command-ref/nix-channel.md @@ -11,7 +11,7 @@ Channels are a mechanism for referencing remote Nix expressions and conveniently retrieving their latest version. 
The moving parts of channels are: -- The official channels listed at +- The official channels listed at - The user-specific list of [subscribed channels](#subscribed-channels) - The [downloaded channel contents](#channels) - The [Nix expression search path](@docroot@/command-ref/conf-file.md#conf-nix-path), set with the [`-I` option](#opt-I) or the [`NIX_PATH` environment variable](#env-NIX_PATH) @@ -88,9 +88,9 @@ This command has the following operations: Subscribe to the Nixpkgs channel and run `hello` from the GNU Hello package: ```console -$ nix-channel --add https://nixos.org/channels/nixpkgs-unstable +$ nix-channel --add https://channels.nixos.org/nixpkgs-unstable $ nix-channel --list -nixpkgs https://nixos.org/channels/nixpkgs +nixpkgs https://channels.nixos.org/nixpkgs $ nix-channel --update $ nix-shell -p hello --run hello hello diff --git a/doc/manual/source/release-notes/rl-2.0.md b/doc/manual/source/release-notes/rl-2.0.md index aad0de211..25cc5e0a5 100644 --- a/doc/manual/source/release-notes/rl-2.0.md +++ b/doc/manual/source/release-notes/rl-2.0.md @@ -358,7 +358,7 @@ This release has the following new features: they are needed for evaluation. - You can now use `channel:` as a short-hand for - . For example, + [now ]. For example, `nix-build channel:nixos-15.09 -A hello` will build the GNU Hello package from the `nixos-15.09` channel. In the future, this may use Git to fetch updates more efficiently. diff --git a/docker.nix b/docker.nix index 9dbc34d61..32205224b 100644 --- a/docker.nix +++ b/docker.nix @@ -10,7 +10,7 @@ tag ? "latest", bundleNixpkgs ? true, channelName ? "nixpkgs", - channelURL ? "https://nixos.org/channels/nixpkgs-unstable", + channelURL ? "https://channels.nixos.org/nixpkgs-unstable", extraPkgs ? [ ], maxLayers ? 70, nixConf ? 
{ }, diff --git a/scripts/install-multi-user.sh b/scripts/install-multi-user.sh index b013190f9..5ff760a61 100644 --- a/scripts/install-multi-user.sh +++ b/scripts/install-multi-user.sh @@ -714,7 +714,7 @@ EOF place_channel_configuration() { if [ -z "${NIX_INSTALLER_NO_CHANNEL_ADD:-}" ]; then - echo "https://nixos.org/channels/nixpkgs-unstable nixpkgs" > "$SCRATCH/.nix-channels" + echo "https://channels.nixos.org/nixpkgs-unstable nixpkgs" > "$SCRATCH/.nix-channels" _sudo "to set up the default system channel (part 1)" \ install -m 0644 "$SCRATCH/.nix-channels" "$ROOT_HOME/.nix-channels" fi diff --git a/scripts/install-nix-from-tarball.sh b/scripts/install-nix-from-tarball.sh index fd00460ec..f17e4c2af 100644 --- a/scripts/install-nix-from-tarball.sh +++ b/scripts/install-nix-from-tarball.sh @@ -213,7 +213,7 @@ fi # Subscribe the user to the Nixpkgs channel and fetch it. if [ -z "$NIX_INSTALLER_NO_CHANNEL_ADD" ]; then if ! "$nix/bin/nix-channel" --list | grep -q "^nixpkgs "; then - "$nix/bin/nix-channel" --add https://nixos.org/channels/nixpkgs-unstable + "$nix/bin/nix-channel" --add https://channels.nixos.org/nixpkgs-unstable fi if [ -z "$_NIX_INSTALLER_TEST" ]; then if ! 
"$nix/bin/nix-channel" --update nixpkgs; then diff --git a/src/libexpr/eval-settings.cc b/src/libexpr/eval-settings.cc index 93db5aebb..33c90259f 100644 --- a/src/libexpr/eval-settings.cc +++ b/src/libexpr/eval-settings.cc @@ -92,7 +92,7 @@ bool EvalSettings::isPseudoUrl(std::string_view s) std::string EvalSettings::resolvePseudoUrl(std::string_view url) { if (hasPrefix(url, "channel:")) - return "https://nixos.org/channels/" + std::string(url.substr(8)) + "/nixexprs.tar.xz"; + return "https://channels.nixos.org/" + std::string(url.substr(8)) + "/nixexprs.tar.xz"; else return std::string(url); } diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 0bd03b232..33aa29f3a 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -2234,7 +2234,7 @@ static RegisterPrimOp primop_findFile( > - ``` > { > prefix = "nixpkgs"; - > path = "https://nixos.org/channels/nixos-unstable/nixexprs.tar.xz"; + > path = "https://channels.nixos.org/nixos-unstable/nixexprs.tar.xz"; > } > ``` From 86f090837b07e8dacf8f839f603a44768b2ed999 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 9 Nov 2025 18:55:11 +0300 Subject: [PATCH 187/213] tests/functional: Add tests for builtins.dirOf These will change in the next commit to fix the silent regression from 2.23 in the handling of multiple subsequent path separators. 
--- .../lang/eval-okay-builtins-dirOf.exp | 1 + .../lang/eval-okay-builtins-dirOf.nix | 21 +++++++++++++++++++ 2 files changed, 22 insertions(+) create mode 100644 tests/functional/lang/eval-okay-builtins-dirOf.exp create mode 100644 tests/functional/lang/eval-okay-builtins-dirOf.nix diff --git a/tests/functional/lang/eval-okay-builtins-dirOf.exp b/tests/functional/lang/eval-okay-builtins-dirOf.exp new file mode 100644 index 000000000..1130b1510 --- /dev/null +++ b/tests/functional/lang/eval-okay-builtins-dirOf.exp @@ -0,0 +1 @@ +{ pathDoesntExistNested1 = /totallydoesntexistreally; pathDoesntExistNested2 = /totallydoesntexistreally/subdir1; pathDoesntExistRoot = /; pathRoot = /; stringEmpty = "."; stringMultipleSeps = "a"; stringNoSep = "."; stringRoot = "/"; stringRootA = "/"; stringRootSlash = "//"; stringRootSlashSlash = "///"; stringSingleDir = "a"; stringWithDot = "a/b/c/."; stringWithDotAndDotDot = "a/b/c/../."; stringWithDotAndDotDotSep2 = "a/b/c/../."; stringWithDotDot = "a/b/c/.."; stringWithDotDotSep2 = "a/b/c/.."; stringWithDotSep2 = "a/b/c/."; } diff --git a/tests/functional/lang/eval-okay-builtins-dirOf.nix b/tests/functional/lang/eval-okay-builtins-dirOf.nix new file mode 100644 index 000000000..d2eae1c4e --- /dev/null +++ b/tests/functional/lang/eval-okay-builtins-dirOf.nix @@ -0,0 +1,21 @@ +{ + stringEmpty = dirOf ""; + stringNoSep = dirOf "filename"; + stringSingleDir = dirOf "a/b"; + stringMultipleSeps = dirOf "a///b"; + stringRoot = dirOf "/"; + stringRootSlash = dirOf "//"; + stringRootSlashSlash = dirOf "///"; + stringRootA = dirOf "/a"; + stringWithDot = dirOf "a/b/c/./d"; + stringWithDotSep2 = dirOf "a/b/c/.//d"; + stringWithDotDot = dirOf "a/b/c/../d"; + stringWithDotDotSep2 = dirOf "a/b/c/..//d"; + stringWithDotAndDotDot = dirOf "a/b/c/.././d"; + stringWithDotAndDotDotSep2 = dirOf "a/b/c/.././/d"; + + pathRoot = dirOf /.; + pathDoesntExistRoot = dirOf /totallydoesntexistreally; + pathDoesntExistNested1 = dirOf 
/totallydoesntexistreally/subdir1; + pathDoesntExistNested2 = dirOf /totallydoesntexistreally/subdir1/subdir2; +} From a33fccf55a8450f54c91c56a54d462ada57c7712 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 9 Nov 2025 18:33:03 +0300 Subject: [PATCH 188/213] libexpr: Don't use nix::dirOf in prim_dirOf This gets us back to pre-2.23 behavior of this primop. Done by inlining the code of `nix::dirOf` from 2.2-maintenance. --- src/libexpr/primops.cc | 9 +++++++-- tests/functional/lang/eval-okay-builtins-dirOf.exp | 2 +- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 0bd03b232..5bd262071 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -2022,8 +2022,13 @@ static void prim_dirOf(EvalState & state, const PosIdx pos, Value ** args, Value NixStringContext context; auto path = state.coerceToString( pos, *args[0], context, "while evaluating the first argument passed to 'builtins.dirOf'", false, false); - auto dir = dirOf(*path); - v.mkString(dir, context); + auto pos = path->rfind('/'); + if (pos == path->npos) + v.mkStringMove(".", context); + else if (pos == 0) + v.mkStringMove("/", context); + else + v.mkString(path->substr(0, pos), context); } } diff --git a/tests/functional/lang/eval-okay-builtins-dirOf.exp b/tests/functional/lang/eval-okay-builtins-dirOf.exp index 1130b1510..e0093e93a 100644 --- a/tests/functional/lang/eval-okay-builtins-dirOf.exp +++ b/tests/functional/lang/eval-okay-builtins-dirOf.exp @@ -1 +1 @@ -{ pathDoesntExistNested1 = /totallydoesntexistreally; pathDoesntExistNested2 = /totallydoesntexistreally/subdir1; pathDoesntExistRoot = /; pathRoot = /; stringEmpty = "."; stringMultipleSeps = "a"; stringNoSep = "."; stringRoot = "/"; stringRootA = "/"; stringRootSlash = "//"; stringRootSlashSlash = "///"; stringSingleDir = "a"; stringWithDot = "a/b/c/."; stringWithDotAndDotDot = "a/b/c/../."; stringWithDotAndDotDotSep2 = "a/b/c/../."; stringWithDotDot = 
"a/b/c/.."; stringWithDotDotSep2 = "a/b/c/.."; stringWithDotSep2 = "a/b/c/."; } +{ pathDoesntExistNested1 = /totallydoesntexistreally; pathDoesntExistNested2 = /totallydoesntexistreally/subdir1; pathDoesntExistRoot = /; pathRoot = /; stringEmpty = "."; stringMultipleSeps = "a//"; stringNoSep = "."; stringRoot = "/"; stringRootA = "/"; stringRootSlash = "/"; stringRootSlashSlash = "//"; stringSingleDir = "a"; stringWithDot = "a/b/c/."; stringWithDotAndDotDot = "a/b/c/../."; stringWithDotAndDotDotSep2 = "a/b/c/.././"; stringWithDotDot = "a/b/c/.."; stringWithDotDotSep2 = "a/b/c/../"; stringWithDotSep2 = "a/b/c/./"; } From a091a8100a8587185e579d4cff04381e8e074f12 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Sun, 9 Nov 2025 22:09:18 +0300 Subject: [PATCH 189/213] libexpr: Clear PosTable contents in EvalState::resetFileCache Otherwise PosTable grows indefinitely for each reload. Since the total input size is limited to 4GB (uint32_t for byte offset PosIdx) it can get exhausted pretty quickly. This ensures that we don't waste memory on reloads as well. --- src/libexpr/eval.cc | 1 + src/libutil/include/nix/util/pos-table.hh | 10 ++++++++++ 2 files changed, 11 insertions(+) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 7036df957..5ddbac634 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -1113,6 +1113,7 @@ void EvalState::resetFileCache() importResolutionCache->clear(); fileEvalCache->clear(); inputCache->clear(); + positions.clear(); } void EvalState::eval(Expr * e, Value & v) diff --git a/src/libutil/include/nix/util/pos-table.hh b/src/libutil/include/nix/util/pos-table.hh index d944b1353..c5f93a3d5 100644 --- a/src/libutil/include/nix/util/pos-table.hh +++ b/src/libutil/include/nix/util/pos-table.hh @@ -111,6 +111,16 @@ public: return o->origin; return std::monostate{}; } + + /** + * Remove all origins from the table. 
+ */ + void clear() + { + auto lines = linesCache.lock(); + lines->clear(); + origins.clear(); + } }; } // namespace nix From 318eea040f7ebca3719fc802648be7c1016d495c Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 3 Nov 2025 18:53:27 -0500 Subject: [PATCH 190/213] Encapsulate and slightly optimize string contexts These steps are done (originally in order, but I squashed it as the end result is still pretty small, and the churn in the code comments was a bit annoying to keep straight). 1. Create proper struct type for string contexts on the heap This will make it easier to change this type in the future. 2. Make `Value::StringWithContext` iterable This make some for loops a lot more terse. 3. Encapsulate `Value::StringWithContext::Context::elems` It turns out the iterators we just exposed are sufficient. 4. Make `StringWithContext::Context` length-prefixed instead Rather than having a null pointer at the end, have a `size_t` at the beginning. This is the exact same size (note that null pointer is longer than null byte) and thus takes no more space! Also, see the new TODO on naming. The thing we already so-named is a builder type for string contexts, not the on-heap type. The `fromBuilder` static method reflects what the names ought to be too. 
Co-authored-by: Sergei Zimmerman --- src/libexpr/eval-cache.cc | 10 +-- src/libexpr/eval.cc | 32 ++++----- src/libexpr/include/nix/expr/value.hh | 65 ++++++++++++++++++- src/libexpr/include/nix/expr/value/context.hh | 13 ++++ 4 files changed, 97 insertions(+), 23 deletions(-) diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc index de74d2143..0372d6cc1 100644 --- a/src/libexpr/eval-cache.cc +++ b/src/libexpr/eval-cache.cc @@ -136,17 +136,19 @@ struct AttrDb }); } - AttrId setString(AttrKey key, std::string_view s, const char ** context = nullptr) + AttrId setString(AttrKey key, std::string_view s, const Value::StringWithContext::Context * context = nullptr) { return doSQLite([&]() { auto state(_state->lock()); if (context) { std::string ctx; - for (const char ** p = context; *p; ++p) { - if (p != context) + bool first = true; + for (auto * elem : *context) { + if (!first) ctx.push_back(' '); - ctx.append(*p); + ctx.append(elem); + first = false; } state->insertAttributeWithContext.use()(key.first)(symbols[key.second])(AttrType::String) (s) (ctx) .exec(); diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 7036df957..940c6a937 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -36,6 +36,7 @@ #include #include #include +#include #include #include @@ -821,28 +822,25 @@ void Value::mkString(std::string_view s) mkStringNoCopy(makeImmutableString(s)); } -static const char ** encodeContext(const NixStringContext & context) +Value::StringWithContext::Context * Value::StringWithContext::Context::fromBuilder(const NixStringContext & context) { - if (!context.empty()) { - size_t n = 0; - auto ctx = (const char **) allocBytes((context.size() + 1) * sizeof(char *)); - for (auto & i : context) { - ctx[n++] = makeImmutableString({i.to_string()}); - } - ctx[n] = nullptr; - return ctx; - } else + if (context.empty()) return nullptr; + + auto ctx = new (allocBytes(sizeof(Context) + context.size() * sizeof(value_type))) Context(context.size()); + 
std::ranges::transform( + context, ctx->elems, [](const NixStringContextElem & elt) { return makeImmutableString(elt.to_string()); }); + return ctx; } void Value::mkString(std::string_view s, const NixStringContext & context) { - mkStringNoCopy(makeImmutableString(s), encodeContext(context)); + mkStringNoCopy(makeImmutableString(s), Value::StringWithContext::Context::fromBuilder(context)); } void Value::mkStringMove(const char * s, const NixStringContext & context) { - mkStringNoCopy(s, encodeContext(context)); + mkStringNoCopy(s, Value::StringWithContext::Context::fromBuilder(context)); } void Value::mkPath(const SourcePath & path) @@ -2287,9 +2285,9 @@ std::string_view EvalState::forceString(Value & v, const PosIdx pos, std::string void copyContext(const Value & v, NixStringContext & context, const ExperimentalFeatureSettings & xpSettings) { - if (v.context()) - for (const char ** p = v.context(); *p; ++p) - context.insert(NixStringContextElem::parse(*p, xpSettings)); + if (auto * ctx = v.context()) + for (auto * elem : *ctx) + context.insert(NixStringContextElem::parse(elem, xpSettings)); } std::string_view EvalState::forceString( @@ -2309,7 +2307,9 @@ std::string_view EvalState::forceStringNoCtx(Value & v, const PosIdx pos, std::s auto s = forceString(v, pos, errorCtx); if (v.context()) { error( - "the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), v.context()[0]) + "the string '%1%' is not allowed to refer to a store path (such as '%2%')", + v.string_view(), + *v.context()->begin()) .withTrace(pos, errorCtx) .debugThrow(); } diff --git a/src/libexpr/include/nix/expr/value.hh b/src/libexpr/include/nix/expr/value.hh index 706a4fe3f..0fb36e1b2 100644 --- a/src/libexpr/include/nix/expr/value.hh +++ b/src/libexpr/include/nix/expr/value.hh @@ -220,7 +220,66 @@ struct ValueBase struct StringWithContext { const char * c_str; - const char ** context; // must be in sorted order + + /** + * The type of the context itself. 
+ * + * Currently, it is length-prefixed array of pointers to + * null-terminated strings. The strings are specially formatted + * to represent a flattening of the recursive sum type that is a + * context element. + * + * @See NixStringContext for an more easily understood type, + * that of the "builder" for this data structure. + */ + struct Context + { + using value_type = const char *; + using size_type = std::size_t; + using iterator = const value_type *; + + Context(size_type size) + : size_(size) + { + } + + private: + /** + * Number of items in the array + */ + size_type size_; + + /** + * @pre must be in sorted order + */ + value_type elems[]; + + public: + iterator begin() const + { + return elems; + } + + iterator end() const + { + return elems + size(); + } + + size_type size() const + { + return size_; + } + + /** + * @return null pointer when context.empty() + */ + static Context * fromBuilder(const NixStringContext & context); + }; + + /** + * May be null for a string without context. + */ + const Context * context; }; struct Path @@ -991,7 +1050,7 @@ public: setStorage(b); } - void mkStringNoCopy(const char * s, const char ** context = 0) noexcept + void mkStringNoCopy(const char * s, const Value::StringWithContext::Context * context = nullptr) noexcept { setStorage(StringWithContext{.c_str = s, .context = context}); } @@ -1117,7 +1176,7 @@ public: return getStorage().c_str; } - const char ** context() const noexcept + const Value::StringWithContext::Context * context() const noexcept { return getStorage().context; } diff --git a/src/libexpr/include/nix/expr/value/context.hh b/src/libexpr/include/nix/expr/value/context.hh index dcfacbb21..054516bc2 100644 --- a/src/libexpr/include/nix/expr/value/context.hh +++ b/src/libexpr/include/nix/expr/value/context.hh @@ -24,6 +24,14 @@ public: } }; +/** + * @todo This should be renamed to `StringContextBuilderElem`, since: + * + * 1. We use `*Builder` for off-heap temporary data structures + * + * 2. 
The `Nix*` is totally redundant. (And my mistake from a long time + * ago.) + */ struct NixStringContextElem { /** @@ -77,6 +85,11 @@ struct NixStringContextElem std::string to_string() const; }; +/** + * @todo This should be renamed to `StringContextBuilder`. + * + * @see NixStringContextElem for explanation why. + */ typedef std::set NixStringContext; } // namespace nix From 8c113f80f3e03b89207e1f606a533a1cbd39c345 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Mon, 10 Nov 2025 00:54:20 -0500 Subject: [PATCH 191/213] Make string matcher for libexpr texts like others Forgot to print in one case Co-authored-by: Aspen Smith --- src/libexpr-test-support/include/nix/expr/tests/libexpr.hh | 1 + 1 file changed, 1 insertion(+) diff --git a/src/libexpr-test-support/include/nix/expr/tests/libexpr.hh b/src/libexpr-test-support/include/nix/expr/tests/libexpr.hh index daae00802..658a6ffe0 100644 --- a/src/libexpr-test-support/include/nix/expr/tests/libexpr.hh +++ b/src/libexpr-test-support/include/nix/expr/tests/libexpr.hh @@ -104,6 +104,7 @@ MATCHER(IsAttrs, "") MATCHER_P(IsStringEq, s, fmt("The string is equal to \"%1%\"", s)) { if (arg.type() != nString) { + *result_listener << "Expected a string got " << arg.type(); return false; } return arg.string_view() == s; From 3bf8c760725c4225f1f0c7e132609237b44985be Mon Sep 17 00:00:00 2001 From: Aspen Smith Date: Fri, 12 Sep 2025 20:45:20 -0400 Subject: [PATCH 192/213] Use hybrid C / Pascal strings in the evaluator Replace the null-terminated C-style strings in Value with hybrid C / Pascal strings, where the length is stored in the allocation before the data, and there is still a null byte at the end for the sake of C interop. 
Co-Authored-By: Taeer Bar-Yam Co-Authored-By: Sergei Zimmerman --- src/libexpr-tests/json.cc | 5 +- src/libexpr-tests/value/print.cc | 21 +-- src/libexpr-tests/value/value.cc | 13 +- src/libexpr/eval-cache.cc | 2 +- src/libexpr/eval.cc | 62 ++++++--- src/libexpr/include/nix/expr/meson.build | 1 + src/libexpr/include/nix/expr/nixexpr.hh | 20 +-- src/libexpr/include/nix/expr/parser-state.hh | 6 +- .../include/nix/expr/static-string-data.hh | 44 ++++++ src/libexpr/include/nix/expr/symbol-table.hh | 35 +++-- src/libexpr/include/nix/expr/value.hh | 131 +++++++++++++++--- src/libexpr/primops.cc | 33 ++--- src/libexpr/primops/fromTOML.cc | 3 +- src/nix/nix-env/user-env.cc | 3 +- 14 files changed, 279 insertions(+), 100 deletions(-) create mode 100644 src/libexpr/include/nix/expr/static-string-data.hh diff --git a/src/libexpr-tests/json.cc b/src/libexpr-tests/json.cc index 8b1bd7d96..aa71c4d86 100644 --- a/src/libexpr-tests/json.cc +++ b/src/libexpr-tests/json.cc @@ -1,5 +1,6 @@ #include "nix/expr/tests/libexpr.hh" #include "nix/expr/value-to-json.hh" +#include "nix/expr/static-string-data.hh" namespace nix { // Testing the conversion to JSON @@ -54,7 +55,7 @@ TEST_F(JSONValueTest, IntNegative) TEST_F(JSONValueTest, String) { Value v; - v.mkStringNoCopy("test"); + v.mkStringNoCopy("test"_sds); ASSERT_EQ(getJSONValue(v), "\"test\""); } @@ -62,7 +63,7 @@ TEST_F(JSONValueTest, StringQuotes) { Value v; - v.mkStringNoCopy("test\""); + v.mkStringNoCopy("test\""_sds); ASSERT_EQ(getJSONValue(v), "\"test\\\"\""); } diff --git a/src/libexpr-tests/value/print.cc b/src/libexpr-tests/value/print.cc index 0006da2ff..0456835b4 100644 --- a/src/libexpr-tests/value/print.cc +++ b/src/libexpr-tests/value/print.cc @@ -1,4 +1,5 @@ #include "nix/expr/tests/libexpr.hh" +#include "nix/expr/static-string-data.hh" #include "nix/expr/value.hh" #include "nix/expr/print.hh" @@ -35,14 +36,14 @@ TEST_F(ValuePrintingTests, tBool) TEST_F(ValuePrintingTests, tString) { Value vString; - 
vString.mkStringNoCopy("some-string"); + vString.mkStringNoCopy("some-string"_sds); test(vString, "\"some-string\""); } TEST_F(ValuePrintingTests, tPath) { Value vPath; - vPath.mkStringNoCopy("/foo"); + vPath.mkStringNoCopy("/foo"_sds); test(vPath, "\"/foo\""); } @@ -289,10 +290,10 @@ TEST_F(StringPrintingTests, maxLengthTruncation) TEST_F(ValuePrintingTests, attrsTypeFirst) { Value vType; - vType.mkStringNoCopy("puppy"); + vType.mkStringNoCopy("puppy"_sds); Value vApple; - vApple.mkStringNoCopy("apple"); + vApple.mkStringNoCopy("apple"_sds); BindingsBuilder builder = state.buildBindings(10); builder.insert(state.symbols.create("type"), &vType); @@ -333,7 +334,7 @@ TEST_F(ValuePrintingTests, ansiColorsBool) TEST_F(ValuePrintingTests, ansiColorsString) { Value v; - v.mkStringNoCopy("puppy"); + v.mkStringNoCopy("puppy"_sds); test(v, ANSI_MAGENTA "\"puppy\"" ANSI_NORMAL, PrintOptions{.ansiColors = true}); } @@ -341,7 +342,7 @@ TEST_F(ValuePrintingTests, ansiColorsString) TEST_F(ValuePrintingTests, ansiColorsStringElided) { Value v; - v.mkStringNoCopy("puppy"); + v.mkStringNoCopy("puppy"_sds); test( v, @@ -389,7 +390,7 @@ TEST_F(ValuePrintingTests, ansiColorsAttrs) TEST_F(ValuePrintingTests, ansiColorsDerivation) { Value vDerivation; - vDerivation.mkStringNoCopy("derivation"); + vDerivation.mkStringNoCopy("derivation"_sds); BindingsBuilder builder = state.buildBindings(10); builder.insert(state.s.type, &vDerivation); @@ -412,7 +413,7 @@ TEST_F(ValuePrintingTests, ansiColorsError) { Value throw_ = state.getBuiltin("throw"); Value message; - message.mkStringNoCopy("uh oh!"); + message.mkStringNoCopy("uh oh!"_sds); Value vError; vError.mkApp(&throw_, &message); @@ -429,12 +430,12 @@ TEST_F(ValuePrintingTests, ansiColorsDerivationError) { Value throw_ = state.getBuiltin("throw"); Value message; - message.mkStringNoCopy("uh oh!"); + message.mkStringNoCopy("uh oh!"_sds); Value vError; vError.mkApp(&throw_, &message); Value vDerivation; - 
vDerivation.mkStringNoCopy("derivation"); + vDerivation.mkStringNoCopy("derivation"_sds); BindingsBuilder builder = state.buildBindings(10); builder.insert(state.s.type, &vDerivation); diff --git a/src/libexpr-tests/value/value.cc b/src/libexpr-tests/value/value.cc index 229e449db..420db0f31 100644 --- a/src/libexpr-tests/value/value.cc +++ b/src/libexpr-tests/value/value.cc @@ -1,4 +1,5 @@ #include "nix/expr/value.hh" +#include "nix/expr/static-string-data.hh" #include "nix/store/tests/libstore.hh" #include @@ -27,17 +28,17 @@ TEST_F(ValueTest, staticString) { Value vStr1; Value vStr2; - vStr1.mkStringNoCopy("foo"); - vStr2.mkStringNoCopy("foo"); + vStr1.mkStringNoCopy("foo"_sds); + vStr2.mkStringNoCopy("foo"_sds); - auto sd1 = vStr1.string_view(); - auto sd2 = vStr2.string_view(); + auto & sd1 = vStr1.string_data(); + auto & sd2 = vStr2.string_data(); // The strings should be the same - ASSERT_EQ(sd1, sd2); + ASSERT_EQ(sd1.view(), sd2.view()); // The strings should also be backed by the same (static) allocation - ASSERT_EQ(sd1.data(), sd2.data()); + ASSERT_EQ(&sd1, &sd2); } } // namespace nix diff --git a/src/libexpr/eval-cache.cc b/src/libexpr/eval-cache.cc index 0372d6cc1..4cfa3dabb 100644 --- a/src/libexpr/eval-cache.cc +++ b/src/libexpr/eval-cache.cc @@ -147,7 +147,7 @@ struct AttrDb for (auto * elem : *context) { if (!first) ctx.push_back(' '); - ctx.append(elem); + ctx.append(elem->view()); first = false; } state->insertAttributeWithContext.use()(key.first)(symbols[key.second])(AttrType::String) (s) (ctx) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 807f7544c..71619a9be 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -3,6 +3,7 @@ #include "nix/expr/primops.hh" #include "nix/expr/print-options.hh" #include "nix/expr/symbol-table.hh" +#include "nix/expr/value.hh" #include "nix/util/exit.hh" #include "nix/util/types.hh" #include "nix/util/util.hh" @@ -28,6 +29,8 @@ #include "parser-tab.hh" #include +#include +#include #include 
#include #include @@ -48,6 +51,9 @@ using json = nlohmann::json; namespace nix { +/** + * Just for doc strings. Not for regular string values. + */ static char * allocString(size_t size) { char * t; @@ -61,6 +67,9 @@ static char * allocString(size_t size) // string allocations. // This function handles makeImmutableString(std::string_view()) by returning // the empty string. +/** + * Just for doc strings. Not for regular string values. + */ static const char * makeImmutableString(std::string_view s) { const size_t size = s.size(); @@ -72,6 +81,25 @@ static const char * makeImmutableString(std::string_view s) return t; } +StringData & StringData::alloc(size_t size) +{ + void * t = GC_MALLOC_ATOMIC(sizeof(StringData) + size + 1); + if (!t) + throw std::bad_alloc(); + auto res = new (t) StringData(size); + return *res; +} + +const StringData & StringData::make(std::string_view s) +{ + if (s.empty()) + return ""_sds; + auto & res = alloc(s.size()); + std::memcpy(&res.data_, s.data(), s.size()); + res.data_[s.size()] = '\0'; + return res; +} + RootValue allocRootValue(Value * v) { return std::allocate_shared(traceable_allocator(), v); @@ -585,7 +613,9 @@ std::optional EvalState::getDoc(Value & v) .name = name, .arity = 0, // FIXME: figure out how deep by syntax only? It's not semantically useful though... .args = {}, - .doc = makeImmutableString(s.view()), // NOTE: memory leak when compiled without GC + /* N.B. Can't use StringData here, because that would lead to an interior pointer. + NOTE: memory leak when compiled without GC. 
*/ + .doc = makeImmutableString(s.view()), }; } if (isFunctor(v)) { @@ -819,7 +849,7 @@ DebugTraceStacker::DebugTraceStacker(EvalState & evalState, DebugTrace t) void Value::mkString(std::string_view s) { - mkStringNoCopy(makeImmutableString(s)); + mkStringNoCopy(StringData::make(s)); } Value::StringWithContext::Context * Value::StringWithContext::Context::fromBuilder(const NixStringContext & context) @@ -829,23 +859,23 @@ Value::StringWithContext::Context * Value::StringWithContext::Context::fromBuild auto ctx = new (allocBytes(sizeof(Context) + context.size() * sizeof(value_type))) Context(context.size()); std::ranges::transform( - context, ctx->elems, [](const NixStringContextElem & elt) { return makeImmutableString(elt.to_string()); }); + context, ctx->elems, [](const NixStringContextElem & elt) { return &StringData::make(elt.to_string()); }); return ctx; } void Value::mkString(std::string_view s, const NixStringContext & context) { - mkStringNoCopy(makeImmutableString(s), Value::StringWithContext::Context::fromBuilder(context)); + mkStringNoCopy(StringData::make(s), Value::StringWithContext::Context::fromBuilder(context)); } -void Value::mkStringMove(const char * s, const NixStringContext & context) +void Value::mkStringMove(const StringData & s, const NixStringContext & context) { mkStringNoCopy(s, Value::StringWithContext::Context::fromBuilder(context)); } void Value::mkPath(const SourcePath & path) { - mkPath(&*path.accessor, makeImmutableString(path.path.abs())); + mkPath(&*path.accessor, StringData::make(path.path.abs())); } inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval) @@ -2099,21 +2129,21 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) .atPos(pos) .withFrame(env, *this) .debugThrow(); - std::string result_str; - result_str.reserve(sSize); + std::string resultStr; + resultStr.reserve(sSize); for (const auto & part : strings) { - result_str += *part; + resultStr += *part; } - 
v.mkPath(state.rootPath(CanonPath(result_str))); + v.mkPath(state.rootPath(CanonPath(resultStr))); } else { - char * result_str = allocString(sSize + 1); - char * tmp = result_str; + auto & resultStr = StringData::alloc(sSize); + auto * tmp = resultStr.data(); for (const auto & part : strings) { - memcpy(tmp, part->data(), part->size()); + std::memcpy(tmp, part->data(), part->size()); tmp += part->size(); } - *tmp = 0; - v.mkStringMove(result_str, context); + *tmp = '\0'; + v.mkStringMove(resultStr, context); } } @@ -2288,7 +2318,7 @@ void copyContext(const Value & v, NixStringContext & context, const Experimental { if (auto * ctx = v.context()) for (auto * elem : *ctx) - context.insert(NixStringContextElem::parse(elem, xpSettings)); + context.insert(NixStringContextElem::parse(elem->view(), xpSettings)); } std::string_view EvalState::forceString( diff --git a/src/libexpr/include/nix/expr/meson.build b/src/libexpr/include/nix/expr/meson.build index 44ff171c2..2b0fbc406 100644 --- a/src/libexpr/include/nix/expr/meson.build +++ b/src/libexpr/include/nix/expr/meson.build @@ -31,6 +31,7 @@ headers = [ config_pub_h ] + files( 'print.hh', 'repl-exit-status.hh', 'search-path.hh', + 'static-string-data.hh', 'symbol-table.hh', 'value-to-json.hh', 'value-to-xml.hh', diff --git a/src/libexpr/include/nix/expr/nixexpr.hh b/src/libexpr/include/nix/expr/nixexpr.hh index 08d39cd87..3f7c883f3 100644 --- a/src/libexpr/include/nix/expr/nixexpr.hh +++ b/src/libexpr/include/nix/expr/nixexpr.hh @@ -3,6 +3,7 @@ #include #include +#include #include #include #include @@ -11,6 +12,7 @@ #include "nix/expr/value.hh" #include "nix/expr/symbol-table.hh" #include "nix/expr/eval-error.hh" +#include "nix/expr/static-string-data.hh" #include "nix/util/pos-idx.hh" #include "nix/expr/counter.hh" #include "nix/util/pos-table.hh" @@ -186,22 +188,18 @@ struct ExprString : Expr * This is only for strings already allocated in our polymorphic allocator, * or that live at least that long (e.g. 
c++ string literals) */ - ExprString(const char * s) + ExprString(const StringData & s) { v.mkStringNoCopy(s); }; ExprString(std::pmr::polymorphic_allocator & alloc, std::string_view sv) { - auto len = sv.length(); - if (len == 0) { - v.mkStringNoCopy(""); + if (sv.size() == 0) { + v.mkStringNoCopy(""_sds); return; } - char * s = alloc.allocate(len + 1); - sv.copy(s, len); - s[len] = '\0'; - v.mkStringNoCopy(s); + v.mkStringNoCopy(StringData::make(*alloc.resource(), sv)); }; Value * maybeThunk(EvalState & state, Env & env) override; @@ -216,11 +214,7 @@ struct ExprPath : Expr ExprPath(std::pmr::polymorphic_allocator & alloc, ref accessor, std::string_view sv) : accessor(accessor) { - auto len = sv.length(); - char * s = alloc.allocate(len + 1); - sv.copy(s, len); - s[len] = '\0'; - v.mkPath(&*accessor, s); + v.mkPath(&*accessor, StringData::make(*alloc.resource(), sv)); } Value * maybeThunk(EvalState & state, Env & env) override; diff --git a/src/libexpr/include/nix/expr/parser-state.hh b/src/libexpr/include/nix/expr/parser-state.hh index 89c424f82..661584ea0 100644 --- a/src/libexpr/include/nix/expr/parser-state.hh +++ b/src/libexpr/include/nix/expr/parser-state.hh @@ -4,6 +4,8 @@ #include #include "nix/expr/eval.hh" +#include "nix/expr/value.hh" +#include "nix/expr/static-string-data.hh" namespace nix { @@ -240,7 +242,7 @@ inline Expr * ParserState::stripIndentation(const PosIdx pos, std::vector>> && es) { if (es.empty()) - return exprs.add(""); + return exprs.add(""_sds); /* Figure out the minimum indentation. Note that by design whitespace-only final lines are not taken into account. 
(So @@ -332,7 +334,7 @@ ParserState::stripIndentation(const PosIdx pos, std::vector(""); + auto * const result = exprs.add(""_sds); return result; } diff --git a/src/libexpr/include/nix/expr/static-string-data.hh b/src/libexpr/include/nix/expr/static-string-data.hh new file mode 100644 index 000000000..93b5d46a0 --- /dev/null +++ b/src/libexpr/include/nix/expr/static-string-data.hh @@ -0,0 +1,44 @@ +#pragma once +///@file + +#include "nix/expr/value.hh" + +namespace nix { + +template +struct StringData::Static +{ + /** + * @note Must be first to make layout compatible with StringData. + */ + const size_t size = N - 1; + char data[N]; + + consteval Static(const char (&str)[N]) + { + static_assert(N > 0); + if (str[size] != '\0') + throw; + std::copy_n(str, N, data); + } + + operator const StringData &() const & + { + static_assert(sizeof(decltype(*this)) >= sizeof(StringData)); + static_assert(alignof(decltype(*this)) == alignof(StringData)); + /* NOTE: This cast is somewhat on the fence of what's legal in C++. + The question boils down to whether flexible array members are + layout compatible with fixed-size arrays. This is a gray area, since + FAMs are not standard anyway. 
+ */ + return *reinterpret_cast(this); + } +}; + +template +const StringData & operator""_sds() +{ + return S; +} + +} // namespace nix diff --git a/src/libexpr/include/nix/expr/symbol-table.hh b/src/libexpr/include/nix/expr/symbol-table.hh index 5f2b47dd6..ccee8bee0 100644 --- a/src/libexpr/include/nix/expr/symbol-table.hh +++ b/src/libexpr/include/nix/expr/symbol-table.hh @@ -3,6 +3,7 @@ #include #include "nix/expr/value.hh" +#include "nix/expr/static-string-data.hh" #include "nix/util/chunked-vector.hh" #include "nix/util/error.hh" @@ -16,7 +17,6 @@ class SymbolValue : protected Value friend class SymbolStr; friend class SymbolTable; - uint32_t size_; uint32_t idx; SymbolValue() = default; @@ -24,7 +24,7 @@ class SymbolValue : protected Value public: operator std::string_view() const noexcept { - return {c_str(), size_}; + return string_view(); } }; @@ -96,13 +96,13 @@ class SymbolStr SymbolValueStore & store; std::string_view s; std::size_t hash; - std::pmr::polymorphic_allocator & alloc; + std::pmr::memory_resource & resource; - Key(SymbolValueStore & store, std::string_view s, std::pmr::polymorphic_allocator & stringAlloc) + Key(SymbolValueStore & store, std::string_view s, std::pmr::memory_resource & stringMemory) : store(store) , s(s) , hash(HashType{}(s)) - , alloc(stringAlloc) + , resource(stringMemory) { } }; @@ -122,14 +122,10 @@ public: // for multi-threaded implementations: lock store and allocator here const auto & [v, idx] = key.store.add(SymbolValue{}); if (size == 0) { - v.mkStringNoCopy("", nullptr); + v.mkStringNoCopy(""_sds, nullptr); } else { - auto s = key.alloc.allocate(size + 1); - memcpy(s, key.s.data(), size); - s[size] = '\0'; - v.mkStringNoCopy(s, nullptr); + v.mkStringNoCopy(StringData::make(key.resource, key.s)); } - v.size_ = size; v.idx = idx; this->s = &v; } @@ -139,6 +135,12 @@ public: return *s == s2; } + [[gnu::always_inline]] + const StringData & string_data() const noexcept + { + return s->string_data(); + } + 
[[gnu::always_inline]] const char * c_str() const noexcept { @@ -155,13 +157,17 @@ public: [[gnu::always_inline]] bool empty() const noexcept { - return s->size_ == 0; + auto * p = &s->string_data(); + // Save a dereference in the sentinel value case + if (p == &""_sds) + return true; + return p->size() == 0; } [[gnu::always_inline]] size_t size() const noexcept { - return s->size_; + return s->string_data().size(); } [[gnu::always_inline]] @@ -259,7 +265,6 @@ private: * During its lifetime the monotonic buffer holds all strings and nodes, if the symbol set is node based. */ std::pmr::monotonic_buffer_resource buffer; - std::pmr::polymorphic_allocator stringAlloc{&buffer}; SymbolStr::SymbolValueStore store{16}; /** @@ -282,7 +287,7 @@ public: // Most symbols are looked up more than once, so we trade off insertion performance // for lookup performance. // FIXME: make this thread-safe. - return Symbol(*symbols.insert(SymbolStr::Key{store, s, stringAlloc}).first); + return Symbol(*symbols.insert(SymbolStr::Key{store, s, buffer}).first); } std::vector resolve(const std::vector & symbols) const diff --git a/src/libexpr/include/nix/expr/value.hh b/src/libexpr/include/nix/expr/value.hh index 0fb36e1b2..54a735fbd 100644 --- a/src/libexpr/include/nix/expr/value.hh +++ b/src/libexpr/include/nix/expr/value.hh @@ -1,8 +1,14 @@ #pragma once ///@file +#include #include +#include +#include +#include +#include #include +#include #include #include @@ -186,6 +192,91 @@ public: friend struct Value; }; +class StringData +{ +public: + using size_type = std::size_t; + + size_type size_; + char data_[]; + + /* + * This in particular ensures that we cannot have a `StringData` + * that we use by value, which is just what we want! + * + * Dynamically sized types aren't a thing in C++ and even flexible array + * members are a language extension and beyond the realm of standard C++. 
+ * Technically, the sizeof of the data_ member is 0 and the intended way to use flexible + array members is to allocate sizeof(StringData) + count * sizeof(char) bytes + and the compiler will consider alignment restrictions for the FAM. + * + */ + + StringData(StringData &&) = delete; + StringData & operator=(StringData &&) = delete; + StringData(const StringData &) = delete; + StringData & operator=(const StringData &) = delete; + ~StringData() = default; + +private: + StringData() = delete; + + explicit StringData(size_type size) + : size_(size) + { + } + +public: + /** + * Allocate StringData on the (possibly) GC-managed heap and copy + * the contents of s to it. + */ + static const StringData & make(std::string_view s); + + /** + * Allocate StringData on the (possibly) GC-managed heap. + * @param size Length of the string (without the NUL terminator). + */ + static StringData & alloc(size_t size); + + size_t size() const + { + return size_; + } + + char * data() noexcept + { + return data_; + } + + const char * data() const noexcept + { + return data_; + } + + const char * c_str() const noexcept + { + return data_; + } + + constexpr std::string_view view() const noexcept + { + return std::string_view(data_, size_); + } + + template + struct Static; + + static StringData & make(std::pmr::memory_resource & resource, std::string_view s) + { + auto & res = + *new (resource.allocate(sizeof(StringData) + s.size() + 1, alignof(StringData))) StringData(s.size()); + std::memcpy(res.data_, s.data(), s.size()); + res.data_[s.size()] = '\0'; + return res; + } +}; + namespace detail { /** @@ -219,7 +310,7 @@ struct ValueBase */ struct StringWithContext { - const char * c_str; + const StringData * str; /** * The type of the context itself.
@@ -234,7 +325,7 @@ struct ValueBase */ struct Context { - using value_type = const char *; + using value_type = const StringData *; using size_type = std::size_t; using iterator = const value_type *; @@ -285,7 +376,7 @@ struct ValueBase struct Path { SourceAccessor * accessor; - const char * path; + const StringData * path; }; struct Null @@ -646,13 +737,13 @@ protected: void getStorage(StringWithContext & string) const noexcept { string.context = untagPointer(payload[0]); - string.c_str = std::bit_cast(payload[1]); + string.str = std::bit_cast(payload[1]); } void getStorage(Path & path) const noexcept { path.accessor = untagPointer(payload[0]); - path.path = std::bit_cast(payload[1]); + path.path = std::bit_cast(payload[1]); } void setStorage(NixInt integer) noexcept @@ -697,7 +788,7 @@ protected: void setStorage(StringWithContext string) noexcept { - setUntaggablePayload(string.context, string.c_str); + setUntaggablePayload(string.context, string.str); } void setStorage(Path path) noexcept @@ -1050,22 +1141,22 @@ public: setStorage(b); } - void mkStringNoCopy(const char * s, const Value::StringWithContext::Context * context = nullptr) noexcept + void mkStringNoCopy(const StringData & s, const Value::StringWithContext::Context * context = nullptr) noexcept { - setStorage(StringWithContext{.c_str = s, .context = context}); + setStorage(StringWithContext{.str = &s, .context = context}); } void mkString(std::string_view s); void mkString(std::string_view s, const NixStringContext & context); - void mkStringMove(const char * s, const NixStringContext & context); + void mkStringMove(const StringData & s, const NixStringContext & context); void mkPath(const SourcePath & path); - inline void mkPath(SourceAccessor * accessor, const char * path) noexcept + inline void mkPath(SourceAccessor * accessor, const StringData & path) noexcept { - setStorage(Path{.accessor = accessor, .path = path}); + setStorage(Path{.accessor = accessor, .path = &path}); } inline void mkNull() 
noexcept @@ -1163,17 +1254,23 @@ public: SourcePath path() const { - return SourcePath(ref(pathAccessor()->shared_from_this()), CanonPath(CanonPath::unchecked_t(), pathStr())); + return SourcePath( + ref(pathAccessor()->shared_from_this()), CanonPath(CanonPath::unchecked_t(), std::string(pathStrView()))); } - std::string_view string_view() const noexcept + const StringData & string_data() const noexcept { - return std::string_view{getStorage().c_str}; + return *getStorage().str; } const char * c_str() const noexcept { - return getStorage().c_str; + return getStorage().str->data(); + } + + std::string_view string_view() const noexcept + { + return string_data().view(); } const Value::StringWithContext::Context * context() const noexcept @@ -1233,12 +1330,12 @@ public: const char * pathStr() const noexcept { - return getStorage().path; + return getStorage().path->c_str(); } std::string_view pathStrView() const noexcept { - return std::string_view{getStorage().path}; + return getStorage().path->view(); } SourceAccessor * pathAccessor() const noexcept diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 0daeea77c..3b39b7f20 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -5,6 +5,7 @@ #include "nix/expr/eval-settings.hh" #include "nix/expr/gc-small-vector.hh" #include "nix/expr/json-to-value.hh" +#include "nix/expr/static-string-data.hh" #include "nix/store/globals.hh" #include "nix/store/names.hh" #include "nix/store/path-references.hh" @@ -487,34 +488,34 @@ static void prim_typeOf(EvalState & state, const PosIdx pos, Value ** args, Valu state.forceValue(*args[0], pos); switch (args[0]->type()) { case nInt: - v.mkStringNoCopy("int"); + v.mkStringNoCopy("int"_sds); break; case nBool: - v.mkStringNoCopy("bool"); + v.mkStringNoCopy("bool"_sds); break; case nString: - v.mkStringNoCopy("string"); + v.mkStringNoCopy("string"_sds); break; case nPath: - v.mkStringNoCopy("path"); + v.mkStringNoCopy("path"_sds); break; case nNull: - 
v.mkStringNoCopy("null"); + v.mkStringNoCopy("null"_sds); break; case nAttrs: - v.mkStringNoCopy("set"); + v.mkStringNoCopy("set"_sds); break; case nList: - v.mkStringNoCopy("list"); + v.mkStringNoCopy("list"_sds); break; case nFunction: - v.mkStringNoCopy("lambda"); + v.mkStringNoCopy("lambda"_sds); break; case nExternal: v.mkString(args[0]->external()->typeOf()); break; case nFloat: - v.mkStringNoCopy("float"); + v.mkStringNoCopy("float"_sds); break; case nThunk: unreachable(); @@ -2024,9 +2025,9 @@ static void prim_dirOf(EvalState & state, const PosIdx pos, Value ** args, Value pos, *args[0], context, "while evaluating the first argument passed to 'builtins.dirOf'", false, false); auto pos = path->rfind('/'); if (pos == path->npos) - v.mkStringMove(".", context); + v.mkStringMove("."_sds, context); else if (pos == 0) - v.mkStringMove("/", context); + v.mkStringMove("/"_sds, context); else v.mkString(path->substr(0, pos), context); } @@ -2309,10 +2310,10 @@ static const Value & fileTypeToString(EvalState & state, SourceAccessor::Type ty static const Constants stringValues = []() { Constants res; - res.regular.mkStringNoCopy("regular"); - res.directory.mkStringNoCopy("directory"); - res.symlink.mkStringNoCopy("symlink"); - res.unknown.mkStringNoCopy("unknown"); + res.regular.mkStringNoCopy("regular"_sds); + res.directory.mkStringNoCopy("directory"_sds); + res.symlink.mkStringNoCopy("symlink"_sds); + res.unknown.mkStringNoCopy("unknown"_sds); return res; }(); @@ -4463,7 +4464,7 @@ static void prim_substring(EvalState & state, const PosIdx pos, Value ** args, V if (len == 0) { state.forceValue(*args[2], pos); if (args[2]->type() == nString) { - v.mkStringNoCopy("", args[2]->context()); + v.mkStringNoCopy(""_sds, args[2]->context()); return; } } diff --git a/src/libexpr/primops/fromTOML.cc b/src/libexpr/primops/fromTOML.cc index 0d165f5c3..a06224fee 100644 --- a/src/libexpr/primops/fromTOML.cc +++ b/src/libexpr/primops/fromTOML.cc @@ -1,5 +1,6 @@ #include 
"nix/expr/primops.hh" #include "nix/expr/eval-inline.hh" +#include "nix/expr/static-string-data.hh" #include "expr-config-private.hh" @@ -136,7 +137,7 @@ static void prim_fromTOML(EvalState & state, const PosIdx pos, Value ** args, Va normalizeDatetimeFormat(t); #endif auto attrs = state.buildBindings(2); - attrs.alloc("_type").mkStringNoCopy("timestamp"); + attrs.alloc("_type").mkStringNoCopy("timestamp"_sds); std::ostringstream s; s << t; auto str = s.view(); diff --git a/src/nix/nix-env/user-env.cc b/src/nix/nix-env/user-env.cc index 81e2c4f80..21fdf25bc 100644 --- a/src/nix/nix-env/user-env.cc +++ b/src/nix/nix-env/user-env.cc @@ -9,6 +9,7 @@ #include "nix/expr/eval-inline.hh" #include "nix/store/profiles.hh" #include "nix/expr/print-ambiguous.hh" +#include "nix/expr/static-string-data.hh" #include #include @@ -56,7 +57,7 @@ bool createUserEnv( auto attrs = state.buildBindings(7 + outputs.size()); - attrs.alloc(state.s.type).mkStringNoCopy("derivation"); + attrs.alloc(state.s.type).mkStringNoCopy("derivation"_sds); attrs.alloc(state.s.name).mkString(i.queryName()); auto system = i.querySystem(); if (!system.empty()) From 496e43ec72643ad4fc48ce15e6b7220763e823a8 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Mon, 10 Nov 2025 21:12:07 +0300 Subject: [PATCH 193/213] Restore isAllowed check in ChrootLinuxDerivationBuilder This early return was lost in d4ef822add1074483627c5dbbaa9077f15daf7bc. By doing some https://en.wikipedia.org/wiki/Non-virtual_interface_pattern, we can ensure that we don't make this mistake again --- implementations are no longer responsible for implementing the caching/memoization mechanism. 
--- .../include/nix/store/restricted-store.hh | 16 +++++++++++++++- src/libstore/unix/build/derivation-builder.cc | 7 ++----- .../unix/build/linux-derivation-builder.cc | 5 ++++- 3 files changed, 21 insertions(+), 7 deletions(-) diff --git a/src/libstore/include/nix/store/restricted-store.hh b/src/libstore/include/nix/store/restricted-store.hh index 8bbb2ff54..62cac3856 100644 --- a/src/libstore/include/nix/store/restricted-store.hh +++ b/src/libstore/include/nix/store/restricted-store.hh @@ -52,7 +52,21 @@ struct RestrictionContext * Add 'path' to the set of paths that may be referenced by the * outputs, and make it appear in the sandbox. */ - virtual void addDependency(const StorePath & path) = 0; + void addDependency(const StorePath & path) + { + if (isAllowed(path)) + return; + addDependencyImpl(path); + } + +protected: + + /** + * This is the underlying implementation to be defined. The caller + * will ensure that this is only called on newly added dependencies, + * and that idempotent calls are a no-op. + */ + virtual void addDependencyImpl(const StorePath & path) = 0; }; /** diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index c2ef730dc..a052d29af 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -334,7 +334,7 @@ private: protected: - void addDependency(const StorePath & path) override; + void addDependencyImpl(const StorePath & path) override; /** * Make a file owned by the builder. 
@@ -1203,11 +1203,8 @@ void DerivationBuilderImpl::stopDaemon() daemonSocket.close(); } -void DerivationBuilderImpl::addDependency(const StorePath & path) +void DerivationBuilderImpl::addDependencyImpl(const StorePath & path) { - if (isAllowed(path)) - return; - addedPaths.insert(path); } diff --git a/src/libstore/unix/build/linux-derivation-builder.cc b/src/libstore/unix/build/linux-derivation-builder.cc index e96f83700..95ef7eafe 100644 --- a/src/libstore/unix/build/linux-derivation-builder.cc +++ b/src/libstore/unix/build/linux-derivation-builder.cc @@ -709,8 +709,11 @@ struct ChrootLinuxDerivationBuilder : ChrootDerivationBuilder, LinuxDerivationBu DerivationBuilderImpl::killSandbox(getStats); } - void addDependency(const StorePath & path) override + void addDependencyImpl(const StorePath & path) override { + if (isAllowed(path)) + return; + auto [source, target] = ChrootDerivationBuilder::addDependencyPrep(path); /* Bind-mount the path into the sandbox. This requires From 4e64dea21b845b2ee71a53ee17143fb8ac9a3ae0 Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Thu, 23 Oct 2025 04:32:58 +0000 Subject: [PATCH 194/213] feat(libstore): add S3 storage class support Add support for configuring S3 storage class via the storage-class parameter for S3BinaryCacheStore. This allows users to optimize costs by selecting appropriate storage tiers (STANDARD, GLACIER, INTELLIGENT_TIERING, etc.) based on access patterns. The storage class is applied via the x-amz-storage-class header for both regular PUT uploads and multipart upload initiation. 
--- doc/manual/rl-next/s3-storage-class.md | 21 +++++++++++++++++++ src/libstore-tests/s3-binary-cache-store.cc | 18 ++++++++++++++++ .../nix/store/s3-binary-cache-store.hh | 20 ++++++++++++++++++ src/libstore/s3-binary-cache-store.cc | 8 +++++-- 4 files changed, 65 insertions(+), 2 deletions(-) create mode 100644 doc/manual/rl-next/s3-storage-class.md diff --git a/doc/manual/rl-next/s3-storage-class.md b/doc/manual/rl-next/s3-storage-class.md new file mode 100644 index 000000000..d742b5747 --- /dev/null +++ b/doc/manual/rl-next/s3-storage-class.md @@ -0,0 +1,21 @@ +--- +synopsis: "S3 binary cache stores now support storage class configuration" +prs: [14464] +issues: [7015] +--- + +S3 binary cache stores now support configuring the storage class for uploaded objects via the `storage-class` parameter. This allows users to optimize costs by selecting appropriate storage tiers based on access patterns. + +Example usage: + +```bash +# Use Glacier storage for long-term archival +nix copy --to 's3://my-bucket?storage-class=GLACIER' /nix/store/... + +# Use Intelligent Tiering for automatic cost optimization +nix copy --to 's3://my-bucket?storage-class=INTELLIGENT_TIERING' /nix/store/... +``` + +The storage class applies to both regular uploads and multipart uploads. When not specified, objects use the bucket's default storage class. + +See the [S3 storage classes documentation](https://docs.aws.amazon.com/AmazonS3/latest/userguide/storage-class-intro.html) for available storage classes and their characteristics. 
diff --git a/src/libstore-tests/s3-binary-cache-store.cc b/src/libstore-tests/s3-binary-cache-store.cc index f01759771..59090a589 100644 --- a/src/libstore-tests/s3-binary-cache-store.cc +++ b/src/libstore-tests/s3-binary-cache-store.cc @@ -122,4 +122,22 @@ TEST(S3BinaryCacheStore, parameterFiltering) EXPECT_EQ(ref.params["priority"], "10"); } +/** + * Test storage class configuration + */ +TEST(S3BinaryCacheStore, storageClassDefault) +{ + S3BinaryCacheStoreConfig config{"s3", "test-bucket", {}}; + EXPECT_EQ(config.storageClass.get(), std::nullopt); +} + +TEST(S3BinaryCacheStore, storageClassConfiguration) +{ + StringMap params; + params["storage-class"] = "GLACIER"; + + S3BinaryCacheStoreConfig config("s3", "test-bucket", params); + EXPECT_EQ(config.storageClass.get(), std::optional("GLACIER")); +} + } // namespace nix diff --git a/src/libstore/include/nix/store/s3-binary-cache-store.hh b/src/libstore/include/nix/store/s3-binary-cache-store.hh index bf86d0671..5896293f1 100644 --- a/src/libstore/include/nix/store/s3-binary-cache-store.hh +++ b/src/libstore/include/nix/store/s3-binary-cache-store.hh @@ -93,6 +93,26 @@ struct S3BinaryCacheStoreConfig : HttpBinaryCacheStoreConfig Default is 100 MiB. Only takes effect when multipart-upload is enabled. )"}; + const Setting> storageClass{ + this, + std::nullopt, + "storage-class", + R"( + The S3 storage class to use for uploaded objects. When not set (default), + uses the bucket's default storage class. 
Valid values include: + - STANDARD (default, frequently accessed data) + - REDUCED_REDUNDANCY (less frequently accessed data) + - STANDARD_IA (infrequent access) + - ONEZONE_IA (infrequent access, single AZ) + - INTELLIGENT_TIERING (automatic cost optimization) + - GLACIER (archival with retrieval times in minutes to hours) + - DEEP_ARCHIVE (long-term archival with 12-hour retrieval) + - GLACIER_IR (instant retrieval archival) + + See AWS S3 documentation for detailed storage class descriptions and pricing: + https://docs.aws.amazon.com/AmazonS3/latest/userguide/storage-class-intro.html + )"}; + /** * Set of settings that are part of the S3 URI itself. * These are needed for region specification and other S3-specific settings. diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 51b7a05fc..bc7f53f4d 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -134,10 +134,14 @@ void S3BinaryCacheStore::upsertFile( const std::string & path, RestartableSource & source, const std::string & mimeType, uint64_t sizeHint) { auto doUpload = [&](RestartableSource & src, uint64_t size, std::optional headers) { + Headers uploadHeaders = headers.value_or(Headers()); + if (auto storageClass = s3Config->storageClass.get()) { + uploadHeaders.emplace_back("x-amz-storage-class", *storageClass); + } if (s3Config->multipartUpload && size > s3Config->multipartThreshold) { - uploadMultipart(path, src, size, mimeType, std::move(headers)); + uploadMultipart(path, src, size, mimeType, std::move(uploadHeaders)); } else { - upload(path, src, size, mimeType, std::move(headers)); + upload(path, src, size, mimeType, std::move(uploadHeaders)); } }; From 182ae393d1574f4f97d9f42e8f7397896fffbeab Mon Sep 17 00:00:00 2001 From: Bernardo Meurer Costa Date: Mon, 3 Nov 2025 22:04:12 +0000 Subject: [PATCH 195/213] refactor(libutil/topo-sort): return variant instead of throwing The variant has on the left-hand side the 
topologically sorted vector and the right-hand side is a pair showing the path and its parent that represent a cycle in the graph making the sort impossible. This change prepares for enhanced cycle error messages that can provide more context about the cycle. The variant approach allows callers to handle cycles more flexibly, enabling better error reporting that shows the full cycle path and which files are involved. Adapted from Lix commit f7871fcb5. Change-Id: I70a987f470437df8beb3b1cc203ff88701d0aa1b Co-Authored-By: Maximilian Bosch --- src/libstore/local-store.cc | 29 +- src/libstore/misc.cc | 35 +- src/libstore/unix/build/derivation-builder.cc | 73 ++-- src/libutil-tests/meson.build | 1 + src/libutil-tests/topo-sort.cc | 318 ++++++++++++++++++ src/libutil/include/nix/util/topo-sort.hh | 48 ++- 6 files changed, 427 insertions(+), 77 deletions(-) create mode 100644 src/libutil-tests/topo-sort.cc diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index c6aeaf0d2..2c4d546f8 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -989,19 +989,22 @@ void LocalStore::registerValidPaths(const ValidPathInfos & infos) error if a cycle is detected and roll back the transaction. Cycles can only occur when a derivation has multiple outputs. */ - topoSort( - paths, - {[&](const StorePath & path) { - auto i = infos.find(path); - return i == infos.end() ? StorePathSet() : i->second.references; - }}, - {[&](const StorePath & path, const StorePath & parent) { - return BuildError( - BuildResult::Failure::OutputRejected, - "cycle detected in the references of '%s' from '%s'", - printStorePath(path), - printStorePath(parent)); - }}); + auto topoSortResult = topoSort(paths, {[&](const StorePath & path) { + auto i = infos.find(path); + return i == infos.end() ? 
StorePathSet() : i->second.references; + }}); + + std::visit( + overloaded{ + [&](const Cycle & cycle) { + throw BuildError( + BuildResult::Failure::OutputRejected, + "cycle detected in the references of '%s' from '%s'", + printStorePath(cycle.path), + printStorePath(cycle.parent)); + }, + [](auto &) { /* Success, continue */ }}, + topoSortResult); txn.commit(); }); diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc index 8b2a7287e..34a369810 100644 --- a/src/libstore/misc.cc +++ b/src/libstore/misc.cc @@ -311,22 +311,25 @@ MissingPaths Store::queryMissing(const std::vector & targets) StorePaths Store::topoSortPaths(const StorePathSet & paths) { - return topoSort( - paths, - {[&](const StorePath & path) { - try { - return queryPathInfo(path)->references; - } catch (InvalidPath &) { - return StorePathSet(); - } - }}, - {[&](const StorePath & path, const StorePath & parent) { - return BuildError( - BuildResult::Failure::OutputRejected, - "cycle detected in the references of '%s' from '%s'", - printStorePath(path), - printStorePath(parent)); - }}); + auto result = topoSort(paths, {[&](const StorePath & path) { + try { + return queryPathInfo(path)->references; + } catch (InvalidPath &) { + return StorePathSet(); + } + }}); + + return std::visit( + overloaded{ + [&](const Cycle & cycle) -> StorePaths { + throw BuildError( + BuildResult::Failure::OutputRejected, + "cycle detected in the references of '%s' from '%s'", + printStorePath(cycle.path), + printStorePath(cycle.parent)); + }, + [](const auto & sorted) { return sorted; }}, + result); } std::map diff --git a/src/libstore/unix/build/derivation-builder.cc b/src/libstore/unix/build/derivation-builder.cc index c2ef730dc..6eedc5fa2 100644 --- a/src/libstore/unix/build/derivation-builder.cc +++ b/src/libstore/unix/build/derivation-builder.cc @@ -1473,43 +1473,46 @@ SingleDrvOutputs DerivationBuilderImpl::registerOutputs() outputStats.insert_or_assign(outputName, std::move(st)); } - auto sortedOutputNames = 
topoSort( - outputsToSort, - {[&](const std::string & name) { - auto orifu = get(outputReferencesIfUnregistered, name); - if (!orifu) + auto topoSortResult = topoSort(outputsToSort, {[&](const std::string & name) { + auto orifu = get(outputReferencesIfUnregistered, name); + if (!orifu) + throw BuildError( + BuildResult::Failure::OutputRejected, + "no output reference for '%s' in build of '%s'", + name, + store.printStorePath(drvPath)); + return std::visit( + overloaded{ + /* Since we'll use the already installed versions of these, we + can treat them as leaves and ignore any references they + have. */ + [&](const AlreadyRegistered &) { return StringSet{}; }, + [&](const PerhapsNeedToRegister & refs) { + StringSet referencedOutputs; + /* FIXME build inverted map up front so no quadratic waste here */ + for (auto & r : refs.refs) + for (auto & [o, p] : scratchOutputs) + if (r == p) + referencedOutputs.insert(o); + return referencedOutputs; + }, + }, + *orifu); + }}); + + auto sortedOutputNames = std::visit( + overloaded{ + [&](Cycle & cycle) -> std::vector { + // TODO with more -vvvv also show the temporary paths for manual inspection. throw BuildError( BuildResult::Failure::OutputRejected, - "no output reference for '%s' in build of '%s'", - name, - store.printStorePath(drvPath)); - return std::visit( - overloaded{ - /* Since we'll use the already installed versions of these, we - can treat them as leaves and ignore any references they - have. */ - [&](const AlreadyRegistered &) { return StringSet{}; }, - [&](const PerhapsNeedToRegister & refs) { - StringSet referencedOutputs; - /* FIXME build inverted map up front so no quadratic waste here */ - for (auto & r : refs.refs) - for (auto & [o, p] : scratchOutputs) - if (r == p) - referencedOutputs.insert(o); - return referencedOutputs; - }, - }, - *orifu); - }}, - {[&](const std::string & path, const std::string & parent) { - // TODO with more -vvvv also show the temporary paths for manual inspection. 
- return BuildError( - BuildResult::Failure::OutputRejected, - "cycle detected in build of '%s' in the references of output '%s' from output '%s'", - store.printStorePath(drvPath), - path, - parent); - }}); + "cycle detected in build of '%s' in the references of output '%s' from output '%s'", + store.printStorePath(drvPath), + cycle.path, + cycle.parent); + }, + [](auto & sorted) { return sorted; }}, + topoSortResult); std::reverse(sortedOutputNames.begin(), sortedOutputNames.end()); diff --git a/src/libutil-tests/meson.build b/src/libutil-tests/meson.build index c75f4d90a..2772ab060 100644 --- a/src/libutil-tests/meson.build +++ b/src/libutil-tests/meson.build @@ -74,6 +74,7 @@ sources = files( 'strings.cc', 'suggestions.cc', 'terminal.cc', + 'topo-sort.cc', 'url.cc', 'util.cc', 'xml-writer.cc', diff --git a/src/libutil-tests/topo-sort.cc b/src/libutil-tests/topo-sort.cc new file mode 100644 index 000000000..91030247e --- /dev/null +++ b/src/libutil-tests/topo-sort.cc @@ -0,0 +1,318 @@ +#include +#include +#include +#include +#include + +#include + +#include "nix/util/topo-sort.hh" +#include "nix/util/util.hh" + +namespace nix { + +/** + * Helper function to create a graph and run topoSort + */ +TopoSortResult +runTopoSort(const std::set & nodes, const std::map> & edges) +{ + return topoSort( + nodes, + std::function(const std::string &)>( + [&](const std::string & node) -> std::set { + auto it = edges.find(node); + return it != edges.end() ? it->second : std::set{}; + })); +} + +/** + * Helper to check if a sorted result respects dependencies + * + * @note `topoSort` returns results in REVERSE topological order (see + * line 61 of topo-sort.hh). This means dependents come BEFORE their + * dependencies in the output. + * + * In the edges std::map, if parent -> child, it means parent depends on + * child, so parent must come BEFORE child in the output from topoSort. 
+ */ +bool isValidTopologicalOrder( + const std::vector & sorted, const std::map> & edges) +{ + std::map position; + for (size_t i = 0; i < sorted.size(); ++i) { + position[sorted[i]] = i; + } + + // For each edge parent -> children, parent depends on children + // topoSort reverses the output, so parent comes BEFORE children + for (const auto & [parent, children] : edges) { + for (const auto & child : children) { + if (position.count(parent) && position.count(child)) { + // parent should come before child (have a smaller index) + if (position[parent] > position[child]) { + return false; + } + } + } + } + return true; +} + +// ============================================================================ +// Parametrized Tests for Topological Sort +// ============================================================================ + +struct ExpectSuccess +{ + std::optional> order; // std::nullopt = any valid order is acceptable +}; + +struct ExpectCycle +{ + std::set involvedNodes; +}; + +using ExpectedResult = std::variant; + +struct TopoSortCase +{ + std::string name; + std::set nodes; + std::map> edges; + ExpectedResult expected; +}; + +class TopoSortTest : public ::testing::TestWithParam +{}; + +TEST_P(TopoSortTest, ProducesCorrectResult) +{ + const auto & testCase = GetParam(); + auto result = runTopoSort(testCase.nodes, testCase.edges); + + std::visit( + overloaded{ + [&](const ExpectSuccess & expect) { + // Success case + ASSERT_TRUE(holds_alternative>(result)) + << "Expected successful sort for: " << testCase.name; + + auto sorted = get>(result); + ASSERT_EQ(sorted.size(), testCase.nodes.size()) + << "Sorted output should contain all nodes for: " << testCase.name; + + ASSERT_TRUE(isValidTopologicalOrder(sorted, testCase.edges)) + << "Invalid topological order for: " << testCase.name; + + if (expect.order) { + ASSERT_EQ(sorted, *expect.order) << "Expected specific order for: " << testCase.name; + } + }, + [&](const ExpectCycle & expect) { + // Cycle detection case 
+ ASSERT_TRUE(holds_alternative>(result)) + << "Expected cycle detection for: " << testCase.name; + + auto cycle = get>(result); + + // Verify that the cycle involves expected nodes + ASSERT_TRUE(expect.involvedNodes.count(cycle.path) > 0) + << "Cycle path '" << cycle.path << "' not in expected cycle nodes for: " << testCase.name; + ASSERT_TRUE(expect.involvedNodes.count(cycle.parent) > 0) + << "Cycle parent '" << cycle.parent << "' not in expected cycle nodes for: " << testCase.name; + + // Verify that there's actually an edge in the cycle + auto it = testCase.edges.find(cycle.parent); + ASSERT_TRUE(it != testCase.edges.end()) << "Parent node should have edges for: " << testCase.name; + ASSERT_TRUE(it->second.count(cycle.path) > 0) + << "Should be an edge from parent to path for: " << testCase.name; + }}, + testCase.expected); +} + +INSTANTIATE_TEST_SUITE_P( + TopoSort, + TopoSortTest, + ::testing::Values( + // Success cases + TopoSortCase{ + .name = "EmptySet", + .nodes = {}, + .edges = {}, + .expected = ExpectSuccess{.order = std::vector{}}, + }, + TopoSortCase{ + .name = "SingleNode", + .nodes = {"A"}, + .edges = {}, + .expected = ExpectSuccess{.order = std::vector{"A"}}, + }, + TopoSortCase{ + .name = "TwoIndependentNodes", + .nodes = {"A", "B"}, + .edges = {}, + // Order between independent nodes is unspecified + .expected = ExpectSuccess{.order = std::nullopt}, + }, + TopoSortCase{ + .name = "SimpleChain", + .nodes = {"A", "B", "C"}, + .edges{ + {"A", {"B"}}, + {"B", {"C"}}, + }, + .expected = ExpectSuccess{.order = std::vector{"A", "B", "C"}}, + }, + TopoSortCase{ + .name = "SimpleDag", + .nodes = {"A", "B", "C", "D"}, + .edges{ + {"A", {"B", "C"}}, + {"B", {"D"}}, + {"C", {"D"}}, + }, + .expected = ExpectSuccess{.order = std::nullopt}, + }, + TopoSortCase{ + .name = "DiamondDependency", + .nodes = {"A", "B", "C", "D"}, + .edges{ + {"A", {"B", "C"}}, + {"B", {"D"}}, + {"C", {"D"}}, + }, + .expected = ExpectSuccess{.order = std::nullopt}, + }, + 
TopoSortCase{ + .name = "DisconnectedComponents", + .nodes = {"A", "B", "C", "D"}, + .edges{ + {"A", {"B"}}, + {"C", {"D"}}, + }, + .expected = ExpectSuccess{.order = std::nullopt}, + }, + TopoSortCase{ + .name = "NodeWithNoReferences", + .nodes = {"A", "B", "C"}, + .edges{ + {"A", {"B"}}, + // C has no dependencies + }, + .expected = ExpectSuccess{.order = std::nullopt}, + }, + TopoSortCase{ + .name = "MissingReferences", + .nodes = {"A", "B"}, + .edges{ + // Z doesn't exist in nodes, should be ignored + {"A", {"B", "Z"}}, + }, + .expected = ExpectSuccess{.order = std::vector{"A", "B"}}, + }, + TopoSortCase{ + .name = "ComplexDag", + .nodes = {"A", "B", "C", "D", "E", "F", "G", "H"}, + .edges{ + {"A", {"B", "C", "D"}}, + {"B", {"E", "F"}}, + {"C", {"E", "F"}}, + {"D", {"G"}}, + {"E", {"H"}}, + {"F", {"H"}}, + {"G", {"H"}}, + }, + .expected = ExpectSuccess{.order = std::nullopt}, + }, + TopoSortCase{ + .name = "LongChain", + .nodes = {"A", "B", "C", "D", "E", "F", "G", "H"}, + .edges{ + {"A", {"B"}}, + {"B", {"C"}}, + {"C", {"D"}}, + {"D", {"E"}}, + {"E", {"F"}}, + {"F", {"G"}}, + {"G", {"H"}}, + }, + .expected = ExpectSuccess{.order = std::vector{"A", "B", "C", "D", "E", "F", "G", "H"}}, + }, + TopoSortCase{ + .name = "SelfLoopIgnored", + .nodes = {"A"}, + .edges{ + // Self-reference should be ignored per line 41 of topo-sort.hh + {"A", {"A"}}, + }, + .expected = ExpectSuccess{.order = std::vector{"A"}}, + }, + TopoSortCase{ + .name = "SelfLoopInChainIgnored", + .nodes = {"A", "B", "C"}, + .edges{ + // B has self-reference that should be ignored + {"A", {"B"}}, + {"B", {"B", "C"}}, + }, + .expected = ExpectSuccess{.order = std::vector{"A", "B", "C"}}, + }, + // Cycle detection cases + TopoSortCase{ + .name = "TwoNodeCycle", + .nodes = {"A", "B"}, + .edges{ + {"A", {"B"}}, + {"B", {"A"}}, + }, + .expected = ExpectCycle{.involvedNodes = {"A", "B"}}, + }, + TopoSortCase{ + .name = "ThreeNodeCycle", + .nodes = {"A", "B", "C"}, + .edges{ + {"A", {"B"}}, + {"B", {"C"}}, 
+ {"C", {"A"}}, + }, + .expected = ExpectCycle{.involvedNodes = {"A", "B", "C"}}, + }, + TopoSortCase{ + .name = "CycleInLargerGraph", + .nodes = {"A", "B", "C", "D"}, + .edges{ + {"A", {"B"}}, + {"B", {"C"}}, + {"C", {"A"}}, + {"D", {"A"}}, + }, + .expected = ExpectCycle{.involvedNodes = {"A", "B", "C"}}, + }, + TopoSortCase{ + .name = "MultipleCycles", + .nodes = {"A", "B", "C", "D"}, + .edges{ + {"A", {"B"}}, + {"B", {"A"}}, + {"C", {"D"}}, + {"D", {"C"}}, + }, + // Either cycle is valid + .expected = ExpectCycle{.involvedNodes = {"A", "B", "C", "D"}}, + }, + TopoSortCase{ + .name = "ComplexCycleWithBranches", + .nodes = {"A", "B", "C", "D", "E"}, + .edges{ + // Cycle: B -> D -> E -> B + {"A", {"B", "C"}}, + {"B", {"D"}}, + {"C", {"D"}}, + {"D", {"E"}}, + {"E", {"B"}}, + }, + .expected = ExpectCycle{.involvedNodes = {"B", "D", "E"}}, + })); + +} // namespace nix diff --git a/src/libutil/include/nix/util/topo-sort.hh b/src/libutil/include/nix/util/topo-sort.hh index aaf5dff16..285c34316 100644 --- a/src/libutil/include/nix/util/topo-sort.hh +++ b/src/libutil/include/nix/util/topo-sort.hh @@ -2,39 +2,61 @@ ///@file #include "nix/util/error.hh" +#include namespace nix { +template +struct Cycle +{ + T path; + T parent; +}; + +template +using TopoSortResult = std::variant, Cycle>; + template -std::vector topoSort( - std::set items, - std::function(const T &)> getChildren, - std::function makeCycleError) +TopoSortResult topoSort(std::set items, std::function(const T &)> getChildren) { std::vector sorted; decltype(items) visited, parents; - auto dfsVisit = [&](this auto & dfsVisit, const T & path, const T * parent) { - if (parents.count(path)) - throw makeCycleError(path, *parent); + std::function>(const T & path, const T * parent)> dfsVisit; - if (!visited.insert(path).second) - return; + dfsVisit = [&](const T & path, const T * parent) -> std::optional> { + if (parents.count(path)) { + return Cycle{path, *parent}; + } + + if (!visited.insert(path).second) { + return 
std::nullopt; + } parents.insert(path); auto references = getChildren(path); for (auto & i : references) /* Don't traverse into items that don't exist in our starting set. */ - if (i != path && items.count(i)) - dfsVisit(i, &path); + if (i != path && items.count(i)) { + auto result = dfsVisit(i, &path); + if (result.has_value()) { + return result; + } + } sorted.push_back(path); parents.erase(path); + + return std::nullopt; }; - for (auto & i : items) - dfsVisit(i, nullptr); + for (auto & i : items) { + auto cycle = dfsVisit(i, nullptr); + if (cycle.has_value()) { + return *cycle; + } + } std::reverse(sorted.begin(), sorted.end()); From 5ffc9fd2536710bdc5493c1ff9cf8fa74f5bc1d5 Mon Sep 17 00:00:00 2001 From: Taeer Bar-Yam Date: Thu, 6 Nov 2025 19:51:09 +0100 Subject: [PATCH 196/213] parser.y: remove pointless std::move()s --- src/libexpr/include/nix/expr/nixexpr.hh | 8 ++++---- src/libexpr/include/nix/expr/parser-state.hh | 9 +++++---- src/libexpr/parser.y | 20 ++++++++++---------- 3 files changed, 19 insertions(+), 18 deletions(-) diff --git a/src/libexpr/include/nix/expr/nixexpr.hh b/src/libexpr/include/nix/expr/nixexpr.hh index 3f7c883f3..3bf92cc12 100644 --- a/src/libexpr/include/nix/expr/nixexpr.hh +++ b/src/libexpr/include/nix/expr/nixexpr.hh @@ -289,7 +289,7 @@ struct ExprSelect : Expr std::pmr::polymorphic_allocator & alloc, const PosIdx & pos, Expr * e, - std::span attrPath, + const std::span & attrPath, Expr * def) : pos(pos) , nAttrPath(attrPath.size()) @@ -339,7 +339,7 @@ struct ExprOpHasAttr : Expr Expr * e; std::span attrPath; - ExprOpHasAttr(std::pmr::polymorphic_allocator & alloc, Expr * e, std::vector attrPath) + ExprOpHasAttr(std::pmr::polymorphic_allocator & alloc, Expr * e, const std::vector & attrPath) : e(e) , attrPath({alloc.allocate_object(attrPath.size()), attrPath.size()}) { @@ -433,7 +433,7 @@ struct ExprList : Expr { std::span elems; - ExprList(std::pmr::polymorphic_allocator & alloc, std::vector exprs) + 
ExprList(std::pmr::polymorphic_allocator & alloc, const std::vector & exprs) : elems({alloc.allocate_object(exprs.size()), exprs.size()}) { std::ranges::copy(exprs, elems.begin()); @@ -562,7 +562,7 @@ public: const PosTable & positions, std::pmr::polymorphic_allocator & alloc, PosIdx pos, - FormalsBuilder formals, + const FormalsBuilder & formals, Expr * body) : ExprLambda(positions, alloc, pos, Symbol(), formals, body) {}; diff --git a/src/libexpr/include/nix/expr/parser-state.hh b/src/libexpr/include/nix/expr/parser-state.hh index 661584ea0..0020d001e 100644 --- a/src/libexpr/include/nix/expr/parser-state.hh +++ b/src/libexpr/include/nix/expr/parser-state.hh @@ -96,7 +96,8 @@ struct ParserState ExprAttrs * attrs, AttrPath && attrPath, const ParserLocation & loc, Expr * e, const ParserLocation & exprLoc); void addAttr(ExprAttrs * attrs, AttrPath & attrPath, const Symbol & symbol, ExprAttrs::AttrDef && def); void validateFormals(FormalsBuilder & formals, PosIdx pos = noPos, Symbol arg = {}); - Expr * stripIndentation(const PosIdx pos, std::vector>> && es); + Expr * + stripIndentation(const PosIdx pos, const std::vector>> & es); PosIdx at(const ParserLocation & loc); }; @@ -238,8 +239,8 @@ inline void ParserState::validateFormals(FormalsBuilder & formals, PosIdx pos, S {.msg = HintFmt("duplicate formal function argument '%1%'", symbols[arg]), .pos = positions[pos]}); } -inline Expr * -ParserState::stripIndentation(const PosIdx pos, std::vector>> && es) +inline Expr * ParserState::stripIndentation( + const PosIdx pos, const std::vector>> & es) { if (es.empty()) return exprs.add(""_sds); @@ -343,7 +344,7 @@ ParserState::stripIndentation(const PosIdx pos, std::vector(exprs.alloc, pos, true, std::move(es2)); + return exprs.add(exprs.alloc, pos, true, es2); } inline PosIdx LexerState::at(const ParserLocation & loc) diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 8b56eba15..82c7b964f 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -186,7 
+186,7 @@ expr_function | formal_set ':' expr_function[body] { state->validateFormals($formal_set); - auto me = state->exprs.add(state->positions, state->exprs.alloc, CUR_POS, std::move($formal_set), $body); + auto me = state->exprs.add(state->positions, state->exprs.alloc, CUR_POS, $formal_set, $body); $$ = me; SET_DOC_POS(me, @1); } @@ -194,7 +194,7 @@ expr_function { auto arg = state->symbols.create($ID); state->validateFormals($formal_set, CUR_POS, arg); - auto me = state->exprs.add(state->positions, state->exprs.alloc, CUR_POS, arg, std::move($formal_set), $body); + auto me = state->exprs.add(state->positions, state->exprs.alloc, CUR_POS, arg, $formal_set, $body); $$ = me; SET_DOC_POS(me, @1); } @@ -202,7 +202,7 @@ expr_function { auto arg = state->symbols.create($ID); state->validateFormals($formal_set, CUR_POS, arg); - auto me = state->exprs.add(state->positions, state->exprs.alloc, CUR_POS, arg, std::move($formal_set), $body); + auto me = state->exprs.add(state->positions, state->exprs.alloc, CUR_POS, arg, $formal_set, $body); $$ = me; SET_DOC_POS(me, @1); } @@ -251,7 +251,7 @@ expr_op | expr_op OR expr_op { $$ = state->exprs.add(state->at(@2), $1, $3); } | expr_op IMPL expr_op { $$ = state->exprs.add(state->at(@2), $1, $3); } | expr_op UPDATE expr_op { $$ = state->exprs.add(state->at(@2), $1, $3); } - | expr_op '?' attrpath { $$ = state->exprs.add(state->exprs.alloc, $1, std::move($3)); } + | expr_op '?' attrpath { $$ = state->exprs.add(state->exprs.alloc, $1, $3); } | expr_op '+' expr_op { $$ = state->exprs.add(state->exprs.alloc, state->at(@2), false, {{state->at(@1), $1}, {state->at(@3), $3}}); } | expr_op '-' expr_op { $$ = state->exprs.add(state->at(@2), state->exprs.add(state->s.sub), {$1, $3}); } @@ -272,9 +272,9 @@ expr_app expr_select : expr_simple '.' attrpath - { $$ = state->exprs.add(state->exprs.alloc, CUR_POS, $1, std::move($3), nullptr); } + { $$ = state->exprs.add(state->exprs.alloc, CUR_POS, $1, $3, nullptr); } | expr_simple '.' 
attrpath OR_KW expr_select - { $$ = state->exprs.add(state->exprs.alloc, CUR_POS, $1, std::move($3), $5); $5->warnIfCursedOr(state->symbols, state->positions); } + { $$ = state->exprs.add(state->exprs.alloc, CUR_POS, $1, $3, $5); $5->warnIfCursedOr(state->symbols, state->positions); } | /* Backwards compatibility: because Nixpkgs has a function named ‘or’, allow stuff like ‘map or [...]’. This production is problematic (see https://github.com/NixOS/nix/issues/11118) and will be refactored in the @@ -304,12 +304,12 @@ expr_simple $2); } | IND_STRING_OPEN ind_string_parts IND_STRING_CLOSE { - $$ = state->stripIndentation(CUR_POS, std::move($2)); + $$ = state->stripIndentation(CUR_POS, $2); } | path_start PATH_END | path_start string_parts_interpolated PATH_END { $2.insert($2.begin(), {state->at(@1), $1}); - $$ = state->exprs.add(state->exprs.alloc, CUR_POS, false, std::move($2)); + $$ = state->exprs.add(state->exprs.alloc, CUR_POS, false, $2); } | SPATH { std::string_view path($1.p + 1, $1.l - 2); @@ -338,12 +338,12 @@ expr_simple { $2->pos = CUR_POS; $$ = $2; } | '{' '}' { $$ = state->exprs.add(CUR_POS); } - | '[' list ']' { $$ = state->exprs.add(state->exprs.alloc, std::move($2)); } + | '[' list ']' { $$ = state->exprs.add(state->exprs.alloc, $2); } ; string_parts : STR { $$ = $1; } - | string_parts_interpolated { $$ = state->exprs.add(state->exprs.alloc, CUR_POS, true, std::move($1)); } + | string_parts_interpolated { $$ = state->exprs.add(state->exprs.alloc, CUR_POS, true, $1); } | { $$ = std::string_view(); } ; From 2d728f0c56c28ac0bcc2bf156c236c21d1f27945 Mon Sep 17 00:00:00 2001 From: Taeer Bar-Yam Date: Thu, 6 Nov 2025 19:53:17 +0100 Subject: [PATCH 197/213] parser.y: get rid of most nix:: prefix --- src/libexpr/parser.y | 40 ++++++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index 82c7b964f..df700cab2 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y 
@@ -126,26 +126,26 @@ static Expr * makeCall(Exprs & exprs, PosIdx pos, Expr * fn, Expr * arg) { %define api.value.type variant -%type start expr expr_function expr_if expr_op -%type expr_select expr_simple expr_app -%type expr_pipe_from expr_pipe_into +%type start expr expr_function expr_if expr_op +%type expr_select expr_simple expr_app +%type expr_pipe_from expr_pipe_into %type > list -%type binds binds1 -%type formals formal_set -%type formal -%type > attrpath -%type >> attrs -%type >> string_parts_interpolated -%type >>> ind_string_parts -%type path_start -%type > string_parts string_attr -%type attr -%token ID -%token STR IND_STR -%token INT_LIT -%token FLOAT_LIT -%token PATH HPATH SPATH PATH_END -%token URI +%type binds binds1 +%type formals formal_set +%type formal +%type > attrpath +%type >> attrs +%type >> string_parts_interpolated +%type >>> ind_string_parts +%type path_start +%type > string_parts string_attr +%type attr +%token ID +%token STR IND_STR +%token INT_LIT +%token FLOAT_LIT +%token PATH HPATH SPATH PATH_END +%token URI %token IF THEN ELSE ASSERT WITH LET IN_KW REC INHERIT EQ NEQ AND OR IMPL OR_KW %token PIPE_FROM PIPE_INTO /* <| and |> */ %token DOLLAR_CURLY /* == ${ */ @@ -425,7 +425,7 @@ binds1 if (!$accum->inheritFromExprs) $accum->inheritFromExprs = std::make_unique>(); $accum->inheritFromExprs->push_back($expr); - auto from = new nix::ExprInheritFrom(state->at(@expr), $accum->inheritFromExprs->size() - 1); + auto from = state->exprs.add(state->at(@expr), $accum->inheritFromExprs->size() - 1); for (auto & [i, iPos] : $attrs) { if ($accum->attrs.find(i.symbol) != $accum->attrs.end()) state->dupAttr(i.symbol, iPos, $accum->attrs[i.symbol].pos); From 90ba96a3d610566124529516d71f84f2a8c26ec0 Mon Sep 17 00:00:00 2001 From: Taeer Bar-Yam Date: Mon, 10 Nov 2025 21:33:20 +0100 Subject: [PATCH 198/213] libexpr: use std::span rather than const std::vector & --- src/libexpr/include/nix/expr/nixexpr.hh | 34 +++++++++++++++++--- 
src/libexpr/include/nix/expr/parser-state.hh | 7 ++-- 2 files changed, 32 insertions(+), 9 deletions(-) diff --git a/src/libexpr/include/nix/expr/nixexpr.hh b/src/libexpr/include/nix/expr/nixexpr.hh index 3bf92cc12..673c14cb3 100644 --- a/src/libexpr/include/nix/expr/nixexpr.hh +++ b/src/libexpr/include/nix/expr/nixexpr.hh @@ -289,7 +289,7 @@ struct ExprSelect : Expr std::pmr::polymorphic_allocator & alloc, const PosIdx & pos, Expr * e, - const std::span & attrPath, + std::span attrPath, Expr * def) : pos(pos) , nAttrPath(attrPath.size()) @@ -339,7 +339,7 @@ struct ExprOpHasAttr : Expr Expr * e; std::span attrPath; - ExprOpHasAttr(std::pmr::polymorphic_allocator & alloc, Expr * e, const std::vector & attrPath) + ExprOpHasAttr(std::pmr::polymorphic_allocator & alloc, Expr * e, std::span attrPath) : e(e) , attrPath({alloc.allocate_object(attrPath.size()), attrPath.size()}) { @@ -433,7 +433,7 @@ struct ExprList : Expr { std::span elems; - ExprList(std::pmr::polymorphic_allocator & alloc, const std::vector & exprs) + ExprList(std::pmr::polymorphic_allocator & alloc, std::span exprs) : elems({alloc.allocate_object(exprs.size()), exprs.size()}) { std::ranges::copy(exprs, elems.begin()); @@ -753,7 +753,19 @@ struct ExprConcatStrings : Expr std::pmr::polymorphic_allocator & alloc, const PosIdx & pos, bool forceString, - const std::vector> & es) + std::span> es) + : pos(pos) + , forceString(forceString) + , es({alloc.allocate_object>(es.size()), es.size()}) + { + std::ranges::copy(es, this->es.begin()); + }; + + ExprConcatStrings( + std::pmr::polymorphic_allocator & alloc, + const PosIdx & pos, + bool forceString, + std::initializer_list> es) : pos(pos) , forceString(forceString) , es({alloc.allocate_object>(es.size()), es.size()}) @@ -833,7 +845,19 @@ public: add(std::pmr::polymorphic_allocator & alloc, const PosIdx & pos, bool forceString, - const std::vector> & es) + std::span> es) + requires(std::same_as) + { + return alloc.new_object(alloc, pos, forceString, es); + } + 
+ template + [[gnu::always_inline]] + C * + add(std::pmr::polymorphic_allocator & alloc, + const PosIdx & pos, + bool forceString, + std::initializer_list> es) requires(std::same_as) { return alloc.new_object(alloc, pos, forceString, es); diff --git a/src/libexpr/include/nix/expr/parser-state.hh b/src/libexpr/include/nix/expr/parser-state.hh index 0020d001e..9e1d17b53 100644 --- a/src/libexpr/include/nix/expr/parser-state.hh +++ b/src/libexpr/include/nix/expr/parser-state.hh @@ -96,8 +96,7 @@ struct ParserState ExprAttrs * attrs, AttrPath && attrPath, const ParserLocation & loc, Expr * e, const ParserLocation & exprLoc); void addAttr(ExprAttrs * attrs, AttrPath & attrPath, const Symbol & symbol, ExprAttrs::AttrDef && def); void validateFormals(FormalsBuilder & formals, PosIdx pos = noPos, Symbol arg = {}); - Expr * - stripIndentation(const PosIdx pos, const std::vector>> & es); + Expr * stripIndentation(const PosIdx pos, std::span>> es); PosIdx at(const ParserLocation & loc); }; @@ -239,8 +238,8 @@ inline void ParserState::validateFormals(FormalsBuilder & formals, PosIdx pos, S {.msg = HintFmt("duplicate formal function argument '%1%'", symbols[arg]), .pos = positions[pos]}); } -inline Expr * ParserState::stripIndentation( - const PosIdx pos, const std::vector>> & es) +inline Expr * +ParserState::stripIndentation(const PosIdx pos, std::span>> es) { if (es.empty()) return exprs.add(""_sds); From af1db7774fb0dcc7a35d9c010ca071acb0e43a56 Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 8 Sep 2025 09:18:52 +0200 Subject: [PATCH 199/213] Convert C++ function doc to Doxygen style Otherwise Doxygen won't pick it up --- src/nix/develop.cc | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/src/nix/develop.cc b/src/nix/develop.cc index d23dce10b..5a37f4a13 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -227,11 +227,13 @@ const static std::string getEnvSh = #include "get-env.sh.gen.hh" ; -/* Given an existing derivation, return 
the shell environment as - initialised by stdenv's setup script. We do this by building a - modified derivation with the same dependencies and nearly the same - initial environment variables, that just writes the resulting - environment to a file and exits. */ +/** + * Given an existing derivation, return the shell environment as + * initialised by stdenv's setup script. We do this by building a + * modified derivation with the same dependencies and nearly the same + * initial environment variables, that just writes the resulting + * environment to a file and exits. + */ static StorePath getDerivationEnvironment(ref store, ref evalStore, const StorePath & drvPath) { auto drv = evalStore->derivationFromPath(drvPath); From d1f750a71446e6107b8c1cadbef1af856524fd9d Mon Sep 17 00:00:00 2001 From: Eelco Dolstra Date: Mon, 8 Sep 2025 09:39:44 +0200 Subject: [PATCH 200/213] nix develop: `getBuildEnvironment` return `StorePath` --- src/nix/develop.cc | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/src/nix/develop.cc b/src/nix/develop.cc index 5a37f4a13..1eff735da 100644 --- a/src/nix/develop.cc +++ b/src/nix/develop.cc @@ -304,6 +304,8 @@ static StorePath getDerivationEnvironment(ref store, ref evalStore bmNormal, evalStore); + // `get-env.sh` will write its JSON output to an arbitrary output + // path, so return the first non-empty output path. 
for (auto & [_0, optPath] : evalStore->queryPartialDerivationOutputMap(shellDrvPath)) { assert(optPath); auto accessor = evalStore->requireStoreObjectAccessor(*optPath); @@ -497,19 +499,18 @@ struct Common : InstallableCommand, MixProfile } } - std::pair getBuildEnvironment(ref store, ref installable) + std::pair getBuildEnvironment(ref store, ref installable) { auto shellOutPath = getShellOutPath(store, installable); - auto strPath = store->printStorePath(shellOutPath); - updateProfile(shellOutPath); - debug("reading environment file '%s'", strPath); + debug("reading environment file '%s'", store->printStorePath(shellOutPath)); return { BuildEnvironment::parseJSON(store->requireStoreObjectAccessor(shellOutPath)->readFile(CanonPath::root)), - strPath}; + shellOutPath, + }; } }; @@ -636,7 +637,7 @@ struct CmdDevelop : Common, MixEnvironment setEnviron(); // prevent garbage collection until shell exits - setEnv("NIX_GCROOT", gcroot.c_str()); + setEnv("NIX_GCROOT", store->printStorePath(gcroot).c_str()); Path shell = "bash"; bool foundInteractive = false; From 2150d7a754a20cd396f0462cbf08b4c69aab8e96 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 10 Nov 2025 22:00:54 +0000 Subject: [PATCH 201/213] build(deps): bump cachix/install-nix-action from 31.8.2 to 31.8.3 Bumps [cachix/install-nix-action](https://github.com/cachix/install-nix-action) from 31.8.2 to 31.8.3. - [Release notes](https://github.com/cachix/install-nix-action/releases) - [Changelog](https://github.com/cachix/install-nix-action/blob/master/RELEASE.md) - [Commits](https://github.com/cachix/install-nix-action/compare/456688f15bc354bef6d396e4a35f4f89d40bf2b7...7ec16f2c061ab07b235a7245e06ed46fe9a1cab6) --- updated-dependencies: - dependency-name: cachix/install-nix-action dependency-version: 31.8.3 dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] 
---
 .github/workflows/ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 9a299b765..8c7868228 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -174,7 +174,7 @@ jobs:
           echo "installer-url=file://$GITHUB_WORKSPACE/out" >> "$GITHUB_OUTPUT"
           TARBALL_PATH="$(find "$GITHUB_WORKSPACE/out" -name 'nix*.tar.xz' -print | head -n 1)"
           echo "tarball-path=file://$TARBALL_PATH" >> "$GITHUB_OUTPUT"
-      - uses: cachix/install-nix-action@456688f15bc354bef6d396e4a35f4f89d40bf2b7 # v31.8.2
+      - uses: cachix/install-nix-action@7ec16f2c061ab07b235a7245e06ed46fe9a1cab6 # v31.8.3
        if: ${{ !matrix.experimental-installer }}
        with:
          install_url: ${{ format('{0}/install', steps.installer-tarball-url.outputs.installer-url) }}

From f5390e76e42780a933b1f5d1c2c250bd6a553eee Mon Sep 17 00:00:00 2001
From: John Ericson 
Date: Sat, 13 Sep 2025 08:43:21 -0400
Subject: [PATCH 202/213] Make `ValidPathInfo`, `NarInfo` JSON instances, but
 don't yet use in the CLI

Make instances for them that share code with `nix path-info`, but do a
slightly different format without store paths containing store dirs
(matching the other latest JSON formats).

Progress on #13570.

If we depend on the store dir, our JSON serializers/deserializers take
extra arguments, and that interfaces with the likes of various
frameworks for associating these with types (e.g. nlohmann in C++,
Serde in Rust, and Aeson in Haskell).

For now, `nix path-info` still uses the previous format, with store
dirs. We may yet decide to "rip off the band-aid", and just switch it
over, but that is left as a future PR.
--- src/libstore-tests/nar-info.cc | 4 +-- src/libstore-tests/path-info.cc | 4 +-- src/libstore/include/nix/store/nar-info.hh | 36 +++++++++++++++++---- src/libstore/include/nix/store/path-info.hh | 18 +++++++---- src/libstore/nar-info.cc | 31 +++++++++++++----- src/libstore/path-info.cc | 30 +++++++++++++---- src/nix/path-info.cc | 2 +- 7 files changed, 93 insertions(+), 32 deletions(-) diff --git a/src/libstore-tests/nar-info.cc b/src/libstore-tests/nar-info.cc index 41faa9274..1add98053 100644 --- a/src/libstore-tests/nar-info.cc +++ b/src/libstore-tests/nar-info.cc @@ -65,7 +65,7 @@ static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo) readTest(#STEM, [&](const auto & encoded_) { \ auto encoded = json::parse(encoded_); \ auto expected = makeNarInfo(*store, PURE); \ - NarInfo got = NarInfo::fromJSON(*store, expected.path, encoded); \ + auto got = UnkeyedNarInfo::fromJSON(&*store, encoded); \ ASSERT_EQ(got, expected); \ }); \ } \ @@ -74,7 +74,7 @@ static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo) { \ writeTest( \ #STEM, \ - [&]() -> json { return makeNarInfo(*store, PURE).toJSON(*store, PURE); }, \ + [&]() -> json { return makeNarInfo(*store, PURE).toJSON(&*store, PURE); }, \ [](const auto & file) { return json::parse(readFile(file)); }, \ [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ } diff --git a/src/libstore-tests/path-info.cc b/src/libstore-tests/path-info.cc index a40b26149..8c02bf403 100644 --- a/src/libstore-tests/path-info.cc +++ b/src/libstore-tests/path-info.cc @@ -70,7 +70,7 @@ static UnkeyedValidPathInfo makeFull(const Store & store, bool includeImpureInfo { \ readTest(#STEM, [&](const auto & encoded_) { \ auto encoded = json::parse(encoded_); \ - UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON(*store, encoded); \ + UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON(&*store, encoded); \ auto expected = OBJ; \ ASSERT_EQ(got, expected); \ }); \ @@ 
-80,7 +80,7 @@ static UnkeyedValidPathInfo makeFull(const Store & store, bool includeImpureInfo { \ writeTest( \ #STEM, \ - [&]() -> json { return OBJ.toJSON(*store, PURE); }, \ + [&]() -> json { return OBJ.toJSON(&*store, PURE); }, \ [](const auto & file) { return json::parse(readFile(file)); }, \ [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); \ } diff --git a/src/libstore/include/nix/store/nar-info.hh b/src/libstore/include/nix/store/nar-info.hh index 34606a89b..74ce2b057 100644 --- a/src/libstore/include/nix/store/nar-info.hh +++ b/src/libstore/include/nix/store/nar-info.hh @@ -9,17 +9,42 @@ namespace nix { struct StoreDirConfig; -struct NarInfo : ValidPathInfo +struct UnkeyedNarInfo : virtual UnkeyedValidPathInfo { std::string url; std::string compression; std::optional fileHash; uint64_t fileSize = 0; + UnkeyedNarInfo(UnkeyedValidPathInfo info) + : UnkeyedValidPathInfo(std::move(info)) + { + } + + bool operator==(const UnkeyedNarInfo &) const = default; + // TODO libc++ 16 (used by darwin) missing `std::optional::operator <=>`, can't do yet + // auto operator <=>(const NarInfo &) const = default; + + nlohmann::json toJSON(const StoreDirConfig * store, bool includeImpureInfo) const override; + static UnkeyedNarInfo fromJSON(const StoreDirConfig * store, const nlohmann::json & json); +}; + +/** + * Key and the extra NAR fields + */ +struct NarInfo : ValidPathInfo, UnkeyedNarInfo +{ NarInfo() = delete; NarInfo(ValidPathInfo info) - : ValidPathInfo{std::move(info)} + : UnkeyedValidPathInfo{static_cast(info)} + /* Later copies from `*this` are pointless. The argument is only + there so the constructors can also call + `UnkeyedValidPathInfo`, but this won't happen since the base + class is virtual. Only this counstructor (assuming it is most + derived) will initialize that virtual base class. 
*/ + , ValidPathInfo{info.path, static_cast(*this)} + , UnkeyedNarInfo{static_cast(*this)} { } @@ -37,13 +62,10 @@ struct NarInfo : ValidPathInfo NarInfo(const StoreDirConfig & store, const std::string & s, const std::string & whence); bool operator==(const NarInfo &) const = default; - // TODO libc++ 16 (used by darwin) missing `std::optional::operator <=>`, can't do yet - // auto operator <=>(const NarInfo &) const = default; std::string to_string(const StoreDirConfig & store) const; - - nlohmann::json toJSON(const StoreDirConfig & store, bool includeImpureInfo) const override; - static NarInfo fromJSON(const StoreDirConfig & store, const StorePath & path, const nlohmann::json & json); }; } // namespace nix + +JSON_IMPL(nix::UnkeyedNarInfo) diff --git a/src/libstore/include/nix/store/path-info.hh b/src/libstore/include/nix/store/path-info.hh index 0f00a14b7..8f6115b73 100644 --- a/src/libstore/include/nix/store/path-info.hh +++ b/src/libstore/include/nix/store/path-info.hh @@ -117,11 +117,11 @@ struct UnkeyedValidPathInfo * @param includeImpureInfo If true, variable elements such as the * registration time are included. 
*/ - virtual nlohmann::json toJSON(const StoreDirConfig & store, bool includeImpureInfo) const; - static UnkeyedValidPathInfo fromJSON(const StoreDirConfig & store, const nlohmann::json & json); + virtual nlohmann::json toJSON(const StoreDirConfig * store, bool includeImpureInfo) const; + static UnkeyedValidPathInfo fromJSON(const StoreDirConfig * store, const nlohmann::json & json); }; -struct ValidPathInfo : UnkeyedValidPathInfo +struct ValidPathInfo : virtual UnkeyedValidPathInfo { StorePath path; @@ -174,10 +174,14 @@ struct ValidPathInfo : UnkeyedValidPathInfo ValidPathInfo(StorePath && path, UnkeyedValidPathInfo info) : UnkeyedValidPathInfo(info) - , path(std::move(path)) {}; + , path(std::move(path)) + { + } + ValidPathInfo(const StorePath & path, UnkeyedValidPathInfo info) - : UnkeyedValidPathInfo(info) - , path(path) {}; + : ValidPathInfo(StorePath{path}, std::move(info)) + { + } static ValidPathInfo makeFromCA(const StoreDirConfig & store, std::string_view name, ContentAddressWithReferences && ca, Hash narHash); @@ -191,3 +195,5 @@ static_assert(std::is_move_constructible_v); using ValidPathInfos = std::map; } // namespace nix + +JSON_IMPL(nix::UnkeyedValidPathInfo) diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc index 4d4fb7de2..2dbcee325 100644 --- a/src/libstore/nar-info.cc +++ b/src/libstore/nar-info.cc @@ -7,7 +7,9 @@ namespace nix { NarInfo::NarInfo(const StoreDirConfig & store, const std::string & s, const std::string & whence) - : ValidPathInfo(StorePath(StorePath::dummy), Hash(Hash::dummy)) // FIXME: hack + : UnkeyedValidPathInfo(Hash::dummy) // FIXME: hack + , ValidPathInfo(StorePath::dummy, static_cast(*this)) // FIXME: hack + , UnkeyedNarInfo(static_cast(*this)) { unsigned line = 1; @@ -130,11 +132,11 @@ std::string NarInfo::to_string(const StoreDirConfig & store) const return res; } -nlohmann::json NarInfo::toJSON(const StoreDirConfig & store, bool includeImpureInfo) const +nlohmann::json UnkeyedNarInfo::toJSON(const 
StoreDirConfig * store, bool includeImpureInfo) const { using nlohmann::json; - auto jsonObject = ValidPathInfo::toJSON(store, includeImpureInfo); + auto jsonObject = UnkeyedValidPathInfo::toJSON(store, includeImpureInfo); if (includeImpureInfo) { if (!url.empty()) @@ -150,14 +152,11 @@ nlohmann::json NarInfo::toJSON(const StoreDirConfig & store, bool includeImpureI return jsonObject; } -NarInfo NarInfo::fromJSON(const StoreDirConfig & store, const StorePath & path, const nlohmann::json & json) +UnkeyedNarInfo UnkeyedNarInfo::fromJSON(const StoreDirConfig * store, const nlohmann::json & json) { using nlohmann::detail::value_t; - NarInfo res{ValidPathInfo{ - path, - UnkeyedValidPathInfo::fromJSON(store, json), - }}; + UnkeyedNarInfo res{UnkeyedValidPathInfo::fromJSON(store, json)}; auto & obj = getObject(json); @@ -177,3 +176,19 @@ NarInfo NarInfo::fromJSON(const StoreDirConfig & store, const StorePath & path, } } // namespace nix + +namespace nlohmann { + +using namespace nix; + +UnkeyedNarInfo adl_serializer::from_json(const json & json) +{ + return UnkeyedNarInfo::fromJSON(nullptr, json); +} + +void adl_serializer::to_json(json & json, const UnkeyedNarInfo & c) +{ + json = c.toJSON(nullptr, true); +} + +} // namespace nlohmann diff --git a/src/libstore/path-info.cc b/src/libstore/path-info.cc index 811c397a4..d56ba9475 100644 --- a/src/libstore/path-info.cc +++ b/src/libstore/path-info.cc @@ -149,7 +149,7 @@ ValidPathInfo ValidPathInfo::makeFromCA( return res; } -nlohmann::json UnkeyedValidPathInfo::toJSON(const StoreDirConfig & store, bool includeImpureInfo) const +nlohmann::json UnkeyedValidPathInfo::toJSON(const StoreDirConfig * store, bool includeImpureInfo) const { using nlohmann::json; @@ -163,13 +163,15 @@ nlohmann::json UnkeyedValidPathInfo::toJSON(const StoreDirConfig & store, bool i { auto & jsonRefs = jsonObject["references"] = json::array(); for (auto & ref : references) - jsonRefs.emplace_back(store.printStorePath(ref)); + jsonRefs.emplace_back(store 
? static_cast(store->printStorePath(ref)) : static_cast(ref)); } jsonObject["ca"] = ca; if (includeImpureInfo) { - jsonObject["deriver"] = deriver ? (std::optional{store.printStorePath(*deriver)}) : std::nullopt; + jsonObject["deriver"] = deriver ? (store ? static_cast(std::optional{store->printStorePath(*deriver)}) + : static_cast(std::optional{*deriver})) + : static_cast(std::optional{}); jsonObject["registrationTime"] = registrationTime ? (std::optional{registrationTime}) : std::nullopt; @@ -183,7 +185,7 @@ nlohmann::json UnkeyedValidPathInfo::toJSON(const StoreDirConfig & store, bool i return jsonObject; } -UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig & store, const nlohmann::json & _json) +UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig * store, const nlohmann::json & _json) { UnkeyedValidPathInfo res{ Hash(Hash::dummy), @@ -203,7 +205,7 @@ UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig & store try { auto references = getStringList(valueAt(json, "references")); for (auto & input : references) - res.references.insert(store.parseStorePath(static_cast(input))); + res.references.insert(store ? store->parseStorePath(getString(input)) : static_cast(input)); } catch (Error & e) { e.addTrace({}, "while reading key 'references'"); throw; @@ -218,7 +220,7 @@ UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig & store if (auto * rawDeriver0 = optionalValueAt(json, "deriver")) if (auto * rawDeriver = getNullable(*rawDeriver0)) - res.deriver = store.parseStorePath(getString(*rawDeriver)); + res.deriver = store ? 
store->parseStorePath(getString(*rawDeriver)) : static_cast(*rawDeriver); if (auto * rawRegistrationTime0 = optionalValueAt(json, "registrationTime")) if (auto * rawRegistrationTime = getNullable(*rawRegistrationTime0)) @@ -234,3 +236,19 @@ UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(const StoreDirConfig & store } } // namespace nix + +namespace nlohmann { + +using namespace nix; + +UnkeyedValidPathInfo adl_serializer::from_json(const json & json) +{ + return UnkeyedValidPathInfo::fromJSON(nullptr, json); +} + +void adl_serializer::to_json(json & json, const UnkeyedValidPathInfo & c) +{ + json = c.toJSON(nullptr, true); +} + +} // namespace nlohmann diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc index 697b73e5c..74921126b 100644 --- a/src/nix/path-info.cc +++ b/src/nix/path-info.cc @@ -51,7 +51,7 @@ static json pathInfoToJSON(Store & store, const StorePathSet & storePaths, bool // know the name yet until we've read the NAR info. printedStorePath = store.printStorePath(info->path); - jsonObject = info->toJSON(store, true); + jsonObject = info->toJSON(&store, true); if (showClosureSize) { StorePathSet closure; From 204749270b5c05ae3400441650d18811dd678078 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Thu, 30 Oct 2025 14:47:05 -0400 Subject: [PATCH 203/213] JSON alongside binary proto serialization test data This makes the proto serializer characterisation test data be accompanied by JSON data. This is arguably useful for a reasons: - The JSON data is human-readable while the binary data is not, so it provides some indication of what the test data means beyond the C++ literals. - The JSON data is language-agnostic, and so can be used to quickly rig up tests for implementation in other languages, without having source code literals at all (just go back and forth between the JSON and the binary). 
- Even though we have no concrete plans to place the binary protocol 1-1 or with JSON, it is still nice to ensure that the JSON serializers and binary protocols have (near) equal coverage over data types, to help ensure we didn't forget a JSON (de)serializer. --- .../include/nix/store/tests/protocol.hh | 52 +++++++++++---- src/libstore-tests/common-protocol.cc | 13 +++- .../data/common-protocol/content-address.json | 26 ++++++++ .../data/common-protocol/drv-output.json | 4 ++ .../optional-content-address.json | 11 ++++ .../common-protocol/optional-store-path.json | 4 ++ .../realisation-with-deps.json | 13 ++++ .../data/common-protocol/realisation.json | 17 +++++ .../data/common-protocol/set.json | 22 +++++++ .../data/common-protocol/store-path.json | 4 ++ .../data/common-protocol/string.json | 7 ++ .../data/common-protocol/vector.json | 22 +++++++ .../data/serve-protocol/build-result-2.2.json | 28 ++++++++ .../data/serve-protocol/build-result-2.3.json | 28 ++++++++ .../data/serve-protocol/build-result-2.6.json | 41 ++++++++++++ .../data/serve-protocol/content-address.json | 26 ++++++++ .../data/serve-protocol/drv-output.json | 4 ++ .../optional-content-address.json | 11 ++++ .../serve-protocol/optional-store-path.json | 4 ++ .../serve-protocol/realisation-with-deps.json | 13 ++++ .../data/serve-protocol/realisation.json | 17 +++++ .../data/serve-protocol/set.json | 22 +++++++ .../data/serve-protocol/store-path.json | 4 ++ .../data/serve-protocol/string.json | 7 ++ .../unkeyed-valid-path-info-2.3.json | 34 ++++++++++ .../unkeyed-valid-path-info-2.4.json | 47 +++++++++++++ .../data/serve-protocol/vector.json | 22 +++++++ .../data/worker-protocol/build-mode.json | 5 ++ .../worker-protocol/build-result-1.27.json | 28 ++++++++ .../worker-protocol/build-result-1.28.json | 41 ++++++++++++ .../worker-protocol/build-result-1.29.json | 41 ++++++++++++ .../worker-protocol/build-result-1.37.json | 43 ++++++++++++ .../data/worker-protocol/content-address.json | 26 ++++++++ 
.../worker-protocol/derived-path-1.29.json | 16 +++++ .../worker-protocol/derived-path-1.30.json | 17 +++++ .../data/worker-protocol/drv-output.json | 4 ++ .../keyed-build-result-1.29.json | 27 ++++++++ .../optional-content-address.json | 11 ++++ .../worker-protocol/optional-store-path.json | 4 ++ .../optional-trusted-flag.json | 5 ++ .../realisation-with-deps.json | 13 ++++ .../data/worker-protocol/realisation.json | 17 +++++ .../data/worker-protocol/set.json | 22 +++++++ .../data/worker-protocol/store-path.json | 4 ++ .../data/worker-protocol/string.json | 7 ++ .../unkeyed-valid-path-info-1.15.json | 34 ++++++++++ .../worker-protocol/valid-path-info-1.15.json | 37 +++++++++++ .../worker-protocol/valid-path-info-1.16.json | 66 +++++++++++++++++++ .../data/worker-protocol/vector.json | 22 +++++++ src/libstore-tests/serve-protocol.cc | 17 ++--- src/libstore-tests/worker-protocol.cc | 15 +++-- src/libstore/build-result.cc | 16 +++++ .../include/nix/store/build-result.hh | 1 + src/libstore/include/nix/store/path-info.hh | 1 + src/libstore/include/nix/store/realisation.hh | 1 + src/libstore/include/nix/store/store-api.hh | 6 ++ src/libstore/misc.cc | 17 +++++ src/libstore/path-info.cc | 16 +++++ src/libstore/realisation.cc | 14 +++- .../nix/util/tests/json-characterization.hh | 42 ++++++++---- 60 files changed, 1097 insertions(+), 42 deletions(-) create mode 100644 src/libstore-tests/data/common-protocol/content-address.json create mode 100644 src/libstore-tests/data/common-protocol/drv-output.json create mode 100644 src/libstore-tests/data/common-protocol/optional-content-address.json create mode 100644 src/libstore-tests/data/common-protocol/optional-store-path.json create mode 100644 src/libstore-tests/data/common-protocol/realisation-with-deps.json create mode 100644 src/libstore-tests/data/common-protocol/realisation.json create mode 100644 src/libstore-tests/data/common-protocol/set.json create mode 100644 src/libstore-tests/data/common-protocol/store-path.json 
create mode 100644 src/libstore-tests/data/common-protocol/string.json create mode 100644 src/libstore-tests/data/common-protocol/vector.json create mode 100644 src/libstore-tests/data/serve-protocol/build-result-2.2.json create mode 100644 src/libstore-tests/data/serve-protocol/build-result-2.3.json create mode 100644 src/libstore-tests/data/serve-protocol/build-result-2.6.json create mode 100644 src/libstore-tests/data/serve-protocol/content-address.json create mode 100644 src/libstore-tests/data/serve-protocol/drv-output.json create mode 100644 src/libstore-tests/data/serve-protocol/optional-content-address.json create mode 100644 src/libstore-tests/data/serve-protocol/optional-store-path.json create mode 100644 src/libstore-tests/data/serve-protocol/realisation-with-deps.json create mode 100644 src/libstore-tests/data/serve-protocol/realisation.json create mode 100644 src/libstore-tests/data/serve-protocol/set.json create mode 100644 src/libstore-tests/data/serve-protocol/store-path.json create mode 100644 src/libstore-tests/data/serve-protocol/string.json create mode 100644 src/libstore-tests/data/serve-protocol/unkeyed-valid-path-info-2.3.json create mode 100644 src/libstore-tests/data/serve-protocol/unkeyed-valid-path-info-2.4.json create mode 100644 src/libstore-tests/data/serve-protocol/vector.json create mode 100644 src/libstore-tests/data/worker-protocol/build-mode.json create mode 100644 src/libstore-tests/data/worker-protocol/build-result-1.27.json create mode 100644 src/libstore-tests/data/worker-protocol/build-result-1.28.json create mode 100644 src/libstore-tests/data/worker-protocol/build-result-1.29.json create mode 100644 src/libstore-tests/data/worker-protocol/build-result-1.37.json create mode 100644 src/libstore-tests/data/worker-protocol/content-address.json create mode 100644 src/libstore-tests/data/worker-protocol/derived-path-1.29.json create mode 100644 src/libstore-tests/data/worker-protocol/derived-path-1.30.json create mode 100644 
src/libstore-tests/data/worker-protocol/drv-output.json create mode 100644 src/libstore-tests/data/worker-protocol/keyed-build-result-1.29.json create mode 100644 src/libstore-tests/data/worker-protocol/optional-content-address.json create mode 100644 src/libstore-tests/data/worker-protocol/optional-store-path.json create mode 100644 src/libstore-tests/data/worker-protocol/optional-trusted-flag.json create mode 100644 src/libstore-tests/data/worker-protocol/realisation-with-deps.json create mode 100644 src/libstore-tests/data/worker-protocol/realisation.json create mode 100644 src/libstore-tests/data/worker-protocol/set.json create mode 100644 src/libstore-tests/data/worker-protocol/store-path.json create mode 100644 src/libstore-tests/data/worker-protocol/string.json create mode 100644 src/libstore-tests/data/worker-protocol/unkeyed-valid-path-info-1.15.json create mode 100644 src/libstore-tests/data/worker-protocol/valid-path-info-1.15.json create mode 100644 src/libstore-tests/data/worker-protocol/valid-path-info-1.16.json create mode 100644 src/libstore-tests/data/worker-protocol/vector.json diff --git a/src/libstore-test-support/include/nix/store/tests/protocol.hh b/src/libstore-test-support/include/nix/store/tests/protocol.hh index 5b57c6585..0f774df0e 100644 --- a/src/libstore-test-support/include/nix/store/tests/protocol.hh +++ b/src/libstore-test-support/include/nix/store/tests/protocol.hh @@ -6,6 +6,7 @@ #include "nix/store/tests/libstore.hh" #include "nix/util/tests/characterization.hh" +#include "nix/util/tests/json-characterization.hh" namespace nix { @@ -16,12 +17,30 @@ class ProtoTest : public CharacterizationTest std::filesystem::path goldenMaster(std::string_view testStem) const override { - return unitTestData / (std::string{testStem + ".bin"}); + return unitTestData / testStem; } public: Path storeDir = "/nix/store"; StoreDirConfig store{storeDir}; + + /** + * Golden test for `T` JSON reading + */ + template + void readJsonTest(PathView testStem, 
const T & expected) + { + nix::readJsonTest(*this, testStem, expected); + } + + /** + * Golden test for `T` JSON write + */ + template + void writeJsonTest(PathView testStem, const T & decoded) + { + nix::writeJsonTest(*this, testStem, decoded); + } }; template @@ -34,7 +53,7 @@ public: template void readProtoTest(PathView testStem, typename Proto::Version version, T expected) { - CharacterizationTest::readTest(testStem, [&](const auto & encoded) { + CharacterizationTest::readTest(std::string{testStem + ".bin"}, [&](const auto & encoded) { T got = ({ StringSource from{encoded}; Proto::template Serialise::read( @@ -55,7 +74,7 @@ public: template void writeProtoTest(PathView testStem, typename Proto::Version version, const T & decoded) { - CharacterizationTest::writeTest(testStem, [&]() { + CharacterizationTest::writeTest(std::string{testStem + ".bin"}, [&]() { StringSink to; Proto::template Serialise::write( this->store, @@ -69,14 +88,25 @@ public: } }; -#define VERSIONED_CHARACTERIZATION_TEST(FIXTURE, NAME, STEM, VERSION, VALUE) \ - TEST_F(FIXTURE, NAME##_read) \ - { \ - readProtoTest(STEM, VERSION, VALUE); \ - } \ - TEST_F(FIXTURE, NAME##_write) \ - { \ - writeProtoTest(STEM, VERSION, VALUE); \ +#define VERSIONED_CHARACTERIZATION_TEST_NO_JSON(FIXTURE, NAME, STEM, VERSION, VALUE) \ + TEST_F(FIXTURE, NAME##_read) \ + { \ + readProtoTest(STEM, VERSION, VALUE); \ + } \ + TEST_F(FIXTURE, NAME##_write) \ + { \ + writeProtoTest(STEM, VERSION, VALUE); \ + } + +#define VERSIONED_CHARACTERIZATION_TEST(FIXTURE, NAME, STEM, VERSION, VALUE) \ + VERSIONED_CHARACTERIZATION_TEST_NO_JSON(FIXTURE, NAME, STEM, VERSION, VALUE) \ + TEST_F(FIXTURE, NAME##_json_read) \ + { \ + readJsonTest(STEM, VALUE); \ + } \ + TEST_F(FIXTURE, NAME##_json_write) \ + { \ + writeJsonTest(STEM, VALUE); \ } } // namespace nix diff --git a/src/libstore-tests/common-protocol.cc b/src/libstore-tests/common-protocol.cc index 7c40e8cdb..fa676eb7f 100644 --- a/src/libstore-tests/common-protocol.cc +++ 
b/src/libstore-tests/common-protocol.cc @@ -3,6 +3,7 @@ #include #include +#include "nix/util/json-utils.hh" #include "nix/store/common-protocol.hh" #include "nix/store/common-protocol-impl.hh" #include "nix/store/build-result.hh" @@ -22,7 +23,7 @@ public: template void readProtoTest(PathView testStem, const T & expected) { - CharacterizationTest::readTest(testStem, [&](const auto & encoded) { + CharacterizationTest::readTest(std::string{testStem + ".bin"}, [&](const auto & encoded) { T got = ({ StringSource from{encoded}; CommonProto::Serialise::read(store, CommonProto::ReadConn{.from = from}); @@ -38,7 +39,7 @@ public: template void writeProtoTest(PathView testStem, const T & decoded) { - CharacterizationTest::writeTest(testStem, [&]() -> std::string { + CharacterizationTest::writeTest(std::string{testStem + ".bin"}, [&]() -> std::string { StringSink to; CommonProto::Serialise::write(store, CommonProto::WriteConn{.to = to}, decoded); return to.s; @@ -54,6 +55,14 @@ public: TEST_F(CommonProtoTest, NAME##_write) \ { \ writeProtoTest(STEM, VALUE); \ + } \ + TEST_F(CommonProtoTest, NAME##_json_read) \ + { \ + readJsonTest(STEM, VALUE); \ + } \ + TEST_F(CommonProtoTest, NAME##_json_write) \ + { \ + writeJsonTest(STEM, VALUE); \ } CHARACTERIZATION_TEST( diff --git a/src/libstore-tests/data/common-protocol/content-address.json b/src/libstore-tests/data/common-protocol/content-address.json new file mode 100644 index 000000000..9a0d57154 --- /dev/null +++ b/src/libstore-tests/data/common-protocol/content-address.json @@ -0,0 +1,26 @@ +[ + { + "hash": { + "algorithm": "sha256", + "format": "base64", + "hash": "+Xc9Ll6mcPltwaewrk/BAQ56Y3G5T//wzhKUc0zrYu0=" + }, + "method": "text" + }, + { + "hash": { + "algorithm": "sha1", + "format": "base64", + "hash": "gGemBoenViNZM3hiwqns/Fgzqwo=" + }, + "method": "flat" + }, + { + "hash": { + "algorithm": "sha256", + "format": "base64", + "hash": "EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM=" + }, + "method": "nar" + } +] diff --git 
a/src/libstore-tests/data/common-protocol/drv-output.json b/src/libstore-tests/data/common-protocol/drv-output.json new file mode 100644 index 000000000..2668d70c9 --- /dev/null +++ b/src/libstore-tests/data/common-protocol/drv-output.json @@ -0,0 +1,4 @@ +[ + "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!quux" +] diff --git a/src/libstore-tests/data/common-protocol/optional-content-address.json b/src/libstore-tests/data/common-protocol/optional-content-address.json new file mode 100644 index 000000000..6cdaa59a5 --- /dev/null +++ b/src/libstore-tests/data/common-protocol/optional-content-address.json @@ -0,0 +1,11 @@ +[ + null, + { + "hash": { + "algorithm": "sha1", + "format": "base64", + "hash": "gGemBoenViNZM3hiwqns/Fgzqwo=" + }, + "method": "flat" + } +] diff --git a/src/libstore-tests/data/common-protocol/optional-store-path.json b/src/libstore-tests/data/common-protocol/optional-store-path.json new file mode 100644 index 000000000..58519a4d2 --- /dev/null +++ b/src/libstore-tests/data/common-protocol/optional-store-path.json @@ -0,0 +1,4 @@ +[ + null, + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" +] diff --git a/src/libstore-tests/data/common-protocol/realisation-with-deps.json b/src/libstore-tests/data/common-protocol/realisation-with-deps.json new file mode 100644 index 000000000..77148d14c --- /dev/null +++ b/src/libstore-tests/data/common-protocol/realisation-with-deps.json @@ -0,0 +1,13 @@ +[ + { + "dependentRealisations": { + "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!quux": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" + }, + "id": "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [ + "asdf", + "qwer" + ] + } +] diff --git a/src/libstore-tests/data/common-protocol/realisation.json 
b/src/libstore-tests/data/common-protocol/realisation.json new file mode 100644 index 000000000..f9ff09dbb --- /dev/null +++ b/src/libstore-tests/data/common-protocol/realisation.json @@ -0,0 +1,17 @@ +[ + { + "dependentRealisations": {}, + "id": "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [] + }, + { + "dependentRealisations": {}, + "id": "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [ + "asdf", + "qwer" + ] + } +] diff --git a/src/libstore-tests/data/common-protocol/set.json b/src/libstore-tests/data/common-protocol/set.json new file mode 100644 index 000000000..acd123082 --- /dev/null +++ b/src/libstore-tests/data/common-protocol/set.json @@ -0,0 +1,22 @@ +[ + [], + [ + "" + ], + [ + "", + "bar", + "foo" + ], + [ + [], + [ + "" + ], + [ + "", + "1", + "2" + ] + ] +] diff --git a/src/libstore-tests/data/common-protocol/store-path.json b/src/libstore-tests/data/common-protocol/store-path.json new file mode 100644 index 000000000..16459245b --- /dev/null +++ b/src/libstore-tests/data/common-protocol/store-path.json @@ -0,0 +1,4 @@ +[ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" +] diff --git a/src/libstore-tests/data/common-protocol/string.json b/src/libstore-tests/data/common-protocol/string.json new file mode 100644 index 000000000..d3db4f3b4 --- /dev/null +++ b/src/libstore-tests/data/common-protocol/string.json @@ -0,0 +1,7 @@ +[ + "", + "hi", + "white rabbit", + "大白兔", + "oh no " +] diff --git a/src/libstore-tests/data/common-protocol/vector.json b/src/libstore-tests/data/common-protocol/vector.json new file mode 100644 index 000000000..2b8cc1b3a --- /dev/null +++ b/src/libstore-tests/data/common-protocol/vector.json @@ -0,0 +1,22 @@ +[ + [], + [ + "" + ], + [ + "", + "foo", + "bar" + ], + [ + [], + [ + "" + ], + [ + 
"", + "1", + "2" + ] + ] +] diff --git a/src/libstore-tests/data/serve-protocol/build-result-2.2.json b/src/libstore-tests/data/serve-protocol/build-result-2.2.json new file mode 100644 index 000000000..029bcb5a8 --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/build-result-2.2.json @@ -0,0 +1,28 @@ +[ + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "OutputRejected", + "stopTime": 0, + "success": false, + "timesBuilt": 0 + }, + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "NotDeterministic", + "stopTime": 0, + "success": false, + "timesBuilt": 0 + }, + { + "builtOutputs": {}, + "startTime": 0, + "status": "Built", + "stopTime": 0, + "success": true, + "timesBuilt": 0 + } +] diff --git a/src/libstore-tests/data/serve-protocol/build-result-2.3.json b/src/libstore-tests/data/serve-protocol/build-result-2.3.json new file mode 100644 index 000000000..be14b0947 --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/build-result-2.3.json @@ -0,0 +1,28 @@ +[ + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "OutputRejected", + "stopTime": 0, + "success": false, + "timesBuilt": 0 + }, + { + "errorMsg": "no idea why", + "isNonDeterministic": true, + "startTime": 30, + "status": "NotDeterministic", + "stopTime": 50, + "success": false, + "timesBuilt": 3 + }, + { + "builtOutputs": {}, + "startTime": 30, + "status": "Built", + "stopTime": 50, + "success": true, + "timesBuilt": 0 + } +] diff --git a/src/libstore-tests/data/serve-protocol/build-result-2.6.json b/src/libstore-tests/data/serve-protocol/build-result-2.6.json new file mode 100644 index 000000000..30a8e82e9 --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/build-result-2.6.json @@ -0,0 +1,41 @@ +[ + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "OutputRejected", + "stopTime": 0, + "success": false, + "timesBuilt": 0 
+ }, + { + "errorMsg": "no idea why", + "isNonDeterministic": true, + "startTime": 30, + "status": "NotDeterministic", + "stopTime": 50, + "success": false, + "timesBuilt": 3 + }, + { + "builtOutputs": { + "bar": { + "dependentRealisations": {}, + "id": "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!bar", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "signatures": [] + }, + "foo": { + "dependentRealisations": {}, + "id": "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!foo", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [] + } + }, + "startTime": 30, + "status": "Built", + "stopTime": 50, + "success": true, + "timesBuilt": 1 + } +] diff --git a/src/libstore-tests/data/serve-protocol/content-address.json b/src/libstore-tests/data/serve-protocol/content-address.json new file mode 100644 index 000000000..9a0d57154 --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/content-address.json @@ -0,0 +1,26 @@ +[ + { + "hash": { + "algorithm": "sha256", + "format": "base64", + "hash": "+Xc9Ll6mcPltwaewrk/BAQ56Y3G5T//wzhKUc0zrYu0=" + }, + "method": "text" + }, + { + "hash": { + "algorithm": "sha1", + "format": "base64", + "hash": "gGemBoenViNZM3hiwqns/Fgzqwo=" + }, + "method": "flat" + }, + { + "hash": { + "algorithm": "sha256", + "format": "base64", + "hash": "EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM=" + }, + "method": "nar" + } +] diff --git a/src/libstore-tests/data/serve-protocol/drv-output.json b/src/libstore-tests/data/serve-protocol/drv-output.json new file mode 100644 index 000000000..2668d70c9 --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/drv-output.json @@ -0,0 +1,4 @@ +[ + "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!quux" +] diff --git a/src/libstore-tests/data/serve-protocol/optional-content-address.json 
b/src/libstore-tests/data/serve-protocol/optional-content-address.json new file mode 100644 index 000000000..6cdaa59a5 --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/optional-content-address.json @@ -0,0 +1,11 @@ +[ + null, + { + "hash": { + "algorithm": "sha1", + "format": "base64", + "hash": "gGemBoenViNZM3hiwqns/Fgzqwo=" + }, + "method": "flat" + } +] diff --git a/src/libstore-tests/data/serve-protocol/optional-store-path.json b/src/libstore-tests/data/serve-protocol/optional-store-path.json new file mode 100644 index 000000000..58519a4d2 --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/optional-store-path.json @@ -0,0 +1,4 @@ +[ + null, + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" +] diff --git a/src/libstore-tests/data/serve-protocol/realisation-with-deps.json b/src/libstore-tests/data/serve-protocol/realisation-with-deps.json new file mode 100644 index 000000000..77148d14c --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/realisation-with-deps.json @@ -0,0 +1,13 @@ +[ + { + "dependentRealisations": { + "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!quux": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" + }, + "id": "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [ + "asdf", + "qwer" + ] + } +] diff --git a/src/libstore-tests/data/serve-protocol/realisation.json b/src/libstore-tests/data/serve-protocol/realisation.json new file mode 100644 index 000000000..f9ff09dbb --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/realisation.json @@ -0,0 +1,17 @@ +[ + { + "dependentRealisations": {}, + "id": "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [] + }, + { + "dependentRealisations": {}, + "id": "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "outPath": 
"g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [ + "asdf", + "qwer" + ] + } +] diff --git a/src/libstore-tests/data/serve-protocol/set.json b/src/libstore-tests/data/serve-protocol/set.json new file mode 100644 index 000000000..acd123082 --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/set.json @@ -0,0 +1,22 @@ +[ + [], + [ + "" + ], + [ + "", + "bar", + "foo" + ], + [ + [], + [ + "" + ], + [ + "", + "1", + "2" + ] + ] +] diff --git a/src/libstore-tests/data/serve-protocol/store-path.json b/src/libstore-tests/data/serve-protocol/store-path.json new file mode 100644 index 000000000..16459245b --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/store-path.json @@ -0,0 +1,4 @@ +[ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" +] diff --git a/src/libstore-tests/data/serve-protocol/string.json b/src/libstore-tests/data/serve-protocol/string.json new file mode 100644 index 000000000..d3db4f3b4 --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/string.json @@ -0,0 +1,7 @@ +[ + "", + "hi", + "white rabbit", + "大白兔", + "oh no " +] diff --git a/src/libstore-tests/data/serve-protocol/unkeyed-valid-path-info-2.3.json b/src/libstore-tests/data/serve-protocol/unkeyed-valid-path-info-2.3.json new file mode 100644 index 000000000..b8d914380 --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/unkeyed-valid-path-info-2.3.json @@ -0,0 +1,34 @@ +[ + { + "ca": null, + "deriver": null, + "narHash": { + "algorithm": "sha256", + "format": "base64", + "hash": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=" + }, + "narSize": 34878, + "references": [], + "registrationTime": null, + "signatures": [], + "ultimate": false, + "version": 2 + }, + { + "ca": null, + "deriver": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "narHash": { + "algorithm": "sha256", + "format": "base64", + "hash": "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=" + }, + "narSize": 34878, + "references": [ + 
"g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo.drv" + ], + "registrationTime": null, + "signatures": [], + "ultimate": false, + "version": 2 + } +] diff --git a/src/libstore-tests/data/serve-protocol/unkeyed-valid-path-info-2.4.json b/src/libstore-tests/data/serve-protocol/unkeyed-valid-path-info-2.4.json new file mode 100644 index 000000000..725880590 --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/unkeyed-valid-path-info-2.4.json @@ -0,0 +1,47 @@ +[ + { + "ca": null, + "deriver": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "narHash": { + "algorithm": "sha256", + "format": "base64", + "hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=" + }, + "narSize": 34878, + "references": [ + "g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo.drv" + ], + "registrationTime": null, + "signatures": [], + "ultimate": false, + "version": 2 + }, + { + "ca": { + "hash": { + "algorithm": "sha256", + "format": "base64", + "hash": "EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM=" + }, + "method": "nar" + }, + "deriver": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "narHash": { + "algorithm": "sha256", + "format": "base64", + "hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=" + }, + "narSize": 34878, + "references": [ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "n5wkd9frr45pa74if5gpz9j7mifg27fh-foo" + ], + "registrationTime": null, + "signatures": [ + "fake-sig-1", + "fake-sig-2" + ], + "ultimate": false, + "version": 2 + } +] diff --git a/src/libstore-tests/data/serve-protocol/vector.json b/src/libstore-tests/data/serve-protocol/vector.json new file mode 100644 index 000000000..2b8cc1b3a --- /dev/null +++ b/src/libstore-tests/data/serve-protocol/vector.json @@ -0,0 +1,22 @@ +[ + [], + [ + "" + ], + [ + "", + "foo", + "bar" + ], + [ + [], + [ + "" + ], + [ + "", + "1", + "2" + ] + ] +] diff --git a/src/libstore-tests/data/worker-protocol/build-mode.json b/src/libstore-tests/data/worker-protocol/build-mode.json new file mode 100644 index 000000000..2ef158f87 --- /dev/null +++ 
b/src/libstore-tests/data/worker-protocol/build-mode.json @@ -0,0 +1,5 @@ +[ + 0, + 1, + 2 +] diff --git a/src/libstore-tests/data/worker-protocol/build-result-1.27.json b/src/libstore-tests/data/worker-protocol/build-result-1.27.json new file mode 100644 index 000000000..029bcb5a8 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/build-result-1.27.json @@ -0,0 +1,28 @@ +[ + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "OutputRejected", + "stopTime": 0, + "success": false, + "timesBuilt": 0 + }, + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "NotDeterministic", + "stopTime": 0, + "success": false, + "timesBuilt": 0 + }, + { + "builtOutputs": {}, + "startTime": 0, + "status": "Built", + "stopTime": 0, + "success": true, + "timesBuilt": 0 + } +] diff --git a/src/libstore-tests/data/worker-protocol/build-result-1.28.json b/src/libstore-tests/data/worker-protocol/build-result-1.28.json new file mode 100644 index 000000000..d02845b7f --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/build-result-1.28.json @@ -0,0 +1,41 @@ +[ + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "OutputRejected", + "stopTime": 0, + "success": false, + "timesBuilt": 0 + }, + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "NotDeterministic", + "stopTime": 0, + "success": false, + "timesBuilt": 0 + }, + { + "builtOutputs": { + "bar": { + "dependentRealisations": {}, + "id": "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!bar", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "signatures": [] + }, + "foo": { + "dependentRealisations": {}, + "id": "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!foo", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [] + } + }, + "startTime": 0, + "status": "Built", + "stopTime": 0, + 
"success": true, + "timesBuilt": 0 + } +] diff --git a/src/libstore-tests/data/worker-protocol/build-result-1.29.json b/src/libstore-tests/data/worker-protocol/build-result-1.29.json new file mode 100644 index 000000000..30a8e82e9 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/build-result-1.29.json @@ -0,0 +1,41 @@ +[ + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "OutputRejected", + "stopTime": 0, + "success": false, + "timesBuilt": 0 + }, + { + "errorMsg": "no idea why", + "isNonDeterministic": true, + "startTime": 30, + "status": "NotDeterministic", + "stopTime": 50, + "success": false, + "timesBuilt": 3 + }, + { + "builtOutputs": { + "bar": { + "dependentRealisations": {}, + "id": "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!bar", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "signatures": [] + }, + "foo": { + "dependentRealisations": {}, + "id": "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!foo", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [] + } + }, + "startTime": 30, + "status": "Built", + "stopTime": 50, + "success": true, + "timesBuilt": 1 + } +] diff --git a/src/libstore-tests/data/worker-protocol/build-result-1.37.json b/src/libstore-tests/data/worker-protocol/build-result-1.37.json new file mode 100644 index 000000000..61cddd2ca --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/build-result-1.37.json @@ -0,0 +1,43 @@ +[ + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "startTime": 0, + "status": "OutputRejected", + "stopTime": 0, + "success": false, + "timesBuilt": 0 + }, + { + "errorMsg": "no idea why", + "isNonDeterministic": true, + "startTime": 30, + "status": "NotDeterministic", + "stopTime": 50, + "success": false, + "timesBuilt": 3 + }, + { + "builtOutputs": { + "bar": { + "dependentRealisations": {}, + "id": 
"sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!bar", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "signatures": [] + }, + "foo": { + "dependentRealisations": {}, + "id": "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!foo", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [] + } + }, + "cpuSystem": 604000000, + "cpuUser": 500000000, + "startTime": 30, + "status": "Built", + "stopTime": 50, + "success": true, + "timesBuilt": 1 + } +] diff --git a/src/libstore-tests/data/worker-protocol/content-address.json b/src/libstore-tests/data/worker-protocol/content-address.json new file mode 100644 index 000000000..9a0d57154 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/content-address.json @@ -0,0 +1,26 @@ +[ + { + "hash": { + "algorithm": "sha256", + "format": "base64", + "hash": "+Xc9Ll6mcPltwaewrk/BAQ56Y3G5T//wzhKUc0zrYu0=" + }, + "method": "text" + }, + { + "hash": { + "algorithm": "sha1", + "format": "base64", + "hash": "gGemBoenViNZM3hiwqns/Fgzqwo=" + }, + "method": "flat" + }, + { + "hash": { + "algorithm": "sha256", + "format": "base64", + "hash": "EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM=" + }, + "method": "nar" + } +] diff --git a/src/libstore-tests/data/worker-protocol/derived-path-1.29.json b/src/libstore-tests/data/worker-protocol/derived-path-1.29.json new file mode 100644 index 000000000..f0efe4a35 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/derived-path-1.29.json @@ -0,0 +1,16 @@ +[ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + { + "drvPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "outputs": [ + "*" + ] + }, + { + "drvPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "outputs": [ + "x", + "y" + ] + } +] diff --git a/src/libstore-tests/data/worker-protocol/derived-path-1.30.json b/src/libstore-tests/data/worker-protocol/derived-path-1.30.json new file mode 100644 index 000000000..7a67e4761 --- /dev/null +++ 
b/src/libstore-tests/data/worker-protocol/derived-path-1.30.json @@ -0,0 +1,17 @@ +[ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo.drv", + { + "drvPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "outputs": [ + "*" + ] + }, + { + "drvPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "outputs": [ + "x", + "y" + ] + } +] diff --git a/src/libstore-tests/data/worker-protocol/drv-output.json b/src/libstore-tests/data/worker-protocol/drv-output.json new file mode 100644 index 000000000..2668d70c9 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/drv-output.json @@ -0,0 +1,4 @@ +[ + "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!quux" +] diff --git a/src/libstore-tests/data/worker-protocol/keyed-build-result-1.29.json b/src/libstore-tests/data/worker-protocol/keyed-build-result-1.29.json new file mode 100644 index 000000000..c15d47aa3 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/keyed-build-result-1.29.json @@ -0,0 +1,27 @@ +[ + { + "errorMsg": "no idea why", + "isNonDeterministic": false, + "path": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-xxx", + "startTime": 0, + "status": "OutputRejected", + "stopTime": 0, + "success": false, + "timesBuilt": 0 + }, + { + "errorMsg": "no idea why", + "isNonDeterministic": true, + "path": { + "drvPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "outputs": [ + "out" + ] + }, + "startTime": 30, + "status": "NotDeterministic", + "stopTime": 50, + "success": false, + "timesBuilt": 3 + } +] diff --git a/src/libstore-tests/data/worker-protocol/optional-content-address.json b/src/libstore-tests/data/worker-protocol/optional-content-address.json new file mode 100644 index 000000000..6cdaa59a5 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/optional-content-address.json @@ -0,0 +1,11 @@ +[ + null, + { + "hash": { + "algorithm": "sha1", + "format": "base64", + 
"hash": "gGemBoenViNZM3hiwqns/Fgzqwo=" + }, + "method": "flat" + } +] diff --git a/src/libstore-tests/data/worker-protocol/optional-store-path.json b/src/libstore-tests/data/worker-protocol/optional-store-path.json new file mode 100644 index 000000000..58519a4d2 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/optional-store-path.json @@ -0,0 +1,4 @@ +[ + null, + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" +] diff --git a/src/libstore-tests/data/worker-protocol/optional-trusted-flag.json b/src/libstore-tests/data/worker-protocol/optional-trusted-flag.json new file mode 100644 index 000000000..2f3c092f8 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/optional-trusted-flag.json @@ -0,0 +1,5 @@ +[ + null, + true, + false +] diff --git a/src/libstore-tests/data/worker-protocol/realisation-with-deps.json b/src/libstore-tests/data/worker-protocol/realisation-with-deps.json new file mode 100644 index 000000000..77148d14c --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/realisation-with-deps.json @@ -0,0 +1,13 @@ +[ + { + "dependentRealisations": { + "sha256:6f869f9ea2823bda165e06076fd0de4366dead2c0e8d2dbbad277d4f15c373f5!quux": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo" + }, + "id": "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [ + "asdf", + "qwer" + ] + } +] diff --git a/src/libstore-tests/data/worker-protocol/realisation.json b/src/libstore-tests/data/worker-protocol/realisation.json new file mode 100644 index 000000000..f9ff09dbb --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/realisation.json @@ -0,0 +1,17 @@ +[ + { + "dependentRealisations": {}, + "id": "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", + "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [] + }, + { + "dependentRealisations": {}, + "id": "sha256:15e3c560894cbb27085cf65b5a2ecb18488c999497f4531b6907a7581ce6d527!baz", 
+ "outPath": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "signatures": [ + "asdf", + "qwer" + ] + } +] diff --git a/src/libstore-tests/data/worker-protocol/set.json b/src/libstore-tests/data/worker-protocol/set.json new file mode 100644 index 000000000..acd123082 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/set.json @@ -0,0 +1,22 @@ +[ + [], + [ + "" + ], + [ + "", + "bar", + "foo" + ], + [ + [], + [ + "" + ], + [ + "", + "1", + "2" + ] + ] +] diff --git a/src/libstore-tests/data/worker-protocol/store-path.json b/src/libstore-tests/data/worker-protocol/store-path.json new file mode 100644 index 000000000..16459245b --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/store-path.json @@ -0,0 +1,4 @@ +[ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo", + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-foo-bar" +] diff --git a/src/libstore-tests/data/worker-protocol/string.json b/src/libstore-tests/data/worker-protocol/string.json new file mode 100644 index 000000000..d3db4f3b4 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/string.json @@ -0,0 +1,7 @@ +[ + "", + "hi", + "white rabbit", + "大白兔", + "oh no " +] diff --git a/src/libstore-tests/data/worker-protocol/unkeyed-valid-path-info-1.15.json b/src/libstore-tests/data/worker-protocol/unkeyed-valid-path-info-1.15.json new file mode 100644 index 000000000..0d78d3875 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/unkeyed-valid-path-info-1.15.json @@ -0,0 +1,34 @@ +[ + { + "ca": null, + "deriver": null, + "narHash": { + "algorithm": "sha256", + "format": "base64", + "hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=" + }, + "narSize": 34878, + "references": [], + "registrationTime": 23423, + "signatures": [], + "ultimate": false, + "version": 2 + }, + { + "ca": null, + "deriver": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "narHash": { + "algorithm": "sha256", + "format": "base64", + "hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=" + }, + "narSize": 34878, + "references": [ + 
"g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo.drv" + ], + "registrationTime": 23423, + "signatures": [], + "ultimate": false, + "version": 2 + } +] diff --git a/src/libstore-tests/data/worker-protocol/valid-path-info-1.15.json b/src/libstore-tests/data/worker-protocol/valid-path-info-1.15.json new file mode 100644 index 000000000..6d153ee1c --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/valid-path-info-1.15.json @@ -0,0 +1,37 @@ +[ + { + "ca": null, + "deriver": null, + "narHash": { + "algorithm": "sha256", + "format": "base64", + "hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=" + }, + "narSize": 34878, + "path": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "references": [], + "registrationTime": 23423, + "signatures": [], + "ultimate": false, + "version": 2 + }, + { + "ca": null, + "deriver": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "narHash": { + "algorithm": "sha256", + "format": "base64", + "hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=" + }, + "narSize": 34878, + "path": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "references": [ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo" + ], + "registrationTime": 23423, + "signatures": [], + "ultimate": false, + "version": 2 + } +] diff --git a/src/libstore-tests/data/worker-protocol/valid-path-info-1.16.json b/src/libstore-tests/data/worker-protocol/valid-path-info-1.16.json new file mode 100644 index 000000000..9e2806824 --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/valid-path-info-1.16.json @@ -0,0 +1,66 @@ +[ + { + "ca": null, + "deriver": null, + "narHash": { + "algorithm": "sha256", + "format": "base64", + "hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=" + }, + "narSize": 34878, + "path": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "references": [], + "registrationTime": 23423, + "signatures": [], + "ultimate": true, + "version": 2 + }, + { + "ca": null, + "deriver": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv", + "narHash": { + "algorithm": 
"sha256", + "format": "base64", + "hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=" + }, + "narSize": 34878, + "path": "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "references": [ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "g1w7hyyyy1w7hy3qg1w7hy3qgqqqqy3q-foo" + ], + "registrationTime": 23423, + "signatures": [ + "fake-sig-1", + "fake-sig-2" + ], + "ultimate": false, + "version": 2 + }, + { + "ca": { + "hash": { + "algorithm": "sha256", + "format": "base64", + "hash": "EMIJ+giQ/gLIWoxmPKjno3zHZrxbGymgzGGyZvZBIdM=" + }, + "method": "nar" + }, + "deriver": null, + "narHash": { + "algorithm": "sha256", + "format": "base64", + "hash": "FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=" + }, + "narSize": 34878, + "path": "n5wkd9frr45pa74if5gpz9j7mifg27fh-foo", + "references": [ + "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar", + "n5wkd9frr45pa74if5gpz9j7mifg27fh-foo" + ], + "registrationTime": 23423, + "signatures": [], + "ultimate": false, + "version": 2 + } +] diff --git a/src/libstore-tests/data/worker-protocol/vector.json b/src/libstore-tests/data/worker-protocol/vector.json new file mode 100644 index 000000000..2b8cc1b3a --- /dev/null +++ b/src/libstore-tests/data/worker-protocol/vector.json @@ -0,0 +1,22 @@ +[ + [], + [ + "" + ], + [ + "", + "foo", + "bar" + ], + [ + [], + [ + "" + ], + [ + "", + "1", + "2" + ] + ] +] diff --git a/src/libstore-tests/serve-protocol.cc b/src/libstore-tests/serve-protocol.cc index a7b69821c..e04d89e3d 100644 --- a/src/libstore-tests/serve-protocol.cc +++ b/src/libstore-tests/serve-protocol.cc @@ -4,6 +4,7 @@ #include #include +#include "nix/util/json-utils.hh" #include "nix/store/serve-protocol.hh" #include "nix/store/serve-protocol-impl.hh" #include "nix/store/serve-protocol-connection.hh" @@ -334,7 +335,7 @@ VERSIONED_CHARACTERIZATION_TEST( }), })) -VERSIONED_CHARACTERIZATION_TEST( +VERSIONED_CHARACTERIZATION_TEST_NO_JSON( ServeProtoTest, build_options_2_1, "build-options-2.1", @@ -344,7 +345,7 @@ VERSIONED_CHARACTERIZATION_TEST( 
.buildTimeout = 6, })) -VERSIONED_CHARACTERIZATION_TEST( +VERSIONED_CHARACTERIZATION_TEST_NO_JSON( ServeProtoTest, build_options_2_2, "build-options-2.2", @@ -355,7 +356,7 @@ VERSIONED_CHARACTERIZATION_TEST( .maxLogSize = 7, })) -VERSIONED_CHARACTERIZATION_TEST( +VERSIONED_CHARACTERIZATION_TEST_NO_JSON( ServeProtoTest, build_options_2_3, "build-options-2.3", @@ -368,7 +369,7 @@ VERSIONED_CHARACTERIZATION_TEST( .enforceDeterminism = true, })) -VERSIONED_CHARACTERIZATION_TEST( +VERSIONED_CHARACTERIZATION_TEST_NO_JSON( ServeProtoTest, build_options_2_7, "build-options-2.7", @@ -439,7 +440,7 @@ VERSIONED_CHARACTERIZATION_TEST( TEST_F(ServeProtoTest, handshake_log) { - CharacterizationTest::writeTest("handshake-to-client", [&]() -> std::string { + CharacterizationTest::writeTest("handshake-to-client.bin", [&]() -> std::string { StringSink toClientLog; Pipe toClient, toServer; @@ -475,7 +476,7 @@ struct NullBufferedSink : BufferedSink TEST_F(ServeProtoTest, handshake_client_replay) { - CharacterizationTest::readTest("handshake-to-client", [&](std::string toClientLog) { + CharacterizationTest::readTest("handshake-to-client.bin", [&](std::string toClientLog) { NullBufferedSink nullSink; StringSource in{toClientLog}; @@ -487,7 +488,7 @@ TEST_F(ServeProtoTest, handshake_client_replay) TEST_F(ServeProtoTest, handshake_client_truncated_replay_throws) { - CharacterizationTest::readTest("handshake-to-client", [&](std::string toClientLog) { + CharacterizationTest::readTest("handshake-to-client.bin", [&](std::string toClientLog) { for (size_t len = 0; len < toClientLog.size(); ++len) { NullBufferedSink nullSink; auto substring = toClientLog.substr(0, len); @@ -505,7 +506,7 @@ TEST_F(ServeProtoTest, handshake_client_truncated_replay_throws) TEST_F(ServeProtoTest, handshake_client_corrupted_throws) { - CharacterizationTest::readTest("handshake-to-client", [&](const std::string toClientLog) { + CharacterizationTest::readTest("handshake-to-client.bin", [&](const std::string 
toClientLog) { for (size_t idx = 0; idx < toClientLog.size(); ++idx) { // corrupt a copy std::string toClientLogCorrupt = toClientLog; diff --git a/src/libstore-tests/worker-protocol.cc b/src/libstore-tests/worker-protocol.cc index 8f70e937b..1e0ede81c 100644 --- a/src/libstore-tests/worker-protocol.cc +++ b/src/libstore-tests/worker-protocol.cc @@ -4,6 +4,7 @@ #include #include +#include "nix/util/json-utils.hh" #include "nix/store/worker-protocol.hh" #include "nix/store/worker-protocol-connection.hh" #include "nix/store/worker-protocol-impl.hh" @@ -649,7 +650,7 @@ VERSIONED_CHARACTERIZATION_TEST( }, })) -VERSIONED_CHARACTERIZATION_TEST( +VERSIONED_CHARACTERIZATION_TEST_NO_JSON( WorkerProtoTest, clientHandshakeInfo_1_30, "client-handshake-info_1_30", @@ -658,7 +659,7 @@ VERSIONED_CHARACTERIZATION_TEST( {}, })) -VERSIONED_CHARACTERIZATION_TEST( +VERSIONED_CHARACTERIZATION_TEST_NO_JSON( WorkerProtoTest, clientHandshakeInfo_1_33, "client-handshake-info_1_33", @@ -672,7 +673,7 @@ VERSIONED_CHARACTERIZATION_TEST( }, })) -VERSIONED_CHARACTERIZATION_TEST( +VERSIONED_CHARACTERIZATION_TEST_NO_JSON( WorkerProtoTest, clientHandshakeInfo_1_35, "client-handshake-info_1_35", @@ -690,7 +691,7 @@ VERSIONED_CHARACTERIZATION_TEST( TEST_F(WorkerProtoTest, handshake_log) { - CharacterizationTest::writeTest("handshake-to-client", [&]() -> std::string { + CharacterizationTest::writeTest("handshake-to-client.bin", [&]() -> std::string { StringSink toClientLog; Pipe toClient, toServer; @@ -751,7 +752,7 @@ struct NullBufferedSink : BufferedSink TEST_F(WorkerProtoTest, handshake_client_replay) { - CharacterizationTest::readTest("handshake-to-client", [&](std::string toClientLog) { + CharacterizationTest::readTest("handshake-to-client.bin", [&](std::string toClientLog) { NullBufferedSink nullSink; StringSource in{toClientLog}; @@ -764,7 +765,7 @@ TEST_F(WorkerProtoTest, handshake_client_replay) TEST_F(WorkerProtoTest, handshake_client_truncated_replay_throws) { - 
CharacterizationTest::readTest("handshake-to-client", [&](std::string toClientLog) { + CharacterizationTest::readTest("handshake-to-client.bin", [&](std::string toClientLog) { for (size_t len = 0; len < toClientLog.size(); ++len) { NullBufferedSink nullSink; auto substring = toClientLog.substr(0, len); @@ -782,7 +783,7 @@ TEST_F(WorkerProtoTest, handshake_client_truncated_replay_throws) TEST_F(WorkerProtoTest, handshake_client_corrupted_throws) { - CharacterizationTest::readTest("handshake-to-client", [&](const std::string toClientLog) { + CharacterizationTest::readTest("handshake-to-client.bin", [&](const std::string toClientLog) { for (size_t idx = 0; idx < toClientLog.size(); ++idx) { // corrupt a copy std::string toClientLogCorrupt = toClientLog; diff --git a/src/libstore/build-result.cc b/src/libstore/build-result.cc index e3d9e9085..f4bc8ab33 100644 --- a/src/libstore/build-result.cc +++ b/src/libstore/build-result.cc @@ -153,4 +153,20 @@ BuildResult adl_serializer::from_json(const json & _json) return br; } +KeyedBuildResult adl_serializer::from_json(const json & json0) +{ + auto json = getObject(json0); + + return KeyedBuildResult{ + adl_serializer::from_json(json0), + valueAt(json, "path"), + }; +} + +void adl_serializer::to_json(json & json, const KeyedBuildResult & kbr) +{ + adl_serializer::to_json(json, kbr); + json["path"] = kbr.path; +} + } // namespace nlohmann diff --git a/src/libstore/include/nix/store/build-result.hh b/src/libstore/include/nix/store/build-result.hh index 4739232f8..96134791b 100644 --- a/src/libstore/include/nix/store/build-result.hh +++ b/src/libstore/include/nix/store/build-result.hh @@ -178,3 +178,4 @@ struct KeyedBuildResult : BuildResult } // namespace nix JSON_IMPL(nix::BuildResult) +JSON_IMPL(nix::KeyedBuildResult) diff --git a/src/libstore/include/nix/store/path-info.hh b/src/libstore/include/nix/store/path-info.hh index 8f6115b73..a64e8458d 100644 --- a/src/libstore/include/nix/store/path-info.hh +++ 
b/src/libstore/include/nix/store/path-info.hh @@ -197,3 +197,4 @@ using ValidPathInfos = std::map; } // namespace nix JSON_IMPL(nix::UnkeyedValidPathInfo) +JSON_IMPL(nix::ValidPathInfo) diff --git a/src/libstore/include/nix/store/realisation.hh b/src/libstore/include/nix/store/realisation.hh index e8a71862e..af0e4aefd 100644 --- a/src/libstore/include/nix/store/realisation.hh +++ b/src/libstore/include/nix/store/realisation.hh @@ -182,5 +182,6 @@ public: } // namespace nix +JSON_IMPL(nix::DrvOutput) JSON_IMPL(nix::UnkeyedRealisation) JSON_IMPL(nix::Realisation) diff --git a/src/libstore/include/nix/store/store-api.hh b/src/libstore/include/nix/store/store-api.hh index c57eff1f0..4c0b156fa 100644 --- a/src/libstore/include/nix/store/store-api.hh +++ b/src/libstore/include/nix/store/store-api.hh @@ -1004,4 +1004,10 @@ const ContentAddress * getDerivationCA(const BasicDerivation & drv); std::map drvOutputReferences(Store & store, const Derivation & drv, const StorePath & outputPath, Store * evalStore = nullptr); +template<> +struct json_avoids_null : std::true_type +{}; + } // namespace nix + +JSON_IMPL(nix::TrustedFlag) diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc index 34a369810..ca2458302 100644 --- a/src/libstore/misc.cc +++ b/src/libstore/misc.cc @@ -10,6 +10,7 @@ #include "nix/util/closure.hh" #include "nix/store/filetransfer.hh" #include "nix/util/strings.hh" +#include "nix/util/json-utils.hh" #include @@ -482,3 +483,19 @@ OutputPathMap resolveDerivedPath(Store & store, const DerivedPath::Built & bfd) } } // namespace nix + +namespace nlohmann { + +using namespace nix; + +TrustedFlag adl_serializer::from_json(const json & json) +{ + return getBoolean(json) ? 
TrustedFlag::Trusted : TrustedFlag::NotTrusted; +} + +void adl_serializer::to_json(json & json, const TrustedFlag & trustedFlag) +{ + json = static_cast(trustedFlag); +} + +} // namespace nlohmann diff --git a/src/libstore/path-info.cc b/src/libstore/path-info.cc index d56ba9475..6b7469e33 100644 --- a/src/libstore/path-info.cc +++ b/src/libstore/path-info.cc @@ -251,4 +251,20 @@ void adl_serializer::to_json(json & json, const UnkeyedVal json = c.toJSON(nullptr, true); } +ValidPathInfo adl_serializer::from_json(const json & json0) +{ + auto json = getObject(json0); + + return ValidPathInfo{ + valueAt(json, "path"), + adl_serializer::from_json(json0), + }; +} + +void adl_serializer::to_json(json & json, const ValidPathInfo & v) +{ + adl_serializer::to_json(json, v); + json["path"] = v.path; +} + } // namespace nlohmann diff --git a/src/libstore/realisation.cc b/src/libstore/realisation.cc index a7f3b98d6..4aeb05874 100644 --- a/src/libstore/realisation.cc +++ b/src/libstore/realisation.cc @@ -144,6 +144,16 @@ namespace nlohmann { using namespace nix; +DrvOutput adl_serializer::from_json(const json & json) +{ + return DrvOutput::parse(getString(json)); +} + +void adl_serializer::to_json(json & json, const DrvOutput & drvOutput) +{ + json = drvOutput.to_string(); +} + UnkeyedRealisation adl_serializer::from_json(const json & json0) { auto json = getObject(json0); @@ -182,14 +192,14 @@ Realisation adl_serializer::from_json(const json & json0) return Realisation{ static_cast(json0), - DrvOutput::parse(valueAt(json, "id")), + valueAt(json, "id"), }; } void adl_serializer::to_json(json & json, const Realisation & r) { json = static_cast(r); - json["id"] = r.id.to_string(); + json["id"] = r.id; } } // namespace nlohmann diff --git a/src/libutil-test-support/include/nix/util/tests/json-characterization.hh b/src/libutil-test-support/include/nix/util/tests/json-characterization.hh index d713c615b..0ee6fd2fd 100644 --- 
a/src/libutil-test-support/include/nix/util/tests/json-characterization.hh +++ b/src/libutil-test-support/include/nix/util/tests/json-characterization.hh @@ -11,6 +11,34 @@ namespace nix { +/** + * Golden test for JSON reading + */ +template +void readJsonTest(CharacterizationTest & test, PathView testStem, const T & expected, auto... args) +{ + using namespace nlohmann; + test.readTest(Path{testStem} + ".json", [&](const auto & encodedRaw) { + auto encoded = json::parse(encodedRaw); + T decoded = adl_serializer::from_json(encoded, args...); + ASSERT_EQ(decoded, expected); + }); +} + +/** + * Golden test for JSON writing + */ +template +void writeJsonTest(CharacterizationTest & test, PathView testStem, const T & value) +{ + using namespace nlohmann; + test.writeTest( + Path{testStem} + ".json", + [&]() -> json { return static_cast(value); }, + [](const auto & file) { return json::parse(readFile(file)); }, + [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); +} + /** * Mixin class for writing characterization tests for `nlohmann::json` * conversions for a given type. @@ -26,12 +54,7 @@ struct JsonCharacterizationTest : virtual CharacterizationTest */ void readJsonTest(PathView testStem, const T & expected, auto... 
args) { - using namespace nlohmann; - readTest(Path{testStem} + ".json", [&](const auto & encodedRaw) { - auto encoded = json::parse(encodedRaw); - T decoded = adl_serializer::from_json(encoded, args...); - ASSERT_EQ(decoded, expected); - }); + nix::readJsonTest(*this, testStem, expected, args...); } /** @@ -42,12 +65,7 @@ struct JsonCharacterizationTest : virtual CharacterizationTest */ void writeJsonTest(PathView testStem, const T & value) { - using namespace nlohmann; - writeTest( - Path{testStem} + ".json", - [&]() -> json { return static_cast(value); }, - [](const auto & file) { return json::parse(readFile(file)); }, - [](const auto & file, const auto & got) { return writeFile(file, got.dump(2) + "\n"); }); + nix::writeJsonTest(*this, testStem, value); } }; From a5eba9a354e5e2d382a3bc1eac9be0e72a48884f Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Tue, 11 Nov 2025 04:12:44 +0300 Subject: [PATCH 204/213] libexpr: Fix error message in forceStringNoCtx Otherwise it would print the address of the value. 
--- src/libexpr/eval.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 71619a9be..8e74316ca 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -2340,7 +2340,7 @@ std::string_view EvalState::forceStringNoCtx(Value & v, const PosIdx pos, std::s error( "the string '%1%' is not allowed to refer to a store path (such as '%2%')", v.string_view(), - *v.context()->begin()) + (*v.context()->begin())->view()) .withTrace(pos, errorCtx) .debugThrow(); } From 7ff3cc65e4badadc1d612ee9e0190aabafac8d98 Mon Sep 17 00:00:00 2001 From: Taeer Bar-Yam Date: Tue, 11 Nov 2025 17:48:07 +0100 Subject: [PATCH 205/213] move allocBytes() into EvalMemory --- src/libexpr/eval.cc | 27 +++++++++-------- src/libexpr/include/nix/expr/eval-inline.hh | 2 +- src/libexpr/include/nix/expr/eval.hh | 3 +- src/libexpr/include/nix/expr/value.hh | 9 +++--- src/libexpr/primops.cc | 33 +++++++++++---------- src/libexpr/primops/context.cc | 6 ++-- 6 files changed, 44 insertions(+), 36 deletions(-) diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc index 8e74316ca..8b068e8c7 100644 --- a/src/libexpr/eval.cc +++ b/src/libexpr/eval.cc @@ -852,25 +852,26 @@ void Value::mkString(std::string_view s) mkStringNoCopy(StringData::make(s)); } -Value::StringWithContext::Context * Value::StringWithContext::Context::fromBuilder(const NixStringContext & context) +Value::StringWithContext::Context * +Value::StringWithContext::Context::fromBuilder(const NixStringContext & context, EvalMemory & mem) { if (context.empty()) return nullptr; - auto ctx = new (allocBytes(sizeof(Context) + context.size() * sizeof(value_type))) Context(context.size()); + auto ctx = new (mem.allocBytes(sizeof(Context) + context.size() * sizeof(value_type))) Context(context.size()); std::ranges::transform( context, ctx->elems, [](const NixStringContextElem & elt) { return &StringData::make(elt.to_string()); }); return ctx; } -void Value::mkString(std::string_view s, const 
NixStringContext & context) +void Value::mkString(std::string_view s, const NixStringContext & context, EvalMemory & mem) { - mkStringNoCopy(StringData::make(s), Value::StringWithContext::Context::fromBuilder(context)); + mkStringNoCopy(StringData::make(s), Value::StringWithContext::Context::fromBuilder(context, mem)); } -void Value::mkStringMove(const StringData & s, const NixStringContext & context) +void Value::mkStringMove(const StringData & s, const NixStringContext & context, EvalMemory & mem) { - mkStringNoCopy(s, Value::StringWithContext::Context::fromBuilder(context)); + mkStringNoCopy(s, Value::StringWithContext::Context::fromBuilder(context, mem)); } void Value::mkPath(const SourcePath & path) @@ -911,9 +912,9 @@ inline Value * EvalState::lookupVar(Env * env, const ExprVar & var, bool noEval) } } -ListBuilder::ListBuilder(size_t size) +ListBuilder::ListBuilder(EvalMemory & mem, size_t size) : size(size) - , elems(size <= 2 ? inlineElems : (Value **) allocBytes(size * sizeof(Value *))) + , elems(size <= 2 ? 
inlineElems : (Value **) mem.allocBytes(size * sizeof(Value *))) { } @@ -953,7 +954,8 @@ void EvalState::mkStorePathString(const StorePath & p, Value & v) store->printStorePath(p), NixStringContext{ NixStringContextElem::Opaque{.path = p}, - }); + }, + mem); } std::string EvalState::mkOutputStringRaw( @@ -975,7 +977,7 @@ void EvalState::mkOutputString( std::optional optStaticOutputPath, const ExperimentalFeatureSettings & xpSettings) { - value.mkString(mkOutputStringRaw(b, optStaticOutputPath, xpSettings), NixStringContext{b}); + value.mkString(mkOutputStringRaw(b, optStaticOutputPath, xpSettings), NixStringContext{b}, mem); } std::string EvalState::mkSingleDerivedPathStringRaw(const SingleDerivedPath & p) @@ -1010,7 +1012,8 @@ void EvalState::mkSingleDerivedPathString(const SingleDerivedPath & p, Value & v mkSingleDerivedPathStringRaw(p), NixStringContext{ std::visit([](auto && v) -> NixStringContextElem { return v; }, p), - }); + }, + mem); } Value * Expr::maybeThunk(EvalState & state, Env & env) @@ -2143,7 +2146,7 @@ void ExprConcatStrings::eval(EvalState & state, Env & env, Value & v) tmp += part->size(); } *tmp = '\0'; - v.mkStringMove(resultStr, context); + v.mkStringMove(resultStr, context, state.mem); } } diff --git a/src/libexpr/include/nix/expr/eval-inline.hh b/src/libexpr/include/nix/expr/eval-inline.hh index 1320da914..e8aa380fd 100644 --- a/src/libexpr/include/nix/expr/eval-inline.hh +++ b/src/libexpr/include/nix/expr/eval-inline.hh @@ -12,7 +12,7 @@ namespace nix { * Note: Various places expect the allocated memory to be zeroed. 
*/ [[gnu::always_inline]] -inline void * allocBytes(size_t n) +inline void * EvalMemory::allocBytes(size_t n) { void * p; #if NIX_USE_BOEHMGC diff --git a/src/libexpr/include/nix/expr/eval.hh b/src/libexpr/include/nix/expr/eval.hh index 0c7f9cf09..85cbffbe8 100644 --- a/src/libexpr/include/nix/expr/eval.hh +++ b/src/libexpr/include/nix/expr/eval.hh @@ -335,6 +335,7 @@ public: EvalMemory & operator=(const EvalMemory &) = delete; EvalMemory & operator=(EvalMemory &&) = delete; + inline void * allocBytes(size_t n); inline Value * allocValue(); inline Env & allocEnv(size_t size); @@ -348,7 +349,7 @@ public: ListBuilder buildList(size_t size) { stats.nrListElems += size; - return ListBuilder(size); + return ListBuilder(*this, size); } const Statistics & getStats() const & diff --git a/src/libexpr/include/nix/expr/value.hh b/src/libexpr/include/nix/expr/value.hh index 54a735fbd..ff62092f2 100644 --- a/src/libexpr/include/nix/expr/value.hh +++ b/src/libexpr/include/nix/expr/value.hh @@ -88,6 +88,7 @@ class PosIdx; struct Pos; class StorePath; class EvalState; +class EvalMemory; class XMLWriter; class Printer; @@ -161,7 +162,7 @@ class ListBuilder Value * inlineElems[2] = {nullptr, nullptr}; public: Value ** elems; - ListBuilder(size_t size); + ListBuilder(EvalMemory & mem, size_t size); // NOTE: Can be noexcept because we are just copying integral values and // raw pointers. 
@@ -364,7 +365,7 @@ struct ValueBase /** * @return null pointer when context.empty() */ - static Context * fromBuilder(const NixStringContext & context); + static Context * fromBuilder(const NixStringContext & context, EvalMemory & mem); }; /** @@ -1148,9 +1149,9 @@ public: void mkString(std::string_view s); - void mkString(std::string_view s, const NixStringContext & context); + void mkString(std::string_view s, const NixStringContext & context, EvalMemory & mem); - void mkStringMove(const StringData & s, const NixStringContext & context); + void mkStringMove(const StringData & s, const NixStringContext & context, EvalMemory & mem); void mkPath(const SourcePath & path); diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 3b39b7f20..98ed1b450 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -222,7 +222,8 @@ void derivationToValue( path2, { NixStringContextElem::DrvDeep{.drvPath = storePath}, - }); + }, + state.mem); attrs.alloc(state.s.name).mkString(drv.env["name"]); auto list = state.buildList(drv.outputs.size()); @@ -1811,7 +1812,8 @@ static void derivationStrictInternal(EvalState & state, std::string_view drvName drvPathS, { NixStringContextElem::DrvDeep{.drvPath = drvPath}, - }); + }, + state.mem); for (auto & i : drv.outputs) mkOutputString(state, result, drvPath, i); @@ -1864,7 +1866,7 @@ static void prim_toPath(EvalState & state, const PosIdx pos, Value ** args, Valu NixStringContext context; auto path = state.coerceToPath(pos, *args[0], context, "while evaluating the first argument passed to builtins.toPath"); - v.mkString(path.path.abs(), context); + v.mkString(path.path.abs(), context, state.mem); } static RegisterPrimOp primop_toPath({ @@ -1907,7 +1909,7 @@ static void prim_storePath(EvalState & state, const PosIdx pos, Value ** args, V if (!settings.readOnlyMode) state.store->ensurePath(path2); context.insert(NixStringContextElem::Opaque{.path = path2}); - v.mkString(path.abs(), context); + v.mkString(path.abs(), 
context, state.mem); } static RegisterPrimOp primop_storePath({ @@ -1989,7 +1991,8 @@ static void prim_baseNameOf(EvalState & state, const PosIdx pos, Value ** args, v.mkString( legacyBaseNameOf(*state.coerceToString( pos, *args[0], context, "while evaluating the first argument passed to builtins.baseNameOf", false, false)), - context); + context, + state.mem); } static RegisterPrimOp primop_baseNameOf({ @@ -2025,11 +2028,11 @@ static void prim_dirOf(EvalState & state, const PosIdx pos, Value ** args, Value pos, *args[0], context, "while evaluating the first argument passed to 'builtins.dirOf'", false, false); auto pos = path->rfind('/'); if (pos == path->npos) - v.mkStringMove("."_sds, context); + v.mkStringMove("."_sds, context, state.mem); else if (pos == 0) - v.mkStringMove("/"_sds, context); + v.mkStringMove("/"_sds, context, state.mem); else - v.mkString(path->substr(0, pos), context); + v.mkString(path->substr(0, pos), context, state.mem); } } @@ -2071,7 +2074,7 @@ static void prim_readFile(EvalState & state, const PosIdx pos, Value ** args, Va .path = std::move((StorePath &&) p), }); } - v.mkString(s, context); + v.mkString(s, context, state.mem); } static RegisterPrimOp primop_readFile({ @@ -2467,7 +2470,7 @@ static void prim_toXML(EvalState & state, const PosIdx pos, Value ** args, Value std::ostringstream out; NixStringContext context; printValueAsXML(state, true, false, *args[0], out, context, pos); - v.mkString(out.view(), context); + v.mkString(out.view(), context, state.mem); } static RegisterPrimOp primop_toXML({ @@ -2575,7 +2578,7 @@ static void prim_toJSON(EvalState & state, const PosIdx pos, Value ** args, Valu std::ostringstream out; NixStringContext context; printValueAsJSON(state, true, *args[0], pos, out, context); - v.mkString(out.view(), context); + v.mkString(out.view(), context, state.mem); } static RegisterPrimOp primop_toJSON({ @@ -4404,7 +4407,7 @@ static void prim_toString(EvalState & state, const PosIdx pos, Value ** args, Va 
NixStringContext context; auto s = state.coerceToString( pos, *args[0], context, "while evaluating the first argument passed to builtins.toString", true, false); - v.mkString(*s, context); + v.mkString(*s, context, state.mem); } static RegisterPrimOp primop_toString({ @@ -4477,7 +4480,7 @@ static void prim_substring(EvalState & state, const PosIdx pos, Value ** args, V auto s = state.coerceToString( pos, *args[2], context, "while evaluating the third argument (the string) passed to builtins.substring"); - v.mkString(NixUInt(start) >= s->size() ? "" : s->substr(start, _len), context); + v.mkString(NixUInt(start) >= s->size() ? "" : s->substr(start, _len), context, state.mem); } static RegisterPrimOp primop_substring({ @@ -4875,7 +4878,7 @@ static void prim_concatStringsSep(EvalState & state, const PosIdx pos, Value ** "while evaluating one element of the list of strings to concat passed to builtins.concatStringsSep"); } - v.mkString(res, context); + v.mkString(res, context, state.mem); } static RegisterPrimOp primop_concatStringsSep({ @@ -4950,7 +4953,7 @@ static void prim_replaceStrings(EvalState & state, const PosIdx pos, Value ** ar } } - v.mkString(res, context); + v.mkString(res, context, state.mem); } static RegisterPrimOp primop_replaceStrings({ diff --git a/src/libexpr/primops/context.cc b/src/libexpr/primops/context.cc index 8a9fe42e8..2c5add148 100644 --- a/src/libexpr/primops/context.cc +++ b/src/libexpr/primops/context.cc @@ -69,7 +69,7 @@ static void prim_unsafeDiscardOutputDependency(EvalState & state, const PosIdx p } } - v.mkString(*s, context2); + v.mkString(*s, context2, state.mem); } static RegisterPrimOp primop_unsafeDiscardOutputDependency( @@ -137,7 +137,7 @@ static void prim_addDrvOutputDependencies(EvalState & state, const PosIdx pos, V context.begin()->raw)}), }; - v.mkString(*s, context2); + v.mkString(*s, context2, state.mem); } static RegisterPrimOp primop_addDrvOutputDependencies( @@ -321,7 +321,7 @@ static void 
prim_appendContext(EvalState & state, const PosIdx pos, Value ** arg } } - v.mkString(orig, context); + v.mkString(orig, context, state.mem); } static RegisterPrimOp primop_appendContext({.name = "__appendContext", .arity = 2, .fun = prim_appendContext}); From abb7d2a96e6fb460c8392ab3372d7ca9b62749a7 Mon Sep 17 00:00:00 2001 From: John Ericson Date: Tue, 11 Nov 2025 11:44:16 -0500 Subject: [PATCH 206/213] Sort the `builtins.fetchTree` doc's lists This makes the output easier to compare with the new machine-generated lists in #9732. The hand-curated order did have the advantage of putting more important attributes at the top, but I don't think it is worth preserving that when `std::map` is so much easier to work with. The right solution to leading the reader to the more important attributes is to call them out in the intro texts. --- src/libexpr/primops/fetchTree.cc | 132 +++++++++++++++---------------- 1 file changed, 66 insertions(+), 66 deletions(-) diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index b49bd02e7..2b140be8d 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -317,77 +317,11 @@ static RegisterPrimOp primop_fetchTree({ > } > ``` - - `"tarball"` - - Download a tar archive and extract it into the Nix store. - This has the same underlying implementation as [`builtins.fetchTarball`](@docroot@/language/builtins.md#builtins-fetchTarball) - - - `url` (String, required) - - > **Example** - > - > ```nix - > fetchTree { - > type = "tarball"; - > url = "https://github.com/NixOS/nixpkgs/tarball/nixpkgs-23.11"; - > } - > ``` - - `"git"` Fetch a Git tree and copy it to the Nix store. This is similar to [`builtins.fetchGit`](@docroot@/language/builtins.md#builtins-fetchGit). - - `url` (String, required) - - The URL formats supported are the same as for Git itself. 
- - > **Example** - > - > ```nix - > fetchTree { - > type = "git"; - > url = "git@github.com:NixOS/nixpkgs.git"; - > } - > ``` - - > **Note** - > - > If the URL points to a local directory, and no `ref` or `rev` is given, Nix only considers files added to the Git index, as listed by `git ls-files` but use the *current file contents* of the Git working directory. - - - `ref` (String, optional) - - By default, this has no effect. This becomes relevant only once `shallow` cloning is disabled. - - A [Git reference](https://git-scm.com/book/en/v2/Git-Internals-Git-References), such as a branch or tag name. - - Default: `"HEAD"` - - - `rev` (String, optional) - - A Git revision; a commit hash. - - Default: the tip of `ref` - - - `shallow` (Bool, optional) - - Make a shallow clone when fetching the Git tree. - When this is enabled, the options `ref` and `allRefs` have no effect anymore. - - Default: `true` - - - `submodules` (Bool, optional) - - Also fetch submodules if available. - - Default: `false` - - - `lfs` (Bool, optional) - - Fetch any [Git LFS](https://git-lfs.com/) files. - - Default: `false` - - `allRefs` (Bool, optional) By default, this has no effect. This becomes relevant only once `shallow` cloning is disabled. @@ -405,6 +339,26 @@ static RegisterPrimOp primop_fetchTree({ If set, pass through the value to the output attribute set. Otherwise, generated from the fetched Git tree. + - `lfs` (Bool, optional) + + Fetch any [Git LFS](https://git-lfs.com/) files. + + Default: `false` + + - `ref` (String, optional) + + By default, this has no effect. This becomes relevant only once `shallow` cloning is disabled. + + A [Git reference](https://git-scm.com/book/en/v2/Git-Internals-Git-References), such as a branch or tag name. + + Default: `"HEAD"` + + - `rev` (String, optional) + + A Git revision; a commit hash. + + Default: the tip of `ref` + - `revCount` (Integer, optional) Number of revisions in the history of the Git repository before the fetched commit. 
@@ -412,6 +366,52 @@ static RegisterPrimOp primop_fetchTree({ If set, pass through the value to the output attribute set. Otherwise, generated from the fetched Git tree. + - `shallow` (Bool, optional) + + Make a shallow clone when fetching the Git tree. + When this is enabled, the options `ref` and `allRefs` have no effect anymore. + + Default: `true` + + - `submodules` (Bool, optional) + + Also fetch submodules if available. + + Default: `false` + + - `url` (String, required) + + The URL formats supported are the same as for Git itself. + + > **Example** + > + > ```nix + > fetchTree { + > type = "git"; + > url = "git@github.com:NixOS/nixpkgs.git"; + > } + > ``` + + > **Note** + > + > If the URL points to a local directory, and no `ref` or `rev` is given, Nix only considers files added to the Git index, as listed by `git ls-files` but use the *current file contents* of the Git working directory. + + - `"tarball"` + + Download a tar archive and extract it into the Nix store. + This has the same underlying implementation as [`builtins.fetchTarball`](@docroot@/language/builtins.md#builtins-fetchTarball) + + - `url` (String, required) + + > **Example** + > + > ```nix + > fetchTree { + > type = "tarball"; + > url = "https://github.com/NixOS/nixpkgs/tarball/nixpkgs-23.11"; + > } + > ``` + The following input types are still subject to change: - `"path"` From ddc3fba9fb114b20cca5af77ce786eb7768211b0 Mon Sep 17 00:00:00 2001 From: Alex Auvolat Date: Wed, 12 Nov 2025 20:16:00 +0100 Subject: [PATCH 207/213] doc: fix "Nix Archive (NAR) format" specification For executable files in NAR archives, the `executable` tag is followed by an empty string, which was not indicated correctly in the specification. Adding the empty string can be seen in `src/libutil/archive.cc:62`. 
Here is an example of a hexdump of a NAR archives where this empty string can be seen: ``` 00000730 65 6e 74 72 79 00 00 00 01 00 00 00 00 00 00 00 |entry...........| 00000740 28 00 00 00 00 00 00 00 04 00 00 00 00 00 00 00 |(...............| 00000750 6e 61 6d 65 00 00 00 00 10 00 00 00 00 00 00 00 |name............| 00000760 6c 69 62 6d 70 66 72 2e 73 6f 2e 36 2e 32 2e 31 |libmpfr.so.6.2.1| 00000770 04 00 00 00 00 00 00 00 6e 6f 64 65 00 00 00 00 |........node....| 00000780 01 00 00 00 00 00 00 00 28 00 00 00 00 00 00 00 |........(.......| 00000790 04 00 00 00 00 00 00 00 74 79 70 65 00 00 00 00 |........type....| 000007a0 07 00 00 00 00 00 00 00 72 65 67 75 6c 61 72 00 |........regular.| 000007b0 0a 00 00 00 00 00 00 00 65 78 65 63 75 74 61 62 |........executab| 000007c0 6c 65 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |le..............| 000007d0 08 00 00 00 00 00 00 00 63 6f 6e 74 65 6e 74 73 |........contents| 000007e0 a0 16 0c 00 00 00 00 00 7f 45 4c 46 02 01 01 00 |.........ELF....| 000007f0 00 00 00 00 00 00 00 00 03 00 3e 00 01 00 00 00 |..........>.....| 00000800 00 00 00 00 00 00 00 00 40 00 00 00 00 00 00 00 |........@.......| 00000810 e0 0e 0c 00 00 00 00 00 00 00 00 00 40 00 38 00 |............@.8.| 00000820 0b 00 40 00 1f 00 1e 00 01 00 00 00 04 00 00 00 |..@.............| 00000830 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 |................| ``` (taken from `09zrxnn4j5hjxqj93xvxrl1dpmq4cyajas3yf7a7y0i7h81m6bd4.nar`, available on `cache.nixos.org`) --- doc/manual/source/protocols/nix-archive/index.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/manual/source/protocols/nix-archive/index.md b/doc/manual/source/protocols/nix-archive/index.md index bd2a8e833..98769d59e 100644 --- a/doc/manual/source/protocols/nix-archive/index.md +++ b/doc/manual/source/protocols/nix-archive/index.md @@ -24,7 +24,7 @@ nar-obj-inner | str("type"), str("directory") directory ; -regular = [ str("executable") ], str("contents"), str(contents); 
+regular = [ str("executable"), str("") ], str("contents"), str(contents); symlink = str("target"), str(target); @@ -52,4 +52,4 @@ The Nix Archive (NAR) format is also formally described using [Kaitai Struct](ht {{#include nar.ksy}} ``` -The source of the spec can be found [here](https://github.com/nixos/nix/blob/master/src/nix-manual/source/protocols/nix-archive/nar.ksy). Contributions and improvements to the spec are welcomed. \ No newline at end of file +The source of the spec can be found [here](https://github.com/nixos/nix/blob/master/src/nix-manual/source/protocols/nix-archive/nar.ksy). Contributions and improvements to the spec are welcomed. From 292bd390afbe7f8d3014251faba4cef25b516373 Mon Sep 17 00:00:00 2001 From: Robert Hensing Date: Wed, 12 Nov 2025 21:08:27 +0100 Subject: [PATCH 208/213] Remove setting from Input This is more straightforward and not subject to undocumented memory safety restrictions. Also easier to test. --- src/libcmd/common-eval-args.cc | 7 +- src/libcmd/installables.cc | 1 + src/libcmd/repl.cc | 2 +- src/libexpr/primops/fetchMercurial.cc | 2 +- src/libexpr/primops/fetchTree.cc | 9 +- src/libfetchers-tests/git.cc | 2 +- src/libfetchers/fetchers.cc | 28 +++---- src/libfetchers/git.cc | 38 ++++----- src/libfetchers/github.cc | 83 +++++++++---------- .../include/nix/fetchers/fetchers.hh | 24 ++---- .../include/nix/fetchers/input-cache.hh | 4 +- .../include/nix/fetchers/registry.hh | 5 +- src/libfetchers/indirect.cc | 7 +- src/libfetchers/input-cache.cc | 10 +-- src/libfetchers/mercurial.cc | 25 +++--- src/libfetchers/path.cc | 11 +-- src/libfetchers/registry.cc | 9 +- src/libfetchers/tarball.cc | 25 +++--- src/libflake/flake-primops.cc | 2 +- src/libflake/flake.cc | 11 ++- src/libflake/flakeref.cc | 10 ++- src/libflake/include/nix/flake/flakeref.hh | 8 +- src/libflake/lockfile.cc | 4 +- src/nix/flake-prefetch-inputs.cc | 2 +- src/nix/flake.cc | 10 +-- src/nix/profile.cc | 5 +- src/nix/registry.cc | 5 +- 27 files changed, 181 
insertions(+), 168 deletions(-) diff --git a/src/libcmd/common-eval-args.cc b/src/libcmd/common-eval-args.cc index f7e086c16..82deb0b71 100644 --- a/src/libcmd/common-eval-args.cc +++ b/src/libcmd/common-eval-args.cc @@ -33,7 +33,8 @@ EvalSettings evalSettings{ // FIXME `parseFlakeRef` should take a `std::string_view`. auto flakeRef = parseFlakeRef(fetchSettings, std::string{rest}, {}, true, false); debug("fetching flake search path element '%s''", rest); - auto [accessor, lockedRef] = flakeRef.resolve(state.store).lazyFetch(state.store); + auto [accessor, lockedRef] = + flakeRef.resolve(fetchSettings, state.store).lazyFetch(fetchSettings, state.store); auto storePath = nix::fetchToStore( state.fetchSettings, *state.store, @@ -131,7 +132,7 @@ MixEvalArgs::MixEvalArgs() fetchers::Attrs extraAttrs; if (to.subdir != "") extraAttrs["dir"] = to.subdir; - fetchers::overrideRegistry(from.input, to.input, extraAttrs); + fetchers::overrideRegistry(fetchSettings, from.input, to.input, extraAttrs); }}, .completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) { completeFlakeRef(completions, openStore(), prefix); @@ -187,7 +188,7 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * bas else if (hasPrefix(s, "flake:")) { experimentalFeatureSettings.require(Xp::Flakes); auto flakeRef = parseFlakeRef(fetchSettings, std::string(s.substr(6)), {}, true, false); - auto [accessor, lockedRef] = flakeRef.resolve(state.store).lazyFetch(state.store); + auto [accessor, lockedRef] = flakeRef.resolve(fetchSettings, state.store).lazyFetch(fetchSettings, state.store); auto storePath = nix::fetchToStore( state.fetchSettings, *state.store, SourcePath(accessor), FetchMode::Copy, lockedRef.input.getName()); state.allowPath(storePath); diff --git a/src/libcmd/installables.cc b/src/libcmd/installables.cc index f0f36378b..8a8269c6b 100644 --- a/src/libcmd/installables.cc +++ b/src/libcmd/installables.cc @@ -185,6 +185,7 @@ 
MixFlakeOptions::MixFlakeOptions() } overrideRegistry( + fetchSettings, fetchers::Input::fromAttrs(fetchSettings, {{"type", "indirect"}, {"id", inputName}}), input3->lockedRef.input, extraAttrs); diff --git a/src/libcmd/repl.cc b/src/libcmd/repl.cc index a308b731d..00fca4e0c 100644 --- a/src/libcmd/repl.cc +++ b/src/libcmd/repl.cc @@ -738,7 +738,7 @@ void NixRepl::loadFlake(const std::string & flakeRefS) } auto flakeRef = parseFlakeRef(fetchSettings, flakeRefS, cwd.string(), true); - if (evalSettings.pureEval && !flakeRef.input.isLocked()) + if (evalSettings.pureEval && !flakeRef.input.isLocked(fetchSettings)) throw Error("cannot use ':load-flake' on locked flake reference '%s' (use --impure to override)", flakeRefS); Value v; diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc index c856deede..e1a9c8679 100644 --- a/src/libexpr/primops/fetchMercurial.cc +++ b/src/libexpr/primops/fetchMercurial.cc @@ -81,7 +81,7 @@ static void prim_fetchMercurial(EvalState & state, const PosIdx pos, Value ** ar attrs.insert_or_assign("rev", rev->gitRev()); auto input = fetchers::Input::fromAttrs(state.fetchSettings, std::move(attrs)); - auto [storePath, input2] = input.fetchToStore(state.store); + auto [storePath, input2] = input.fetchToStore(state.fetchSettings, state.store); auto attrs2 = state.buildBindings(8); state.mkStorePathString(storePath, attrs2.alloc(state.s.outPath)); diff --git a/src/libexpr/primops/fetchTree.cc b/src/libexpr/primops/fetchTree.cc index b49bd02e7..81fb8c00a 100644 --- a/src/libexpr/primops/fetchTree.cc +++ b/src/libexpr/primops/fetchTree.cc @@ -82,7 +82,7 @@ struct FetchTreeParams static void fetchTree( EvalState & state, const PosIdx pos, Value ** args, Value & v, const FetchTreeParams & params = FetchTreeParams{}) { - fetchers::Input input{state.fetchSettings}; + fetchers::Input input{}; NixStringContext context; std::optional type; auto fetcher = params.isFetchGit ? 
"fetchGit" : "fetchTree"; @@ -194,9 +194,9 @@ static void fetchTree( } if (!state.settings.pureEval && !input.isDirect() && experimentalFeatureSettings.isEnabled(Xp::Flakes)) - input = lookupInRegistries(state.store, input, fetchers::UseRegistries::Limited).first; + input = lookupInRegistries(state.fetchSettings, state.store, input, fetchers::UseRegistries::Limited).first; - if (state.settings.pureEval && !input.isLocked()) { + if (state.settings.pureEval && !input.isLocked(state.fetchSettings)) { if (input.getNarHash()) warn( "Input '%s' is unlocked (e.g. lacks a Git revision) but is checked by NAR hash. " @@ -219,7 +219,8 @@ static void fetchTree( throw Error("input '%s' is not allowed to use the '__final' attribute", input.to_string()); } - auto cachedInput = state.inputCache->getAccessor(state.store, input, fetchers::UseRegistries::No); + auto cachedInput = + state.inputCache->getAccessor(state.fetchSettings, state.store, input, fetchers::UseRegistries::No); auto storePath = state.mountInput(cachedInput.lockedInput, input, cachedInput.accessor); diff --git a/src/libfetchers-tests/git.cc b/src/libfetchers-tests/git.cc index 4f0e0d974..e8092b86c 100644 --- a/src/libfetchers-tests/git.cc +++ b/src/libfetchers-tests/git.cc @@ -196,7 +196,7 @@ TEST_F(GitTest, submodulePeriodSupport) {"ref", "main"}, }); - auto [accessor, i] = input.getAccessor(store); + auto [accessor, i] = input.getAccessor(settings, store); ASSERT_EQ(accessor->readFile(CanonPath("deps/sub/lib.txt")), "hello from submodule\n"); } diff --git a/src/libfetchers/fetchers.cc b/src/libfetchers/fetchers.cc index c9c0fffa2..1b1b39a9c 100644 --- a/src/libfetchers/fetchers.cc +++ b/src/libfetchers/fetchers.cc @@ -89,7 +89,7 @@ Input Input::fromAttrs(const Settings & settings, Attrs && attrs) // but not all of them. Doing this is to support those other // operations which are supposed to be robust on // unknown/uninterpretable inputs. 
- Input input{settings}; + Input input; input.attrs = attrs; fixupInput(input); return input; @@ -159,9 +159,9 @@ bool Input::isDirect() const return !scheme || scheme->isDirect(*this); } -bool Input::isLocked() const +bool Input::isLocked(const Settings & settings) const { - return scheme && scheme->isLocked(*this); + return scheme && scheme->isLocked(settings, *this); } bool Input::isFinal() const @@ -198,17 +198,17 @@ bool Input::contains(const Input & other) const } // FIXME: remove -std::pair Input::fetchToStore(ref store) const +std::pair Input::fetchToStore(const Settings & settings, ref store) const { if (!scheme) throw Error("cannot fetch unsupported input '%s'", attrsToJSON(toAttrs())); auto [storePath, input] = [&]() -> std::pair { try { - auto [accessor, result] = getAccessorUnchecked(store); + auto [accessor, result] = getAccessorUnchecked(settings, store); auto storePath = - nix::fetchToStore(*settings, *store, SourcePath(accessor), FetchMode::Copy, result.getName()); + nix::fetchToStore(settings, *store, SourcePath(accessor), FetchMode::Copy, result.getName()); auto narHash = store->queryPathInfo(storePath)->narHash; result.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true)); @@ -297,10 +297,10 @@ void Input::checkLocks(Input specified, Input & result) } } -std::pair, Input> Input::getAccessor(ref store) const +std::pair, Input> Input::getAccessor(const Settings & settings, ref store) const { try { - auto [accessor, result] = getAccessorUnchecked(store); + auto [accessor, result] = getAccessorUnchecked(settings, store); result.attrs.insert_or_assign("__final", Explicit(true)); @@ -313,7 +313,7 @@ std::pair, Input> Input::getAccessor(ref store) const } } -std::pair, Input> Input::getAccessorUnchecked(ref store) const +std::pair, Input> Input::getAccessorUnchecked(const Settings & settings, ref store) const { // FIXME: cache the accessor @@ -349,7 +349,7 @@ std::pair, Input> Input::getAccessorUnchecked(ref sto if 
(accessor->fingerprint) { ContentAddressMethod method = ContentAddressMethod::Raw::NixArchive; auto cacheKey = makeFetchToStoreCacheKey(getName(), *accessor->fingerprint, method, "/"); - settings->getCache()->upsert(cacheKey, *store, {}, storePath); + settings.getCache()->upsert(cacheKey, *store, {}, storePath); } accessor->setPathDisplay("«" + to_string() + "»"); @@ -360,7 +360,7 @@ std::pair, Input> Input::getAccessorUnchecked(ref sto } } - auto [accessor, result] = scheme->getAccessor(store, *this); + auto [accessor, result] = scheme->getAccessor(settings, store, *this); if (!accessor->fingerprint) accessor->fingerprint = result.getFingerprint(store); @@ -377,10 +377,10 @@ Input Input::applyOverrides(std::optional ref, std::optional return scheme->applyOverrides(*this, ref, rev); } -void Input::clone(const Path & destDir) const +void Input::clone(const Settings & settings, const Path & destDir) const { assert(scheme); - scheme->clone(*this, destDir); + scheme->clone(settings, *this, destDir); } std::optional Input::getSourcePath() const @@ -493,7 +493,7 @@ void InputScheme::putFile( throw Error("input '%s' does not support modifying file '%s'", input.to_string(), path); } -void InputScheme::clone(const Input & input, const Path & destDir) const +void InputScheme::clone(const Settings & settings, const Input & input, const Path & destDir) const { throw Error("do not know how to clone input '%s'", input.to_string()); } diff --git a/src/libfetchers/git.cc b/src/libfetchers/git.cc index 7247442ee..d162d1656 100644 --- a/src/libfetchers/git.cc +++ b/src/libfetchers/git.cc @@ -229,7 +229,7 @@ struct GitInputScheme : InputScheme if (auto ref = maybeGetStrAttr(attrs, "ref"); ref && !isLegalRefName(*ref)) throw BadURL("invalid Git branch/tag name '%s'", *ref); - Input input{settings}; + Input input{}; input.attrs = attrs; input.attrs["url"] = fixGitURL(getStrAttr(attrs, "url")).to_string(); getShallowAttr(input); @@ -278,7 +278,7 @@ struct GitInputScheme : InputScheme 
return res; } - void clone(const Input & input, const Path & destDir) const override + void clone(const Settings & settings, const Input & input, const Path & destDir) const override { auto repoInfo = getRepoInfo(input); @@ -623,7 +623,7 @@ struct GitInputScheme : InputScheme } std::pair, Input> - getAccessorFromCommit(ref store, RepoInfo & repoInfo, Input && input) const + getAccessorFromCommit(const Settings & settings, ref store, RepoInfo & repoInfo, Input && input) const { assert(!repoInfo.workdirInfo.isDirty); @@ -733,10 +733,10 @@ struct GitInputScheme : InputScheme auto rev = *input.getRev(); - input.attrs.insert_or_assign("lastModified", getLastModified(*input.settings, repoInfo, repoDir, rev)); + input.attrs.insert_or_assign("lastModified", getLastModified(settings, repoInfo, repoDir, rev)); if (!getShallowAttr(input)) - input.attrs.insert_or_assign("revCount", getRevCount(*input.settings, repoInfo, repoDir, rev)); + input.attrs.insert_or_assign("revCount", getRevCount(settings, repoInfo, repoDir, rev)); printTalkative("using revision %s of repo '%s'", rev.gitRev(), repoInfo.locationToArg()); @@ -779,8 +779,8 @@ struct GitInputScheme : InputScheme attrs.insert_or_assign("submodules", Explicit{true}); attrs.insert_or_assign("lfs", Explicit{smudgeLfs}); attrs.insert_or_assign("allRefs", Explicit{true}); - auto submoduleInput = fetchers::Input::fromAttrs(*input.settings, std::move(attrs)); - auto [submoduleAccessor, submoduleInput2] = submoduleInput.getAccessor(store); + auto submoduleInput = fetchers::Input::fromAttrs(settings, std::move(attrs)); + auto [submoduleAccessor, submoduleInput2] = submoduleInput.getAccessor(settings, store); submoduleAccessor->setPathDisplay("«" + submoduleInput.to_string() + "»"); mounts.insert_or_assign(submodule.path, submoduleAccessor); } @@ -797,7 +797,7 @@ struct GitInputScheme : InputScheme } std::pair, Input> - getAccessorFromWorkdir(ref store, RepoInfo & repoInfo, Input && input) const + getAccessorFromWorkdir(const 
Settings & settings, ref store, RepoInfo & repoInfo, Input && input) const { auto repoPath = repoInfo.getPath().value(); @@ -829,8 +829,8 @@ struct GitInputScheme : InputScheme // TODO: fall back to getAccessorFromCommit-like fetch when submodules aren't checked out // attrs.insert_or_assign("allRefs", Explicit{ true }); - auto submoduleInput = fetchers::Input::fromAttrs(*input.settings, std::move(attrs)); - auto [submoduleAccessor, submoduleInput2] = submoduleInput.getAccessor(store); + auto submoduleInput = fetchers::Input::fromAttrs(settings, std::move(attrs)); + auto [submoduleAccessor, submoduleInput2] = submoduleInput.getAccessor(settings, store); submoduleAccessor->setPathDisplay("«" + submoduleInput.to_string() + "»"); /* If the submodule is dirty, mark this repo dirty as @@ -857,12 +857,12 @@ struct GitInputScheme : InputScheme input.attrs.insert_or_assign("rev", rev.gitRev()); if (!getShallowAttr(input)) { input.attrs.insert_or_assign( - "revCount", rev == nullRev ? 0 : getRevCount(*input.settings, repoInfo, repoPath, rev)); + "revCount", rev == nullRev ? 0 : getRevCount(settings, repoInfo, repoPath, rev)); } verifyCommit(input, repo); } else { - repoInfo.warnDirty(*input.settings); + repoInfo.warnDirty(settings); if (repoInfo.workdirInfo.headRev) { input.attrs.insert_or_assign("dirtyRev", repoInfo.workdirInfo.headRev->gitRev() + "-dirty"); @@ -874,14 +874,14 @@ struct GitInputScheme : InputScheme input.attrs.insert_or_assign( "lastModified", - repoInfo.workdirInfo.headRev - ? getLastModified(*input.settings, repoInfo, repoPath, *repoInfo.workdirInfo.headRev) - : 0); + repoInfo.workdirInfo.headRev ? 
getLastModified(settings, repoInfo, repoPath, *repoInfo.workdirInfo.headRev) + : 0); return {accessor, std::move(input)}; } - std::pair, Input> getAccessor(ref store, const Input & _input) const override + std::pair, Input> + getAccessor(const Settings & settings, ref store, const Input & _input) const override { Input input(_input); @@ -897,8 +897,8 @@ struct GitInputScheme : InputScheme } auto [accessor, final] = input.getRef() || input.getRev() || !repoInfo.getPath() - ? getAccessorFromCommit(store, repoInfo, std::move(input)) - : getAccessorFromWorkdir(store, repoInfo, std::move(input)); + ? getAccessorFromCommit(settings, store, repoInfo, std::move(input)) + : getAccessorFromWorkdir(settings, store, repoInfo, std::move(input)); return {accessor, std::move(final)}; } @@ -934,7 +934,7 @@ struct GitInputScheme : InputScheme } } - bool isLocked(const Input & input) const override + bool isLocked(const Settings & settings, const Input & input) const override { auto rev = input.getRev(); return rev && rev != nullRev; diff --git a/src/libfetchers/github.cc b/src/libfetchers/github.cc index 594f3e226..bdc196386 100644 --- a/src/libfetchers/github.cc +++ b/src/libfetchers/github.cc @@ -92,7 +92,7 @@ struct GitArchiveInputScheme : InputScheme if (ref && rev) throw BadURL("URL '%s' contains both a commit hash and a branch/tag name %s %s", url, *ref, rev->gitRev()); - Input input{settings}; + Input input{}; input.attrs.insert_or_assign("type", std::string{schemeName()}); input.attrs.insert_or_assign("owner", path[0]); input.attrs.insert_or_assign("repo", path[1]); @@ -129,7 +129,7 @@ struct GitArchiveInputScheme : InputScheme getStrAttr(attrs, "owner"); getStrAttr(attrs, "repo"); - Input input{settings}; + Input input{}; input.attrs = attrs; return input; } @@ -233,9 +233,9 @@ struct GitArchiveInputScheme : InputScheme std::optional treeHash; }; - virtual RefInfo getRevFromRef(nix::ref store, const Input & input) const = 0; + virtual RefInfo getRevFromRef(const Settings & 
settings, nix::ref store, const Input & input) const = 0; - virtual DownloadUrl getDownloadUrl(const Input & input) const = 0; + virtual DownloadUrl getDownloadUrl(const Settings & settings, const Input & input) const = 0; struct TarballInfo { @@ -243,7 +243,7 @@ struct GitArchiveInputScheme : InputScheme time_t lastModified; }; - std::pair downloadArchive(ref store, Input input) const + std::pair downloadArchive(const Settings & settings, ref store, Input input) const { if (!maybeGetStrAttr(input.attrs, "ref")) input.attrs.insert_or_assign("ref", "HEAD"); @@ -252,7 +252,7 @@ struct GitArchiveInputScheme : InputScheme auto rev = input.getRev(); if (!rev) { - auto refInfo = getRevFromRef(store, input); + auto refInfo = getRevFromRef(settings, store, input); rev = refInfo.rev; upstreamTreeHash = refInfo.treeHash; debug("HEAD revision for '%s' is %s", input.to_string(), refInfo.rev.gitRev()); @@ -261,7 +261,7 @@ struct GitArchiveInputScheme : InputScheme input.attrs.erase("ref"); input.attrs.insert_or_assign("rev", rev->gitRev()); - auto cache = input.settings->getCache(); + auto cache = settings.getCache(); Cache::Key treeHashKey{"gitRevToTreeHash", {{"rev", rev->gitRev()}}}; Cache::Key lastModifiedKey{"gitRevToLastModified", {{"rev", rev->gitRev()}}}; @@ -270,7 +270,7 @@ struct GitArchiveInputScheme : InputScheme if (auto lastModifiedAttrs = cache->lookup(lastModifiedKey)) { auto treeHash = getRevAttr(*treeHashAttrs, "treeHash"); auto lastModified = getIntAttr(*lastModifiedAttrs, "lastModified"); - if (input.settings->getTarballCache()->hasObject(treeHash)) + if (settings.getTarballCache()->hasObject(treeHash)) return {std::move(input), TarballInfo{.treeHash = treeHash, .lastModified = (time_t) lastModified}}; else debug("Git tree with hash '%s' has disappeared from the cache, refetching...", treeHash.gitRev()); @@ -278,7 +278,7 @@ struct GitArchiveInputScheme : InputScheme } /* Stream the tarball into the tarball cache. 
*/ - auto url = getDownloadUrl(input); + auto url = getDownloadUrl(settings, input); auto source = sinkToSource([&](Sink & sink) { FileTransferRequest req(url.url); @@ -290,7 +290,7 @@ struct GitArchiveInputScheme : InputScheme *logger, lvlInfo, actUnknown, fmt("unpacking '%s' into the Git cache", input.to_string())); TarArchive archive{*source}; - auto tarballCache = input.settings->getTarballCache(); + auto tarballCache = settings.getTarballCache(); auto parseSink = tarballCache->getFileSystemObjectSink(); auto lastModified = unpackTarfileToSink(archive, *parseSink); auto tree = parseSink->flush(); @@ -315,9 +315,10 @@ struct GitArchiveInputScheme : InputScheme return {std::move(input), tarballInfo}; } - std::pair, Input> getAccessor(ref store, const Input & _input) const override + std::pair, Input> + getAccessor(const Settings & settings, ref store, const Input & _input) const override { - auto [input, tarballInfo] = downloadArchive(store, _input); + auto [input, tarballInfo] = downloadArchive(settings, store, _input); #if 0 input.attrs.insert_or_assign("treeHash", tarballInfo.treeHash.gitRev()); @@ -325,19 +326,18 @@ struct GitArchiveInputScheme : InputScheme input.attrs.insert_or_assign("lastModified", uint64_t(tarballInfo.lastModified)); auto accessor = - input.settings->getTarballCache()->getAccessor(tarballInfo.treeHash, false, "«" + input.to_string() + "»"); + settings.getTarballCache()->getAccessor(tarballInfo.treeHash, false, "«" + input.to_string() + "»"); return {accessor, input}; } - bool isLocked(const Input & input) const override + bool isLocked(const Settings & settings, const Input & input) const override { /* Since we can't verify the integrity of the tarball from the Git revision alone, we also require a NAR hash for locking. FIXME: in the future, we may want to require a Git tree hash instead of a NAR hash. 
*/ - return input.getRev().has_value() - && (input.settings->trustTarballsFromGitForges || input.getNarHash().has_value()); + return input.getRev().has_value() && (settings.trustTarballsFromGitForges || input.getNarHash().has_value()); } std::optional experimentalFeature() const override @@ -387,7 +387,7 @@ struct GitHubInputScheme : GitArchiveInputScheme return getStrAttr(input.attrs, "repo"); } - RefInfo getRevFromRef(nix::ref store, const Input & input) const override + RefInfo getRevFromRef(const Settings & settings, nix::ref store, const Input & input) const override { auto host = getHost(input); auto url = fmt( @@ -397,9 +397,9 @@ struct GitHubInputScheme : GitArchiveInputScheme getRepo(input), *input.getRef()); - Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input); + Headers headers = makeHeadersWithAuthTokens(settings, host, input); - auto downloadResult = downloadFile(store, *input.settings, url, "source", headers); + auto downloadResult = downloadFile(store, settings, url, "source", headers); auto json = nlohmann::json::parse( store->requireStoreObjectAccessor(downloadResult.storePath)->readFile(CanonPath::root)); @@ -408,11 +408,11 @@ struct GitHubInputScheme : GitArchiveInputScheme .treeHash = Hash::parseAny(std::string{json["commit"]["tree"]["sha"]}, HashAlgorithm::SHA1)}; } - DownloadUrl getDownloadUrl(const Input & input) const override + DownloadUrl getDownloadUrl(const Settings & settings, const Input & input) const override { auto host = getHost(input); - Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input); + Headers headers = makeHeadersWithAuthTokens(settings, host, input); // If we have no auth headers then we default to the public archive // urls so we do not run into rate limits. 
@@ -426,12 +426,12 @@ struct GitHubInputScheme : GitArchiveInputScheme return DownloadUrl{parseURL(url), headers}; } - void clone(const Input & input, const Path & destDir) const override + void clone(const Settings & settings, const Input & input, const Path & destDir) const override { auto host = getHost(input); - Input::fromURL(*input.settings, fmt("git+https://%s/%s/%s.git", host, getOwner(input), getRepo(input))) + Input::fromURL(settings, fmt("git+https://%s/%s/%s.git", host, getOwner(input), getRepo(input))) .applyOverrides(input.getRef(), input.getRev()) - .clone(destDir); + .clone(settings, destDir); } }; @@ -461,7 +461,7 @@ struct GitLabInputScheme : GitArchiveInputScheme return std::make_pair(token.substr(0, fldsplit), token.substr(fldsplit + 1)); } - RefInfo getRevFromRef(nix::ref store, const Input & input) const override + RefInfo getRevFromRef(const Settings & settings, nix::ref store, const Input & input) const override { auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com"); // See rate limiting note below @@ -472,9 +472,9 @@ struct GitLabInputScheme : GitArchiveInputScheme getStrAttr(input.attrs, "repo"), *input.getRef()); - Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input); + Headers headers = makeHeadersWithAuthTokens(settings, host, input); - auto downloadResult = downloadFile(store, *input.settings, url, "source", headers); + auto downloadResult = downloadFile(store, settings, url, "source", headers); auto json = nlohmann::json::parse( store->requireStoreObjectAccessor(downloadResult.storePath)->readFile(CanonPath::root)); @@ -488,7 +488,7 @@ struct GitLabInputScheme : GitArchiveInputScheme } } - DownloadUrl getDownloadUrl(const Input & input) const override + DownloadUrl getDownloadUrl(const Settings & settings, const Input & input) const override { // This endpoint has a rate limit threshold that may be // server-specific and vary based whether the user is @@ -503,19 +503,19 @@ struct 
GitLabInputScheme : GitArchiveInputScheme getStrAttr(input.attrs, "repo"), input.getRev()->to_string(HashFormat::Base16, false)); - Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input); + Headers headers = makeHeadersWithAuthTokens(settings, host, input); return DownloadUrl{parseURL(url), headers}; } - void clone(const Input & input, const Path & destDir) const override + void clone(const Settings & settings, const Input & input, const Path & destDir) const override { auto host = maybeGetStrAttr(input.attrs, "host").value_or("gitlab.com"); // FIXME: get username somewhere Input::fromURL( - *input.settings, + settings, fmt("git+https://%s/%s/%s.git", host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"))) .applyOverrides(input.getRef(), input.getRev()) - .clone(destDir); + .clone(settings, destDir); } }; @@ -536,7 +536,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme // Once it is implemented, however, should work as expected. } - RefInfo getRevFromRef(nix::ref store, const Input & input) const override + RefInfo getRevFromRef(const Settings & settings, nix::ref store, const Input & input) const override { // TODO: In the future, when the sourcehut graphql API is implemented for mercurial // and with anonymous access, this method should use it instead. 
@@ -547,11 +547,11 @@ struct SourceHutInputScheme : GitArchiveInputScheme auto base_url = fmt("https://%s/%s/%s", host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo")); - Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input); + Headers headers = makeHeadersWithAuthTokens(settings, host, input); std::string refUri; if (ref == "HEAD") { - auto downloadFileResult = downloadFile(store, *input.settings, fmt("%s/HEAD", base_url), "source", headers); + auto downloadFileResult = downloadFile(store, settings, fmt("%s/HEAD", base_url), "source", headers); auto contents = store->requireStoreObjectAccessor(downloadFileResult.storePath)->readFile(CanonPath::root); auto remoteLine = git::parseLsRemoteLine(getLine(contents).first); @@ -564,8 +564,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme } std::regex refRegex(refUri); - auto downloadFileResult = - downloadFile(store, *input.settings, fmt("%s/info/refs", base_url), "source", headers); + auto downloadFileResult = downloadFile(store, settings, fmt("%s/info/refs", base_url), "source", headers); auto contents = store->requireStoreObjectAccessor(downloadFileResult.storePath)->readFile(CanonPath::root); std::istringstream is(contents); @@ -583,7 +582,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme return RefInfo{.rev = Hash::parseAny(*id, HashAlgorithm::SHA1)}; } - DownloadUrl getDownloadUrl(const Input & input) const override + DownloadUrl getDownloadUrl(const Settings & settings, const Input & input) const override { auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht"); auto url = @@ -593,18 +592,18 @@ struct SourceHutInputScheme : GitArchiveInputScheme getStrAttr(input.attrs, "repo"), input.getRev()->to_string(HashFormat::Base16, false)); - Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input); + Headers headers = makeHeadersWithAuthTokens(settings, host, input); return DownloadUrl{parseURL(url), headers}; } - void clone(const Input & 
input, const Path & destDir) const override + void clone(const Settings & settings, const Input & input, const Path & destDir) const override { auto host = maybeGetStrAttr(input.attrs, "host").value_or("git.sr.ht"); Input::fromURL( - *input.settings, + settings, fmt("git+https://%s/%s/%s", host, getStrAttr(input.attrs, "owner"), getStrAttr(input.attrs, "repo"))) .applyOverrides(input.getRef(), input.getRev()) - .clone(destDir); + .clone(settings, destDir); } }; diff --git a/src/libfetchers/include/nix/fetchers/fetchers.hh b/src/libfetchers/include/nix/fetchers/fetchers.hh index 9dcd365ea..66915ae0d 100644 --- a/src/libfetchers/include/nix/fetchers/fetchers.hh +++ b/src/libfetchers/include/nix/fetchers/fetchers.hh @@ -36,13 +36,6 @@ struct Input { friend struct InputScheme; - const Settings * settings; - - Input(const Settings & settings) - : settings{&settings} - { - } - std::shared_ptr scheme; // note: can be null Attrs attrs; @@ -87,7 +80,7 @@ public: * attributes like a Git revision or NAR hash that uniquely * identify its contents. */ - bool isLocked() const; + bool isLocked(const Settings & settings) const; /** * Only for relative path flakes, i.e. 'path:./foo', returns the @@ -120,7 +113,7 @@ public: * Fetch the entire input into the Nix store, returning the * location in the Nix store and the locked input. */ - std::pair fetchToStore(ref store) const; + std::pair fetchToStore(const Settings & settings, ref store) const; /** * Check the locking attributes in `result` against @@ -140,17 +133,17 @@ public: * input without copying it to the store. Also return a possibly * unlocked input. 
*/ - std::pair, Input> getAccessor(ref store) const; + std::pair, Input> getAccessor(const Settings & settings, ref store) const; private: - std::pair, Input> getAccessorUnchecked(ref store) const; + std::pair, Input> getAccessorUnchecked(const Settings & settings, ref store) const; public: Input applyOverrides(std::optional ref, std::optional rev) const; - void clone(const Path & destDir) const; + void clone(const Settings & settings, const Path & destDir) const; std::optional getSourcePath() const; @@ -223,7 +216,7 @@ struct InputScheme virtual Input applyOverrides(const Input & input, std::optional ref, std::optional rev) const; - virtual void clone(const Input & input, const Path & destDir) const; + virtual void clone(const Settings & settings, const Input & input, const Path & destDir) const; virtual std::optional getSourcePath(const Input & input) const; @@ -233,7 +226,8 @@ struct InputScheme std::string_view contents, std::optional commitMsg) const; - virtual std::pair, Input> getAccessor(ref store, const Input & input) const = 0; + virtual std::pair, Input> + getAccessor(const Settings & settings, ref store, const Input & input) const = 0; /** * Is this `InputScheme` part of an experimental feature? 
@@ -250,7 +244,7 @@ struct InputScheme return std::nullopt; } - virtual bool isLocked(const Input & input) const + virtual bool isLocked(const Settings & settings, const Input & input) const { return false; } diff --git a/src/libfetchers/include/nix/fetchers/input-cache.hh b/src/libfetchers/include/nix/fetchers/input-cache.hh index 402412071..ad702dfda 100644 --- a/src/libfetchers/include/nix/fetchers/input-cache.hh +++ b/src/libfetchers/include/nix/fetchers/input-cache.hh @@ -3,6 +3,7 @@ namespace nix::fetchers { enum class UseRegistries : int; +struct Settings; struct InputCache { @@ -14,7 +15,8 @@ struct InputCache Attrs extraAttrs; }; - CachedResult getAccessor(ref store, const Input & originalInput, UseRegistries useRegistries); + CachedResult + getAccessor(const Settings & settings, ref store, const Input & originalInput, UseRegistries useRegistries); struct CachedInput { diff --git a/src/libfetchers/include/nix/fetchers/registry.hh b/src/libfetchers/include/nix/fetchers/registry.hh index f705f709d..cfdf3130b 100644 --- a/src/libfetchers/include/nix/fetchers/registry.hh +++ b/src/libfetchers/include/nix/fetchers/registry.hh @@ -59,7 +59,7 @@ Path getUserRegistryPath(); Registries getRegistries(const Settings & settings, ref store); -void overrideRegistry(const Input & from, const Input & to, const Attrs & extraAttrs); +void overrideRegistry(const Settings & settings, const Input & from, const Input & to, const Attrs & extraAttrs); enum class UseRegistries : int { No, @@ -71,6 +71,7 @@ enum class UseRegistries : int { * Rewrite a flakeref using the registries. If `filter` is set, only * use the registries for which the filter function returns true. 
*/ -std::pair lookupInRegistries(ref store, const Input & input, UseRegistries useRegistries); +std::pair +lookupInRegistries(const Settings & settings, ref store, const Input & input, UseRegistries useRegistries); } // namespace nix::fetchers diff --git a/src/libfetchers/indirect.cc b/src/libfetchers/indirect.cc index e05d27adc..66eef14dd 100644 --- a/src/libfetchers/indirect.cc +++ b/src/libfetchers/indirect.cc @@ -44,7 +44,7 @@ struct IndirectInputScheme : InputScheme // FIXME: forbid query params? - Input input{settings}; + Input input{}; input.attrs.insert_or_assign("type", "indirect"); input.attrs.insert_or_assign("id", id); if (rev) @@ -76,7 +76,7 @@ struct IndirectInputScheme : InputScheme if (!std::regex_match(id, flakeRegex)) throw BadURL("'%s' is not a valid flake ID", id); - Input input{settings}; + Input input{}; input.attrs = attrs; return input; } @@ -106,7 +106,8 @@ struct IndirectInputScheme : InputScheme return input; } - std::pair, Input> getAccessor(ref store, const Input & input) const override + std::pair, Input> + getAccessor(const Settings & settings, ref store, const Input & input) const override { throw Error("indirect input '%s' cannot be fetched directly", input.to_string()); } diff --git a/src/libfetchers/input-cache.cc b/src/libfetchers/input-cache.cc index c44f1a236..6da15bd3d 100644 --- a/src/libfetchers/input-cache.cc +++ b/src/libfetchers/input-cache.cc @@ -5,23 +5,23 @@ namespace nix::fetchers { -InputCache::CachedResult -InputCache::getAccessor(ref store, const Input & originalInput, UseRegistries useRegistries) +InputCache::CachedResult InputCache::getAccessor( + const Settings & settings, ref store, const Input & originalInput, UseRegistries useRegistries) { auto fetched = lookup(originalInput); Input resolvedInput = originalInput; if (!fetched) { if (originalInput.isDirect()) { - auto [accessor, lockedInput] = originalInput.getAccessor(store); + auto [accessor, lockedInput] = originalInput.getAccessor(settings, store); 
fetched.emplace(CachedInput{.lockedInput = lockedInput, .accessor = accessor}); } else { if (useRegistries != UseRegistries::No) { - auto [res, extraAttrs] = lookupInRegistries(store, originalInput, useRegistries); + auto [res, extraAttrs] = lookupInRegistries(settings, store, originalInput, useRegistries); resolvedInput = std::move(res); fetched = lookup(resolvedInput); if (!fetched) { - auto [accessor, lockedInput] = resolvedInput.getAccessor(store); + auto [accessor, lockedInput] = resolvedInput.getAccessor(settings, store); fetched.emplace( CachedInput{.lockedInput = lockedInput, .accessor = accessor, .extraAttrs = extraAttrs}); } diff --git a/src/libfetchers/mercurial.cc b/src/libfetchers/mercurial.cc index 41bf6e2aa..433690c7b 100644 --- a/src/libfetchers/mercurial.cc +++ b/src/libfetchers/mercurial.cc @@ -89,7 +89,7 @@ struct MercurialInputScheme : InputScheme throw BadURL("invalid Mercurial branch/tag name '%s'", *ref); } - Input input{settings}; + Input input{}; input.attrs = attrs; return input; } @@ -154,7 +154,7 @@ struct MercurialInputScheme : InputScheme return {isLocal, isLocal ? renderUrlPathEnsureLegal(url.path) : url.to_string()}; } - StorePath fetchToStore(ref store, Input & input) const + StorePath fetchToStore(const Settings & settings, ref store, Input & input) const { auto origRev = input.getRev(); @@ -176,10 +176,10 @@ struct MercurialInputScheme : InputScheme /* This is an unclean working tree. So copy all tracked files. 
*/ - if (!input.settings->allowDirty) + if (!settings.allowDirty) throw Error("Mercurial tree '%s' is unclean", actualUrl); - if (input.settings->warnDirty) + if (settings.warnDirty) warn("Mercurial tree '%s' is unclean", actualUrl); input.attrs.insert_or_assign("ref", chomp(runHg({"branch", "-R", actualUrl}))); @@ -240,13 +240,13 @@ struct MercurialInputScheme : InputScheme Cache::Key refToRevKey{"hgRefToRev", {{"url", actualUrl}, {"ref", *input.getRef()}}}; if (!input.getRev()) { - if (auto res = input.settings->getCache()->lookupWithTTL(refToRevKey)) + if (auto res = settings.getCache()->lookupWithTTL(refToRevKey)) input.attrs.insert_or_assign("rev", getRevAttr(*res, "rev").gitRev()); } /* If we have a rev, check if we have a cached store path. */ if (auto rev = input.getRev()) { - if (auto res = input.settings->getCache()->lookupStorePath(revInfoKey(*rev), *store)) + if (auto res = settings.getCache()->lookupStorePath(revInfoKey(*rev), *store)) return makeResult(res->value, res->storePath); } @@ -300,7 +300,7 @@ struct MercurialInputScheme : InputScheme /* Now that we have the rev, check the cache again for a cached store path. 
*/ - if (auto res = input.settings->getCache()->lookupStorePath(revInfoKey(rev), *store)) + if (auto res = settings.getCache()->lookupStorePath(revInfoKey(rev), *store)) return makeResult(res->value, res->storePath); Path tmpDir = createTempDir(); @@ -317,18 +317,19 @@ struct MercurialInputScheme : InputScheme }); if (!origRev) - input.settings->getCache()->upsert(refToRevKey, {{"rev", rev.gitRev()}}); + settings.getCache()->upsert(refToRevKey, {{"rev", rev.gitRev()}}); - input.settings->getCache()->upsert(revInfoKey(rev), *store, infoAttrs, storePath); + settings.getCache()->upsert(revInfoKey(rev), *store, infoAttrs, storePath); return makeResult(infoAttrs, std::move(storePath)); } - std::pair, Input> getAccessor(ref store, const Input & _input) const override + std::pair, Input> + getAccessor(const Settings & settings, ref store, const Input & _input) const override { Input input(_input); - auto storePath = fetchToStore(store, input); + auto storePath = fetchToStore(settings, store, input); auto accessor = store->requireStoreObjectAccessor(storePath); accessor->setPathDisplay("«" + input.to_string() + "»"); @@ -336,7 +337,7 @@ struct MercurialInputScheme : InputScheme return {accessor, input}; } - bool isLocked(const Input & input) const override + bool isLocked(const Settings & settings, const Input & input) const override { return (bool) input.getRev(); } diff --git a/src/libfetchers/path.cc b/src/libfetchers/path.cc index c4b5e2f1e..520534bf5 100644 --- a/src/libfetchers/path.cc +++ b/src/libfetchers/path.cc @@ -17,7 +17,7 @@ struct PathInputScheme : InputScheme if (url.authority && url.authority->host.size()) throw Error("path URL '%s' should not have an authority ('%s')", url, *url.authority); - Input input{settings}; + Input input{}; input.attrs.insert_or_assign("type", "path"); input.attrs.insert_or_assign("path", renderUrlPathEnsureLegal(url.path)); @@ -60,7 +60,7 @@ struct PathInputScheme : InputScheme { getStrAttr(attrs, "path"); - Input 
input{settings}; + Input input{}; input.attrs = attrs; return input; } @@ -101,7 +101,7 @@ struct PathInputScheme : InputScheme return path; } - bool isLocked(const Input & input) const override + bool isLocked(const Settings & settings, const Input & input) const override { return (bool) input.getNarHash(); } @@ -116,7 +116,8 @@ struct PathInputScheme : InputScheme throw Error("cannot fetch input '%s' because it uses a relative path", input.to_string()); } - std::pair, Input> getAccessor(ref store, const Input & _input) const override + std::pair, Input> + getAccessor(const Settings & settings, ref store, const Input & _input) const override { Input input(_input); auto path = getStrAttr(input.attrs, "path"); @@ -145,7 +146,7 @@ struct PathInputScheme : InputScheme auto info = store->queryPathInfo(*storePath); accessor->fingerprint = fmt("path:%s", store->queryPathInfo(*storePath)->narHash.to_string(HashFormat::SRI, true)); - input.settings->getCache()->upsert( + settings.getCache()->upsert( makeFetchToStoreCacheKey( input.getName(), *accessor->fingerprint, ContentAddressMethod::Raw::NixArchive, "/"), *store, diff --git a/src/libfetchers/registry.cc b/src/libfetchers/registry.cc index 2b0b5f390..48293095d 100644 --- a/src/libfetchers/registry.cc +++ b/src/libfetchers/registry.cc @@ -131,9 +131,9 @@ std::shared_ptr getFlagRegistry(const Settings & settings) return flagRegistry; } -void overrideRegistry(const Input & from, const Input & to, const Attrs & extraAttrs) +void overrideRegistry(const Settings & settings, const Input & from, const Input & to, const Attrs & extraAttrs) { - getFlagRegistry(*from.settings)->add(from, to, extraAttrs); + getFlagRegistry(settings)->add(from, to, extraAttrs); } static std::shared_ptr getGlobalRegistry(const Settings & settings, ref store) @@ -172,7 +172,8 @@ Registries getRegistries(const Settings & settings, ref store) return registries; } -std::pair lookupInRegistries(ref store, const Input & _input, UseRegistries useRegistries) 
+std::pair +lookupInRegistries(const Settings & settings, ref store, const Input & _input, UseRegistries useRegistries) { Attrs extraAttrs; int n = 0; @@ -187,7 +188,7 @@ restart: if (n > 100) throw Error("cycle detected in flake registry for '%s'", input.to_string()); - for (auto & registry : getRegistries(*input.settings, store)) { + for (auto & registry : getRegistries(settings, store)) { if (useRegistries == UseRegistries::Limited && !(registry->type == fetchers::Registry::Flag || registry->type == fetchers::Registry::Global)) continue; diff --git a/src/libfetchers/tarball.cc b/src/libfetchers/tarball.cc index 76cf3fd32..1891c6964 100644 --- a/src/libfetchers/tarball.cc +++ b/src/libfetchers/tarball.cc @@ -224,7 +224,7 @@ ref downloadTarball(ref store, const Settings & settings, auto input = Input::fromAttrs(settings, std::move(attrs)); - return input.getAccessor(store).first; + return input.getAccessor(settings, store).first; } // An input scheme corresponding to a curl-downloadable resource. 
@@ -252,7 +252,7 @@ struct CurlInputScheme : InputScheme if (!isValidURL(_url, requireTree)) return std::nullopt; - Input input{settings}; + Input input{}; auto url = _url; @@ -302,7 +302,7 @@ struct CurlInputScheme : InputScheme std::optional inputFromAttrs(const Settings & settings, const Attrs & attrs) const override { - Input input{settings}; + Input input{}; input.attrs = attrs; // input.locked = (bool) maybeGetStrAttr(input.attrs, "hash"); @@ -319,7 +319,7 @@ struct CurlInputScheme : InputScheme return url; } - bool isLocked(const Input & input) const override + bool isLocked(const Settings & settings, const Input & input) const override { return (bool) input.getNarHash(); } @@ -340,7 +340,8 @@ struct FileInputScheme : CurlInputScheme : (!requireTree && !hasTarballExtension(url))); } - std::pair, Input> getAccessor(ref store, const Input & _input) const override + std::pair, Input> + getAccessor(const Settings & settings, ref store, const Input & _input) const override { auto input(_input); @@ -348,7 +349,7 @@ struct FileInputScheme : CurlInputScheme the Nix store directly, since there is little deduplication benefit in using the Git cache for single big files like tarballs. 
*/ - auto file = downloadFile(store, *input.settings, getStrAttr(input.attrs, "url"), input.getName()); + auto file = downloadFile(store, settings, getStrAttr(input.attrs, "url"), input.getName()); auto narHash = store->queryPathInfo(file.storePath)->narHash; input.attrs.insert_or_assign("narHash", narHash.to_string(HashFormat::SRI, true)); @@ -377,15 +378,15 @@ struct TarballInputScheme : CurlInputScheme : (requireTree || hasTarballExtension(url))); } - std::pair, Input> getAccessor(ref store, const Input & _input) const override + std::pair, Input> + getAccessor(const Settings & settings, ref store, const Input & _input) const override { auto input(_input); - auto result = - downloadTarball_(*input.settings, getStrAttr(input.attrs, "url"), {}, "«" + input.to_string() + "»"); + auto result = downloadTarball_(settings, getStrAttr(input.attrs, "url"), {}, "«" + input.to_string() + "»"); if (result.immutableUrl) { - auto immutableInput = Input::fromURL(*input.settings, *result.immutableUrl); + auto immutableInput = Input::fromURL(settings, *result.immutableUrl); // FIXME: would be nice to support arbitrary flakerefs // here, e.g. git flakes. 
if (immutableInput.getType() != "tarball") @@ -398,9 +399,7 @@ struct TarballInputScheme : CurlInputScheme input.attrs.insert_or_assign( "narHash", - input.settings->getTarballCache() - ->treeHashToNarHash(*input.settings, result.treeHash) - .to_string(HashFormat::SRI, true)); + settings.getTarballCache()->treeHashToNarHash(settings, result.treeHash).to_string(HashFormat::SRI, true)); return {result.accessor, input}; } diff --git a/src/libflake/flake-primops.cc b/src/libflake/flake-primops.cc index eeff9a966..3f65dc47a 100644 --- a/src/libflake/flake-primops.cc +++ b/src/libflake/flake-primops.cc @@ -38,7 +38,7 @@ PrimOp getFlake(const Settings & settings) std::string flakeRefS( state.forceStringNoCtx(*args[0], pos, "while evaluating the argument passed to builtins.getFlake")); auto flakeRef = nix::parseFlakeRef(state.fetchSettings, flakeRefS, {}, true); - if (state.settings.pureEval && !flakeRef.input.isLocked()) + if (state.settings.pureEval && !flakeRef.input.isLocked(state.fetchSettings)) throw Error( "cannot call 'getFlake' on unlocked flake reference '%s', at %s (use --impure to override)", flakeRefS, diff --git a/src/libflake/flake.cc b/src/libflake/flake.cc index dc60dbf08..b206ff13b 100644 --- a/src/libflake/flake.cc +++ b/src/libflake/flake.cc @@ -372,7 +372,8 @@ static Flake getFlake( const InputAttrPath & lockRootAttrPath) { // Fetch a lazy tree first. - auto cachedInput = state.inputCache->getAccessor(state.store, originalRef.input, useRegistries); + auto cachedInput = + state.inputCache->getAccessor(state.fetchSettings, state.store, originalRef.input, useRegistries); auto subdir = fetchers::maybeGetStrAttr(cachedInput.extraAttrs, "dir").value_or(originalRef.subdir); auto resolvedRef = FlakeRef(std::move(cachedInput.resolvedInput), subdir); @@ -388,7 +389,8 @@ static Flake getFlake( debug("refetching input '%s' due to self attribute", newLockedRef); // FIXME: need to remove attrs that are invalidated by the changed input attrs, such as 'narHash'. 
newLockedRef.input.attrs.erase("narHash"); - auto cachedInput2 = state.inputCache->getAccessor(state.store, newLockedRef.input, fetchers::UseRegistries::No); + auto cachedInput2 = state.inputCache->getAccessor( + state.fetchSettings, state.store, newLockedRef.input, fetchers::UseRegistries::No); cachedInput.accessor = cachedInput2.accessor; lockedRef = FlakeRef(std::move(cachedInput2.lockedInput), newLockedRef.subdir); } @@ -704,7 +706,8 @@ lockFlake(const Settings & settings, EvalState & state, const FlakeRef & topRef, this input. */ debug("creating new input '%s'", inputAttrPathS); - if (!lockFlags.allowUnlocked && !input.ref->input.isLocked() && !input.ref->input.isRelative()) + if (!lockFlags.allowUnlocked && !input.ref->input.isLocked(state.fetchSettings) + && !input.ref->input.isRelative()) throw Error("cannot update unlocked flake input '%s' in pure mode", inputAttrPathS); /* Note: in case of an --override-input, we use @@ -753,7 +756,7 @@ lockFlake(const Settings & settings, EvalState & state, const FlakeRef & topRef, return {*resolvedPath, *input.ref}; } else { auto cachedInput = state.inputCache->getAccessor( - state.store, input.ref->input, useRegistriesInputs); + state.fetchSettings, state.store, input.ref->input, useRegistriesInputs); auto lockedRef = FlakeRef(std::move(cachedInput.lockedInput), input.ref->subdir); diff --git a/src/libflake/flakeref.cc b/src/libflake/flakeref.cc index ed6b657ac..1cce46b25 100644 --- a/src/libflake/flakeref.cc +++ b/src/libflake/flakeref.cc @@ -64,9 +64,10 @@ std::ostream & operator<<(std::ostream & str, const FlakeRef & flakeRef) return str; } -FlakeRef FlakeRef::resolve(ref store, fetchers::UseRegistries useRegistries) const +FlakeRef FlakeRef::resolve( + const fetchers::Settings & fetchSettings, ref store, fetchers::UseRegistries useRegistries) const { - auto [input2, extraAttrs] = lookupInRegistries(store, input, useRegistries); + auto [input2, extraAttrs] = lookupInRegistries(fetchSettings, store, input, 
useRegistries); return FlakeRef(std::move(input2), fetchers::maybeGetStrAttr(extraAttrs, "dir").value_or(subdir)); } @@ -287,9 +288,10 @@ FlakeRef FlakeRef::fromAttrs(const fetchers::Settings & fetchSettings, const fet fetchers::maybeGetStrAttr(attrs, "dir").value_or("")); } -std::pair, FlakeRef> FlakeRef::lazyFetch(ref store) const +std::pair, FlakeRef> +FlakeRef::lazyFetch(const fetchers::Settings & fetchSettings, ref store) const { - auto [accessor, lockedInput] = input.getAccessor(store); + auto [accessor, lockedInput] = input.getAccessor(fetchSettings, store); return {accessor, FlakeRef(std::move(lockedInput), subdir)}; } diff --git a/src/libflake/include/nix/flake/flakeref.hh b/src/libflake/include/nix/flake/flakeref.hh index 1af8c5afd..bcc73b30c 100644 --- a/src/libflake/include/nix/flake/flakeref.hh +++ b/src/libflake/include/nix/flake/flakeref.hh @@ -71,11 +71,15 @@ struct FlakeRef fetchers::Attrs toAttrs() const; - FlakeRef resolve(ref store, fetchers::UseRegistries useRegistries = fetchers::UseRegistries::All) const; + FlakeRef resolve( + const fetchers::Settings & fetchSettings, + ref store, + fetchers::UseRegistries useRegistries = fetchers::UseRegistries::All) const; static FlakeRef fromAttrs(const fetchers::Settings & fetchSettings, const fetchers::Attrs & attrs); - std::pair, FlakeRef> lazyFetch(ref store) const; + std::pair, FlakeRef> + lazyFetch(const fetchers::Settings & fetchSettings, ref store) const; /** * Canonicalize a flakeref for the purpose of comparing "old" and diff --git a/src/libflake/lockfile.cc b/src/libflake/lockfile.cc index ecad5df6f..f2914feab 100644 --- a/src/libflake/lockfile.cc +++ b/src/libflake/lockfile.cc @@ -74,7 +74,7 @@ LockedNode::LockedNode(const fetchers::Settings & fetchSettings, const nlohmann: , parentInputAttrPath( json.find("parent") != json.end() ? 
(std::optional) json["parent"] : std::nullopt) { - if (!lockedRef.input.isLocked() && !lockedRef.input.isRelative()) { + if (!lockedRef.input.isLocked(fetchSettings) && !lockedRef.input.isRelative()) { if (lockedRef.input.getNarHash()) warn( "Lock file entry '%s' is unlocked (e.g. lacks a Git revision) but is checked by NAR hash. " @@ -282,7 +282,7 @@ std::optional LockFile::isUnlocked(const fetchers::Settings & fetchSet latter case, we can verify the input but we may not be able to fetch it from anywhere. */ auto isConsideredLocked = [&](const fetchers::Input & input) { - return input.isLocked() || (fetchSettings.allowDirtyLocks && input.getNarHash()); + return input.isLocked(fetchSettings) || (fetchSettings.allowDirtyLocks && input.getNarHash()); }; for (auto & i : nodes) { diff --git a/src/nix/flake-prefetch-inputs.cc b/src/nix/flake-prefetch-inputs.cc index 2a3e067c6..77ec41193 100644 --- a/src/nix/flake-prefetch-inputs.cc +++ b/src/nix/flake-prefetch-inputs.cc @@ -45,7 +45,7 @@ struct CmdFlakePrefetchInputs : FlakeCommand if (auto lockedNode = dynamic_cast(&node)) { try { Activity act(*logger, lvlInfo, actUnknown, fmt("fetching '%s'", lockedNode->lockedRef)); - auto accessor = lockedNode->lockedRef.input.getAccessor(store).first; + auto accessor = lockedNode->lockedRef.input.getAccessor(fetchSettings, store).first; fetchToStore( fetchSettings, *store, accessor, FetchMode::Copy, lockedNode->lockedRef.input.getName()); } catch (Error & e) { diff --git a/src/nix/flake.cc b/src/nix/flake.cc index 6a56126a0..4a8c7a205 100644 --- a/src/nix/flake.cc +++ b/src/nix/flake.cc @@ -245,7 +245,7 @@ struct CmdFlakeMetadata : FlakeCommand, MixJSON printJSON(j); } else { logger->cout(ANSI_BOLD "Resolved URL:" ANSI_NORMAL " %s", flake.resolvedRef.to_string()); - if (flake.lockedRef.input.isLocked()) + if (flake.lockedRef.input.isLocked(fetchSettings)) logger->cout(ANSI_BOLD "Locked URL:" ANSI_NORMAL " %s", flake.lockedRef.to_string()); if (flake.description) 
logger->cout(ANSI_BOLD "Description:" ANSI_NORMAL " %s", *flake.description); @@ -1049,7 +1049,7 @@ struct CmdFlakeClone : FlakeCommand if (destDir.empty()) throw Error("missing flag '--dest'"); - getFlakeRef().resolve(store).input.clone(destDir); + getFlakeRef().resolve(fetchSettings, store).input.clone(fetchSettings, destDir); } }; @@ -1100,7 +1100,7 @@ struct CmdFlakeArchive : FlakeCommand, MixJSON, MixDryRun, MixNoCheckSigs std::optional storePath; if (!(*inputNode)->lockedRef.input.isRelative()) { storePath = dryRun ? (*inputNode)->lockedRef.input.computeStorePath(*store) - : (*inputNode)->lockedRef.input.fetchToStore(store).first; + : (*inputNode)->lockedRef.input.fetchToStore(fetchSettings, store).first; sources.insert(*storePath); } if (json) { @@ -1496,8 +1496,8 @@ struct CmdFlakePrefetch : FlakeCommand, MixJSON void run(ref store) override { auto originalRef = getFlakeRef(); - auto resolvedRef = originalRef.resolve(store); - auto [accessor, lockedRef] = resolvedRef.lazyFetch(store); + auto resolvedRef = originalRef.resolve(fetchSettings, store); + auto [accessor, lockedRef] = resolvedRef.lazyFetch(getEvalState()->fetchSettings, store); auto storePath = fetchToStore(getEvalState()->fetchSettings, *store, accessor, FetchMode::Copy, lockedRef.input.getName()); auto hash = store->queryPathInfo(storePath)->narHash; diff --git a/src/nix/profile.cc b/src/nix/profile.cc index 80177cf13..9690eacd8 100644 --- a/src/nix/profile.cc +++ b/src/nix/profile.cc @@ -711,7 +711,7 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf element.identifier()); continue; } - if (element.source->originalRef.input.isLocked()) { + if (element.source->originalRef.input.isLocked(getEvalState()->fetchSettings)) { warn( "Found package '%s', but it was added from a locked flake reference so it can't be upgraded!", element.identifier()); @@ -740,7 +740,8 @@ struct CmdProfileUpgrade : virtual SourceExprCommand, MixDefaultProfile, MixProf assert(infop); auto & 
info = *infop; - if (info.flake.lockedRef.input.isLocked() && element.source->lockedRef == info.flake.lockedRef) + if (info.flake.lockedRef.input.isLocked(getEvalState()->fetchSettings) + && element.source->lockedRef == info.flake.lockedRef) continue; printInfo( diff --git a/src/nix/registry.cc b/src/nix/registry.cc index d9fcf09fc..7c6f80896 100644 --- a/src/nix/registry.cc +++ b/src/nix/registry.cc @@ -190,8 +190,9 @@ struct CmdRegistryPin : RegistryCommand, EvalCommand auto ref = parseFlakeRef(fetchSettings, url); auto lockedRef = parseFlakeRef(fetchSettings, locked); registry->remove(ref.input); - auto resolved = lockedRef.resolve(store).input.getAccessor(store).second; - if (!resolved.isLocked()) + auto resolvedInput = lockedRef.resolve(fetchSettings, store).input; + auto resolved = resolvedInput.getAccessor(fetchSettings, store).second; + if (!resolved.isLocked(fetchSettings)) warn("flake '%s' is not locked", resolved.to_string()); fetchers::Attrs extraAttrs; if (ref.subdir != "") From 1b5af49fd0e3bc9c047054bd95c1c749d10b8d7c Mon Sep 17 00:00:00 2001 From: David McFarland Date: Wed, 12 Nov 2025 19:52:29 -0400 Subject: [PATCH 209/213] Remove static data from headers We don't want to duplicate any of these across libraries, which is what happens when the platform doesn't support unique symbols. 
--- src/libcmd/command.cc | 13 +++++++++++++ src/libcmd/include/nix/cmd/command.hh | 6 +----- src/libcmd/include/nix/cmd/legacy.hh | 6 +----- src/libexpr/include/nix/expr/primops.hh | 6 +----- src/libexpr/include/nix/expr/print-options.hh | 2 +- src/libexpr/primops.cc | 6 ++++++ src/libstore/builtins/buildenv.cc | 6 ++++++ src/libstore/include/nix/store/builtins.hh | 6 +----- src/libutil/config-global.cc | 6 ++++++ src/libutil/include/nix/util/config-global.hh | 6 +----- 10 files changed, 37 insertions(+), 26 deletions(-) diff --git a/src/libcmd/command.cc b/src/libcmd/command.cc index 6b6bbe345..b06d40902 100644 --- a/src/libcmd/command.cc +++ b/src/libcmd/command.cc @@ -2,6 +2,7 @@ #include #include "nix/cmd/command.hh" +#include "nix/cmd/legacy.hh" #include "nix/cmd/markdown.hh" #include "nix/store/store-open.hh" #include "nix/store/local-fs-store.hh" @@ -14,6 +15,18 @@ namespace nix { +RegisterCommand::Commands & RegisterCommand::commands() +{ + static RegisterCommand::Commands commands; + return commands; +} + +RegisterLegacyCommand::Commands & RegisterLegacyCommand::commands() +{ + static RegisterLegacyCommand::Commands commands; + return commands; +} + nix::Commands RegisterCommand::getCommandsFor(const std::vector & prefix) { nix::Commands res; diff --git a/src/libcmd/include/nix/cmd/command.hh b/src/libcmd/include/nix/cmd/command.hh index 2bff11dc1..2f97b30da 100644 --- a/src/libcmd/include/nix/cmd/command.hh +++ b/src/libcmd/include/nix/cmd/command.hh @@ -286,11 +286,7 @@ struct RegisterCommand { typedef std::map, std::function()>> Commands; - static Commands & commands() - { - static Commands commands; - return commands; - } + static Commands & commands(); RegisterCommand(std::vector && name, std::function()> command) { diff --git a/src/libcmd/include/nix/cmd/legacy.hh b/src/libcmd/include/nix/cmd/legacy.hh index 546057184..d408cde7a 100644 --- a/src/libcmd/include/nix/cmd/legacy.hh +++ b/src/libcmd/include/nix/cmd/legacy.hh @@ -13,11 +13,7 @@ struct 
RegisterLegacyCommand { typedef std::map Commands; - static Commands & commands() - { - static Commands commands; - return commands; - } + static Commands & commands(); RegisterLegacyCommand(const std::string & name, MainFunction fun) { diff --git a/src/libexpr/include/nix/expr/primops.hh b/src/libexpr/include/nix/expr/primops.hh index 6407ba84e..8854f6b03 100644 --- a/src/libexpr/include/nix/expr/primops.hh +++ b/src/libexpr/include/nix/expr/primops.hh @@ -12,11 +12,7 @@ struct RegisterPrimOp { typedef std::vector PrimOps; - static PrimOps & primOps() - { - static PrimOps primOps; - return primOps; - } + static PrimOps & primOps(); /** * You can register a constant by passing an arity of 0. fun diff --git a/src/libexpr/include/nix/expr/print-options.hh b/src/libexpr/include/nix/expr/print-options.hh index ffb80abc3..600b96ba2 100644 --- a/src/libexpr/include/nix/expr/print-options.hh +++ b/src/libexpr/include/nix/expr/print-options.hh @@ -110,7 +110,7 @@ struct PrintOptions * `PrintOptions` for unknown and therefore potentially large values in error messages, * to avoid printing "too much" output. 
*/ -static PrintOptions errorPrintOptions = PrintOptions{ +static constexpr PrintOptions errorPrintOptions = PrintOptions{ .ansiColors = true, .maxDepth = 10, .maxAttrs = 10, diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index 98ed1b450..35f16a68d 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -40,6 +40,12 @@ namespace nix { +RegisterPrimOp::PrimOps & RegisterPrimOp::primOps() +{ + static RegisterPrimOp::PrimOps primOps; + return primOps; +} + /************************************************************* * Miscellaneous *************************************************************/ diff --git a/src/libstore/builtins/buildenv.cc b/src/libstore/builtins/buildenv.cc index 22ed8d807..4db37d43a 100644 --- a/src/libstore/builtins/buildenv.cc +++ b/src/libstore/builtins/buildenv.cc @@ -10,6 +10,12 @@ namespace nix { +RegisterBuiltinBuilder::BuiltinBuilders & RegisterBuiltinBuilder::builtinBuilders() +{ + static RegisterBuiltinBuilder::BuiltinBuilders builders; + return builders; +} + namespace { struct State diff --git a/src/libstore/include/nix/store/builtins.hh b/src/libstore/include/nix/store/builtins.hh index 6925e61c1..7cc9c0911 100644 --- a/src/libstore/include/nix/store/builtins.hh +++ b/src/libstore/include/nix/store/builtins.hh @@ -33,11 +33,7 @@ struct RegisterBuiltinBuilder { typedef std::map BuiltinBuilders; - static BuiltinBuilders & builtinBuilders() - { - static BuiltinBuilders builders; - return builders; - } + static BuiltinBuilders & builtinBuilders(); RegisterBuiltinBuilder(const std::string & name, BuiltinBuilder && fun) { diff --git a/src/libutil/config-global.cc b/src/libutil/config-global.cc index cd461ea48..b63b4aaa1 100644 --- a/src/libutil/config-global.cc +++ b/src/libutil/config-global.cc @@ -4,6 +4,12 @@ namespace nix { +GlobalConfig::ConfigRegistrations & GlobalConfig::configRegistrations() +{ + static GlobalConfig::ConfigRegistrations configRegistrations; + return configRegistrations; +} + bool 
GlobalConfig::set(const std::string & name, const std::string & value) { for (auto & config : configRegistrations()) diff --git a/src/libutil/include/nix/util/config-global.hh b/src/libutil/include/nix/util/config-global.hh index 0e6f43ec4..5074351e0 100644 --- a/src/libutil/include/nix/util/config-global.hh +++ b/src/libutil/include/nix/util/config-global.hh @@ -9,11 +9,7 @@ struct GlobalConfig : public AbstractConfig { typedef std::vector ConfigRegistrations; - static ConfigRegistrations & configRegistrations() - { - static ConfigRegistrations configRegistrations; - return configRegistrations; - } + static ConfigRegistrations & configRegistrations(); bool set(const std::string & name, const std::string & value) override; From 91cdd887140e3e07c230f24049c30b885c15bfb2 Mon Sep 17 00:00:00 2001 From: Arnout Engelen Date: Thu, 13 Nov 2025 13:04:12 +0100 Subject: [PATCH 210/213] docs: avoid secrets in the nix store I think this is noncontroversial / common knowledge, but I didn't see it described anywhere authoritatively yet. --- doc/manual/source/SUMMARY.md.in | 1 + doc/manual/source/store/secrets.md | 20 ++++++++++++++++++++ 2 files changed, 21 insertions(+) create mode 100644 doc/manual/source/store/secrets.md diff --git a/doc/manual/source/SUMMARY.md.in b/doc/manual/source/SUMMARY.md.in index 5be3d6a90..bf2dd9481 100644 --- a/doc/manual/source/SUMMARY.md.in +++ b/doc/manual/source/SUMMARY.md.in @@ -29,6 +29,7 @@ - [Build Trace](store/build-trace.md) - [Derivation Resolution](store/resolution.md) - [Building](store/building.md) + - [Secrets](store/secrets.md) - [Store Types](store/types/index.md) {{#include ./store/types/SUMMARY.md}} - [Appendix: Math notation](store/math-notation.md) diff --git a/doc/manual/source/store/secrets.md b/doc/manual/source/store/secrets.md new file mode 100644 index 000000000..dc15989a7 --- /dev/null +++ b/doc/manual/source/store/secrets.md @@ -0,0 +1,20 @@ +# Secrets + +The store is readable to all users on the system.
For this reason, it +is generally discouraged to allow secrets to make it into the store. + +Even on a single-user system, separate system users isolate services +from each other and having secrets that all local users can read +weakens that isolation. When using external store caches, the secrets +may end up there, and on multi-user systems the secrets will be +available to all those users. + +Organize your derivations so that secrets are read from the filesystem +(with appropriate access controls) at run time. Place the secrets on +the filesystem manually or use a scheme that includes the secret in +the store in encrypted form, and decrypts it, adding the relevant +access control, on system activation. +Several such schemes for NixOS can be found in the +[comparison of secret managing schemes] on the wiki. + +[comparison of secret managing schemes]: https://wiki.nixos.org/wiki/Comparison_of_secret_managing_schemes From e95503cf9abab2fc724bf0607256bd0f1efd6b26 Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Thu, 13 Nov 2025 23:54:14 +0300 Subject: [PATCH 211/213] libutil: Make PosixSourceAccessor update mtime only when needed Typically PosixSourceAccessor can be used from multiple threads, but mtime is not updated atomically (i.e. with compare_exchange_weak), so mtime gets raced. It's only needed in dumpPathAndGetMtime and mtime tracking can be gated behind that. Also start using getLastModified interface instead of dynamic casts.
--- src/libutil/archive.cc | 4 ++-- .../include/nix/util/posix-source-accessor.hh | 18 +++++++++++++++--- src/libutil/posix-source-accessor.cc | 16 ++++++++++------ 3 files changed, 27 insertions(+), 11 deletions(-) diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index 737d9b2fe..3b5b610db 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -103,9 +103,9 @@ void SourceAccessor::dumpPath(const CanonPath & path, Sink & sink, PathFilter & time_t dumpPathAndGetMtime(const Path & path, Sink & sink, PathFilter & filter) { - auto path2 = PosixSourceAccessor::createAtRoot(path); + auto path2 = PosixSourceAccessor::createAtRoot(path, /*trackLastModified=*/true); path2.dumpPath(sink, filter); - return path2.accessor.dynamic_pointer_cast()->mtime; + return path2.accessor->getLastModified().value(); } void dumpPath(const Path & path, Sink & sink, PathFilter & filter) diff --git a/src/libutil/include/nix/util/posix-source-accessor.hh b/src/libutil/include/nix/util/posix-source-accessor.hh index 895e2e1c1..29561a3da 100644 --- a/src/libutil/include/nix/util/posix-source-accessor.hh +++ b/src/libutil/include/nix/util/posix-source-accessor.hh @@ -9,7 +9,7 @@ struct SourcePath; /** * A source accessor that uses the Unix filesystem. */ -struct PosixSourceAccessor : virtual SourceAccessor +class PosixSourceAccessor : virtual public SourceAccessor { /** * Optional root path to prefix all operations into the native file @@ -18,8 +18,12 @@ struct PosixSourceAccessor : virtual SourceAccessor */ const std::filesystem::path root; + const bool trackLastModified = false; + +public: + PosixSourceAccessor(); - PosixSourceAccessor(std::filesystem::path && root); + PosixSourceAccessor(std::filesystem::path && root, bool trackLastModified = false); /** * The most recent mtime seen by lstat(). This is a hack to @@ -43,6 +47,9 @@ struct PosixSourceAccessor : virtual SourceAccessor * Create a `PosixSourceAccessor` and `SourcePath` corresponding to * some native path. 
* + * @param trackLastModified Whether the accessor should return a non-null getLastModified. + * When true the accessor must be used only by a single thread. + * * The `PosixSourceAccessor` is rooted as far up the tree as * possible, (e.g. on Windows it could scoped to a drive like * `C:\`). This allows more `..` parent accessing to work. @@ -64,7 +71,12 @@ struct PosixSourceAccessor : virtual SourceAccessor * and * [`std::filesystem::path::relative_path`](https://en.cppreference.com/w/cpp/filesystem/path/relative_path). */ - static SourcePath createAtRoot(const std::filesystem::path & path); + static SourcePath createAtRoot(const std::filesystem::path & path, bool trackLastModified = false); + + std::optional getLastModified() override + { + return trackLastModified ? std::optional{mtime} : std::nullopt; + } private: diff --git a/src/libutil/posix-source-accessor.cc b/src/libutil/posix-source-accessor.cc index fe3bcb1c1..abbab45db 100644 --- a/src/libutil/posix-source-accessor.cc +++ b/src/libutil/posix-source-accessor.cc @@ -7,8 +7,9 @@ namespace nix { -PosixSourceAccessor::PosixSourceAccessor(std::filesystem::path && argRoot) +PosixSourceAccessor::PosixSourceAccessor(std::filesystem::path && argRoot, bool trackLastModified) : root(std::move(argRoot)) + , trackLastModified(trackLastModified) { assert(root.empty() || root.is_absolute()); displayPrefix = root.string(); @@ -19,11 +20,11 @@ PosixSourceAccessor::PosixSourceAccessor() { } -SourcePath PosixSourceAccessor::createAtRoot(const std::filesystem::path & path) +SourcePath PosixSourceAccessor::createAtRoot(const std::filesystem::path & path, bool trackLastModified) { std::filesystem::path path2 = absPath(path); return { - make_ref(path2.root_path()), + make_ref(path2.root_path(), trackLastModified), CanonPath{path2.relative_path().string()}, }; } @@ -114,9 +115,12 @@ std::optional PosixSourceAccessor::maybeLstat(const CanonP auto st = cachedLstat(path); if (!st) return std::nullopt; - // This makes the accessor thread-unsafe,
but we only seem to use the actual value in a single threaded context in - // `src/libfetchers/path.cc`. - mtime = std::max(mtime, st->st_mtime); + + /* The contract is that trackLastModified implies that the caller uses the accessor + from a single thread. Thus this is not a CAS loop. */ + if (trackLastModified) + mtime = std::max(mtime, st->st_mtime); + return Stat{ .type = S_ISREG(st->st_mode) ? tRegular : S_ISDIR(st->st_mode) ? tDirectory From 19ab65c9d715b07bdbe4df8ce99e110ef5f1c9ce Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 14 Nov 2025 04:18:53 +0300 Subject: [PATCH 212/213] libstore: Remove dead PosixSourceAccessor variable in verifyStore --- src/libstore/local-store.cc | 1 - 1 file changed, 1 deletion(-) diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc index 2c4d546f8..1f945ecf6 100644 --- a/src/libstore/local-store.cc +++ b/src/libstore/local-store.cc @@ -1385,7 +1385,6 @@ bool LocalStore::verifyStore(bool checkContents, RepairFlag repair) checkInterrupt(); auto name = link.path().filename(); printMsg(lvlTalkative, "checking contents of %s", name); - PosixSourceAccessor accessor; std::string hash = hashPath( PosixSourceAccessor::createAtRoot(link.path()), FileIngestionMethod::NixArchive, From 0e81a358816ffdb4f9de79826e79d2b62db5a3de Mon Sep 17 00:00:00 2001 From: Sergei Zimmerman Date: Fri, 14 Nov 2025 22:45:20 +0300 Subject: [PATCH 213/213] libutil: Make CanonPath a proper range This way we can use std::ranges algorithms on it. Requires making the iterator a proper forward iterator type as well.
--- src/libutil/include/nix/util/canon-path.hh | 68 +++++++++++++++++++--- 1 file changed, 59 insertions(+), 9 deletions(-) diff --git a/src/libutil/include/nix/util/canon-path.hh b/src/libutil/include/nix/util/canon-path.hh index b9b2fff25..2156b02fc 100644 --- a/src/libutil/include/nix/util/canon-path.hh +++ b/src/libutil/include/nix/util/canon-path.hh @@ -8,6 +8,7 @@ #include #include #include +#include #include @@ -122,33 +123,70 @@ public: return &cs[1]; } - struct Iterator + class Iterator { + /** + * Helper class with overloaded operator-> for "drill-down" behavior. + * This way a "temporary" string_view doesn't have to be stored anywhere. + */ + class PointerProxy + { + std::string_view segment; + + public: + PointerProxy(std::string_view segment_) + : segment(segment_) + { + } + + const std::string_view * operator->() const + { + return &segment; + } + }; + + public: + using value_type = std::string_view; + using reference_type = const std::string_view; + using pointer_type = PointerProxy; + using difference_type = std::ptrdiff_t; + using iterator_category = std::forward_iterator_tag; + std::string_view remaining; size_t slash; + /** + * Dummy default constructor required for forward iterators. Doesn't return + a usable iterator.
+ */ + Iterator() + : remaining() + , slash(0) + { + } + Iterator(std::string_view remaining) : remaining(remaining) , slash(remaining.find('/')) { } - bool operator!=(const Iterator & x) const - { - return remaining.data() != x.remaining.data(); - } - bool operator==(const Iterator & x) const { - return !(*this != x); + return remaining.data() == x.remaining.data(); } - const std::string_view operator*() const + reference_type operator*() const { return remaining.substr(0, slash); } - void operator++() + pointer_type operator->() const + { + return PointerProxy(**this); + } + + Iterator & operator++() { if (slash == remaining.npos) remaining = remaining.substr(remaining.size()); @@ -156,9 +194,19 @@ public: remaining = remaining.substr(slash + 1); slash = remaining.find('/'); } + return *this; + } + + Iterator operator++(int) + { + auto tmp = *this; + ++*this; + return tmp; } }; + static_assert(std::forward_iterator); + Iterator begin() const { return Iterator(rel()); @@ -265,6 +313,8 @@ public: friend std::size_t hash_value(const CanonPath &); }; +static_assert(std::ranges::forward_range); + std::ostream & operator<<(std::ostream & stream, const CanonPath & path); inline std::size_t hash_value(const CanonPath & path)