forked from lix-project/lix
Merge pull request #9242 from obsidiansystems/path-info-map
Make `nix path-info --json` return an object not array
Commit 06d0d51895
@@ -28,5 +28,39 @@

- The flake-specific flags `--recreate-lock-file` and `--update-input` have been removed from all commands operating on installables.
  They are superseded by `nix flake update`.

- Commit signature verification for [`builtins.fetchGit`](@docroot@/language/builtins.md#builtins-fetchGit) is added as the new [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).

- [`nix path-info --json`](@docroot@/command-ref/new-cli/nix3-path-info.md)
  (experimental) now returns a JSON map rather than a JSON list.
  The `path` field of each object has instead become the key in the outer map, since it is unique.
  The `valid` field is also gone; an invalid path is now represented by a `null` value instead.

  - Old way:

    ```json5
    [
      {
        "path": "/nix/store/8fv91097mbh5049i9rglc73dx6kjg3qk-bash-5.2-p15",
        "valid": true,
        // ...
      },
      {
        "path": "/nix/store/wffw7l0alvs3iw94cbgi1gmmbmw99sqb-home-manager-path",
        "valid": false
      }
    ]
    ```

  - New way:

    ```json5
    {
      "/nix/store/8fv91097mbh5049i9rglc73dx6kjg3qk-bash-5.2-p15": {
        // ...
      },
      "/nix/store/wffw7l0alvs3iw94cbgi1gmmbmw99sqb-home-manager-path": null,
    }
    ```

  This makes it match `nix derivation show`, which also maps store paths to information.
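For programmatic consumers, here is a minimal sketch of reading the new map-shaped output (illustrative only, not part of this change; it assumes the output of `nix path-info --json` is piped to its standard input and uses nlohmann::json, the JSON library the PR itself uses):

```c++
// Hypothetical consumer of the new format: each key of the top-level object
// is a store path, and a null value marks a path that is not valid.
#include <cstdint>
#include <iostream>
#include <nlohmann/json.hpp>

int main()
{
    auto infos = nlohmann::json::parse(std::cin);
    for (auto & [path, info] : infos.items()) {
        if (info.is_null())
            std::cout << path << ": invalid\n";
        else
            std::cout << path << ": narSize = " << info.at("narSize").get<uint64_t>() << "\n";
    }
    return 0;
}
```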
@@ -4,6 +4,15 @@

namespace nix {

GENERATE_CMP_EXT(
    ,
    NarInfo,
    me->url,
    me->compression,
    me->fileHash,
    me->fileSize,
    static_cast<const ValidPathInfo &>(*me));

NarInfo::NarInfo(const Store & store, const std::string & s, const std::string & whence)
    : ValidPathInfo(StorePath(StorePath::dummy), Hash(Hash::dummy)) // FIXME: hack
{
@@ -125,4 +134,59 @@ std::string NarInfo::to_string(const Store & store) const
    return res;
}

nlohmann::json NarInfo::toJSON(
    const Store & store,
    bool includeImpureInfo,
    HashFormat hashFormat) const
{
    using nlohmann::json;

    auto jsonObject = ValidPathInfo::toJSON(store, includeImpureInfo, hashFormat);

    if (includeImpureInfo) {
        if (!url.empty())
            jsonObject["url"] = url;
        if (!compression.empty())
            jsonObject["compression"] = compression;
        if (fileHash)
            jsonObject["downloadHash"] = fileHash->to_string(hashFormat, true);
        if (fileSize)
            jsonObject["downloadSize"] = fileSize;
    }

    return jsonObject;
}

NarInfo NarInfo::fromJSON(
    const Store & store,
    const StorePath & path,
    const nlohmann::json & json)
{
    using nlohmann::detail::value_t;

    NarInfo res {
        ValidPathInfo {
            path,
            UnkeyedValidPathInfo::fromJSON(store, json),
        }
    };

    if (json.contains("url"))
        res.url = ensureType(valueAt(json, "url"), value_t::string);

    if (json.contains("compression"))
        res.compression = ensureType(valueAt(json, "compression"), value_t::string);

    if (json.contains("downloadHash"))
        res.fileHash = Hash::parseAny(
            static_cast<const std::string &>(
                ensureType(valueAt(json, "downloadHash"), value_t::string)),
            std::nullopt);

    if (json.contains("downloadSize"))
        res.fileSize = ensureType(valueAt(json, "downloadSize"), value_t::number_integer);

    return res;
}

}
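Because the store path is now the key of the outer map rather than a `path` field inside each object, `NarInfo::fromJSON` takes the path as a separate argument. A hedged sketch (not part of the diff; `narInfoFromMapEntry` is a hypothetical helper) of turning one entry of the new `nix path-info --json` map back into a `NarInfo`:

```c++
// Hypothetical helper: reconstruct a NarInfo from one key/value pair of the
// map-shaped JSON, where the key carries the store path.
#include <string>
#include <nlohmann/json.hpp>
#include "nar-info.hh"
#include "store-api.hh"

static nix::NarInfo narInfoFromMapEntry(
    const nix::Store & store,
    const std::string & key,       // e.g. "/nix/store/8fv9...-bash-5.2-p15"
    const nlohmann::json & value)  // the metadata object stored under that key
{
    return nix::NarInfo::fromJSON(store, store.parseStorePath(key), value);
}
```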
@@ -24,7 +24,18 @@ struct NarInfo : ValidPathInfo
    NarInfo(const ValidPathInfo & info) : ValidPathInfo(info) { }
    NarInfo(const Store & store, const std::string & s, const std::string & whence);

    DECLARE_CMP(NarInfo);

    std::string to_string(const Store & store) const;

    nlohmann::json toJSON(
        const Store & store,
        bool includeImpureInfo,
        HashFormat hashFormat) const override;
    static NarInfo fromJSON(
        const Store & store,
        const StorePath & path,
        const nlohmann::json & json);
};

}
@@ -132,6 +132,41 @@ bool ParsedDerivation::useUidRange() const

static std::regex shVarName("[A-Za-z_][A-Za-z0-9_]*");

/**
 * Write a JSON representation of store object metadata, such as the
 * hash and the references.
 */
static nlohmann::json pathInfoToJSON(
    Store & store,
    const StorePathSet & storePaths)
{
    nlohmann::json::array_t jsonList = nlohmann::json::array();

    for (auto & storePath : storePaths) {
        auto info = store.queryPathInfo(storePath);

        auto & jsonPath = jsonList.emplace_back(
            info->toJSON(store, false, HashFormat::Base32));

        // Add the path to the object whose metadata we are including.
        jsonPath["path"] = store.printStorePath(storePath);

        jsonPath["valid"] = true;

        jsonPath["closureSize"] = ({
            uint64_t totalNarSize = 0;
            StorePathSet closure;
            store.computeFSClosure(info->path, closure, false, false);
            for (auto & p : closure) {
                auto info = store.queryPathInfo(p);
                totalNarSize += info->narSize;
            }
            totalNarSize;
        });
    }
    return jsonList;
}

std::optional<nlohmann::json> ParsedDerivation::prepareStructuredAttrs(Store & store, const StorePathSet & inputPaths)
{
    auto structuredAttrs = getStructuredAttrs();
@@ -152,8 +187,8 @@ std::optional<nlohmann::json> ParsedDerivation::prepareStructuredAttrs(Store & s
            StorePathSet storePaths;
            for (auto & p : *i)
                storePaths.insert(store.parseStorePath(p.get<std::string>()));
            json[i.key()] = store.pathInfoToJSON(
                store.exportReferences(storePaths, inputPaths), false, true);
            json[i.key()] = pathInfoToJSON(store,
                store.exportReferences(storePaths, inputPaths));
        }
    }
@@ -1,5 +1,8 @@
#include <nlohmann/json.hpp>

#include "path-info.hh"
#include "store-api.hh"
#include "json-utils.hh"

namespace nix {
@@ -144,4 +147,94 @@ ValidPathInfo::ValidPathInfo(
    }, std::move(ca).raw);
}

nlohmann::json UnkeyedValidPathInfo::toJSON(
    const Store & store,
    bool includeImpureInfo,
    HashFormat hashFormat) const
{
    using nlohmann::json;

    auto jsonObject = json::object();

    jsonObject["narHash"] = narHash.to_string(hashFormat, true);
    jsonObject["narSize"] = narSize;

    {
        auto & jsonRefs = (jsonObject["references"] = json::array());
        for (auto & ref : references)
            jsonRefs.emplace_back(store.printStorePath(ref));
    }

    if (ca)
        jsonObject["ca"] = renderContentAddress(ca);

    if (includeImpureInfo) {
        if (deriver)
            jsonObject["deriver"] = store.printStorePath(*deriver);

        if (registrationTime)
            jsonObject["registrationTime"] = registrationTime;

        if (ultimate)
            jsonObject["ultimate"] = ultimate;

        if (!sigs.empty()) {
            for (auto & sig : sigs)
                jsonObject["signatures"].push_back(sig);
        }
    }

    return jsonObject;
}

UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(
    const Store & store,
    const nlohmann::json & json)
{
    using nlohmann::detail::value_t;

    UnkeyedValidPathInfo res {
        Hash(Hash::dummy),
    };

    ensureType(json, value_t::object);
    res.narHash = Hash::parseAny(
        static_cast<const std::string &>(
            ensureType(valueAt(json, "narHash"), value_t::string)),
        std::nullopt);
    res.narSize = ensureType(valueAt(json, "narSize"), value_t::number_integer);

    try {
        auto & references = ensureType(valueAt(json, "references"), value_t::array);
        for (auto & input : references)
            res.references.insert(store.parseStorePath(static_cast<const std::string &>(input)));
    } catch (Error & e) {
        e.addTrace({}, "while reading key 'references'");
        throw;
    }

    if (json.contains("ca"))
        res.ca = ContentAddress::parse(
            static_cast<const std::string &>(
                ensureType(valueAt(json, "ca"), value_t::string)));

    if (json.contains("deriver"))
        res.deriver = store.parseStorePath(
            static_cast<const std::string &>(
                ensureType(valueAt(json, "deriver"), value_t::string)));

    if (json.contains("registrationTime"))
        res.registrationTime = ensureType(valueAt(json, "registrationTime"), value_t::number_integer);

    if (json.contains("ultimate"))
        res.ultimate = ensureType(valueAt(json, "ultimate"), value_t::boolean);

    if (json.contains("signatures"))
        res.sigs = valueAt(json, "signatures");

    return res;
}

}
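A hedged sketch of how the new pair of functions composes (not part of the diff; it assumes a constructed `Store` and an `UnkeyedValidPathInfo` are available, as in the unit tests added later in this commit): serialising with `includeImpureInfo` enabled and parsing the result back should reproduce the original value, which is what the characterization tests below assert.

```c++
// Illustrative round trip, assuming `store` and `info` are already constructed.
// Equality is provided by DECLARE_CMP / GENERATE_CMP_EXT for UnkeyedValidPathInfo.
#include <cassert>
#include <nlohmann/json.hpp>
#include "path-info.hh"
#include "store-api.hh"

static void roundTrip(const nix::Store & store, const nix::UnkeyedValidPathInfo & info)
{
    nlohmann::json j = info.toJSON(store, /*includeImpureInfo=*/true, nix::HashFormat::SRI);
    auto parsed = nix::UnkeyedValidPathInfo::fromJSON(store, j);
    assert(parsed == info);
}
```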
@@ -78,6 +78,18 @@ struct UnkeyedValidPathInfo
    DECLARE_CMP(UnkeyedValidPathInfo);

    virtual ~UnkeyedValidPathInfo() { }

    /**
     * @param includeImpureInfo If true, variable elements such as the
     * registration time are included.
     */
    virtual nlohmann::json toJSON(
        const Store & store,
        bool includeImpureInfo,
        HashFormat hashFormat) const;
    static UnkeyedValidPathInfo fromJSON(
        const Store & store,
        const nlohmann::json & json);
};

struct ValidPathInfo : UnkeyedValidPathInfo {
@@ -951,96 +951,6 @@ StorePathSet Store::exportReferences(const StorePathSet & storePaths, const Stor
    return paths;
}

json Store::pathInfoToJSON(const StorePathSet & storePaths,
    bool includeImpureInfo, bool showClosureSize,
    HashFormat hashFormat,
    AllowInvalidFlag allowInvalid)
{
    json::array_t jsonList = json::array();

    for (auto & storePath : storePaths) {
        auto & jsonPath = jsonList.emplace_back(json::object());

        try {
            auto info = queryPathInfo(storePath);

            jsonPath["path"] = printStorePath(info->path);
            jsonPath["valid"] = true;
            jsonPath["narHash"] = info->narHash.to_string(hashFormat, true);
            jsonPath["narSize"] = info->narSize;

            {
                auto & jsonRefs = (jsonPath["references"] = json::array());
                for (auto & ref : info->references)
                    jsonRefs.emplace_back(printStorePath(ref));
            }

            if (info->ca)
                jsonPath["ca"] = renderContentAddress(info->ca);

            std::pair<uint64_t, uint64_t> closureSizes;

            if (showClosureSize) {
                closureSizes = getClosureSize(info->path);
                jsonPath["closureSize"] = closureSizes.first;
            }

            if (includeImpureInfo) {

                if (info->deriver)
                    jsonPath["deriver"] = printStorePath(*info->deriver);

                if (info->registrationTime)
                    jsonPath["registrationTime"] = info->registrationTime;

                if (info->ultimate)
                    jsonPath["ultimate"] = info->ultimate;

                if (!info->sigs.empty()) {
                    for (auto & sig : info->sigs)
                        jsonPath["signatures"].push_back(sig);
                }

                auto narInfo = std::dynamic_pointer_cast<const NarInfo>(
                    std::shared_ptr<const ValidPathInfo>(info));

                if (narInfo) {
                    if (!narInfo->url.empty())
                        jsonPath["url"] = narInfo->url;
                    if (narInfo->fileHash)
                        jsonPath["downloadHash"] = narInfo->fileHash->to_string(hashFormat, true);
                    if (narInfo->fileSize)
                        jsonPath["downloadSize"] = narInfo->fileSize;
                    if (showClosureSize)
                        jsonPath["closureDownloadSize"] = closureSizes.second;
                }
            }

        } catch (InvalidPath &) {
            jsonPath["path"] = printStorePath(storePath);
            jsonPath["valid"] = false;
        }
    }
    return jsonList;
}


std::pair<uint64_t, uint64_t> Store::getClosureSize(const StorePath & storePath)
{
    uint64_t totalNarSize = 0, totalDownloadSize = 0;
    StorePathSet closure;
    computeFSClosure(storePath, closure, false, false);
    for (auto & p : closure) {
        auto info = queryPathInfo(p);
        totalNarSize += info->narSize;
        auto narInfo = std::dynamic_pointer_cast<const NarInfo>(
            std::shared_ptr<const ValidPathInfo>(info));
        if (narInfo)
            totalDownloadSize += narInfo->fileSize;
    }
    return {totalNarSize, totalDownloadSize};
}


const Store::Stats & Store::getStats()
{
@@ -80,7 +80,6 @@ typedef std::map<std::string, StorePath> OutputPathMap;

enum CheckSigsFlag : bool { NoCheckSigs = false, CheckSigs = true };
enum SubstituteFlag : bool { NoSubstitute = false, Substitute = true };
enum AllowInvalidFlag : bool { DisallowInvalid = false, AllowInvalid = true };

/**
 * Magic header of exportPath() output (obsolete).
@@ -665,28 +664,6 @@ public:
    std::string makeValidityRegistration(const StorePathSet & paths,
        bool showDerivers, bool showHash);

    /**
     * Write a JSON representation of store path metadata, such as the
     * hash and the references.
     *
     * @param includeImpureInfo If true, variable elements such as the
     * registration time are included.
     *
     * @param showClosureSize If true, the closure size of each path is
     * included.
     */
    nlohmann::json pathInfoToJSON(const StorePathSet & storePaths,
        bool includeImpureInfo, bool showClosureSize,
        HashFormat hashFormat = HashFormat::Base32,
        AllowInvalidFlag allowInvalid = DisallowInvalid);

    /**
     * @return the size of the closure of the specified path, that is,
     * the sum of the size of the NAR serialisation of each path in the
     * closure.
     */
    std::pair<uint64_t, uint64_t> getClosureSize(const StorePath & storePath);

    /**
     * Optimise the disk space usage of the Nix store by hard-linking files
     * with the same contents.
src/libstore/tests/nar-info.cc (new file, 85 lines)
@@ -0,0 +1,85 @@
#include <nlohmann/json.hpp>
#include <gtest/gtest.h>

#include "path-info.hh"
#include "nar-info.hh"

#include "tests/characterization.hh"
#include "tests/libstore.hh"

namespace nix {

using nlohmann::json;

class NarInfoTest : public CharacterizationTest, public LibStoreTest
{
    Path unitTestData = getUnitTestData() + "/libstore/nar-info";

    Path goldenMaster(PathView testStem) const override {
        return unitTestData + "/" + testStem + ".json";
    }
};

static NarInfo makeNarInfo(const Store & store, bool includeImpureInfo) {
    NarInfo info = ValidPathInfo {
        store,
        "foo",
        FixedOutputInfo {
            .method = FileIngestionMethod::Recursive,
            .hash = hashString(HashType::htSHA256, "(...)"),

            .references = {
                .others = {
                    StorePath {
                        "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
                    },
                },
                .self = true,
            },
        },
        Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
    };
    info.narSize = 34878;
    if (includeImpureInfo) {
        info.deriver = StorePath {
            "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
        };
        info.registrationTime = 23423;
        info.ultimate = true;
        info.sigs = { "asdf", "qwer" };

        info.url = "nar/1w1fff338fvdw53sqgamddn1b2xgds473pv6y13gizdbqjv4i5p3.nar.xz";
        info.compression = "xz";
        info.fileHash = Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=");
        info.fileSize = 4029176;
    }
    return info;
}

#define JSON_TEST(STEM, PURE)                               \
    TEST_F(NarInfoTest, NarInfo_ ## STEM ## _from_json) {   \
        readTest(#STEM, [&](const auto & encoded_) {        \
            auto encoded = json::parse(encoded_);           \
            auto expected = makeNarInfo(*store, PURE);      \
            NarInfo got = NarInfo::fromJSON(                \
                *store,                                     \
                expected.path,                              \
                encoded);                                   \
            ASSERT_EQ(got, expected);                       \
        });                                                 \
    }                                                       \
                                                            \
    TEST_F(NarInfoTest, NarInfo_ ## STEM ## _to_json) {     \
        writeTest(#STEM, [&]() -> json {                    \
            return makeNarInfo(*store, PURE)                \
                .toJSON(*store, PURE, HashFormat::SRI);     \
        }, [](const auto & file) {                          \
            return json::parse(readFile(file));             \
        }, [](const auto & file, const auto & got) {        \
            return writeFile(file, got.dump(2) + "\n");     \
        });                                                 \
    }

JSON_TEST(pure, false)
JSON_TEST(impure, true)

}
src/libstore/tests/path-info.cc (new file, 79 lines)
@@ -0,0 +1,79 @@
#include <nlohmann/json.hpp>
#include <gtest/gtest.h>

#include "path-info.hh"

#include "tests/characterization.hh"
#include "tests/libstore.hh"

namespace nix {

using nlohmann::json;

class PathInfoTest : public CharacterizationTest, public LibStoreTest
{
    Path unitTestData = getUnitTestData() + "/libstore/path-info";

    Path goldenMaster(PathView testStem) const override {
        return unitTestData + "/" + testStem + ".json";
    }
};

static UnkeyedValidPathInfo makePathInfo(const Store & store, bool includeImpureInfo) {
    UnkeyedValidPathInfo info = ValidPathInfo {
        store,
        "foo",
        FixedOutputInfo {
            .method = FileIngestionMethod::Recursive,
            .hash = hashString(HashType::htSHA256, "(...)"),

            .references = {
                .others = {
                    StorePath {
                        "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
                    },
                },
                .self = true,
            },
        },
        Hash::parseSRI("sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc="),
    };
    info.narSize = 34878;
    if (includeImpureInfo) {
        info.deriver = StorePath {
            "g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
        };
        info.registrationTime = 23423;
        info.ultimate = true;
        info.sigs = { "asdf", "qwer" };
    }
    return info;
}

#define JSON_TEST(STEM, PURE)                                          \
    TEST_F(PathInfoTest, PathInfo_ ## STEM ## _from_json) {            \
        readTest(#STEM, [&](const auto & encoded_) {                   \
            auto encoded = json::parse(encoded_);                      \
            UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON( \
                *store,                                                \
                encoded);                                              \
            auto expected = makePathInfo(*store, PURE);                \
            ASSERT_EQ(got, expected);                                  \
        });                                                            \
    }                                                                  \
                                                                       \
    TEST_F(PathInfoTest, PathInfo_ ## STEM ## _to_json) {              \
        writeTest(#STEM, [&]() -> json {                               \
            return makePathInfo(*store, PURE)                          \
                .toJSON(*store, PURE, HashFormat::SRI);                \
        }, [](const auto & file) {                                     \
            return json::parse(readFile(file));                        \
        }, [](const auto & file, const auto & got) {                   \
            return writeFile(file, got.dump(2) + "\n");                \
        });                                                            \
    }

JSON_TEST(pure, false)
JSON_TEST(impure, true)

}
@@ -4,6 +4,7 @@
#include <gtest/gtest.h>

#include "types.hh"
#include "environment-variables.hh"

namespace nix {
@@ -9,6 +9,75 @@
#include <nlohmann/json.hpp>

using namespace nix;
using nlohmann::json;

/**
 * @return the total size of a set of store objects (specified by path),
 * that is, the sum of the size of the NAR serialisation of each object
 * in the set.
 */
static uint64_t getStoreObjectsTotalSize(Store & store, const StorePathSet & closure)
{
    uint64_t totalNarSize = 0;
    for (auto & p : closure) {
        totalNarSize += store.queryPathInfo(p)->narSize;
    }
    return totalNarSize;
}


/**
 * Write a JSON representation of store object metadata, such as the
 * hash and the references.
 *
 * @param showClosureSize If true, the closure size of each path is
 * included.
 */
static json pathInfoToJSON(
    Store & store,
    const StorePathSet & storePaths,
    bool showClosureSize)
{
    json::object_t jsonAllObjects = json::object();

    for (auto & storePath : storePaths) {
        json jsonObject;

        try {
            auto info = store.queryPathInfo(storePath);

            jsonObject = info->toJSON(store, true, HashFormat::SRI);

            if (showClosureSize) {
                StorePathSet closure;
                store.computeFSClosure(storePath, closure, false, false);

                jsonObject["closureSize"] = getStoreObjectsTotalSize(store, closure);

                if (auto * narInfo = dynamic_cast<const NarInfo *>(&*info)) {
                    uint64_t totalDownloadSize = 0;
                    for (auto & p : closure) {
                        auto depInfo = store.queryPathInfo(p);
                        if (auto * depNarInfo = dynamic_cast<const NarInfo *>(&*depInfo))
                            totalDownloadSize += depNarInfo->fileSize;
                        else
                            throw Error("Missing .narinfo for dep %s of %s",
                                store.printStorePath(p),
                                store.printStorePath(storePath));
                    }
                    jsonObject["closureDownloadSize"] = totalDownloadSize;
                }
            }

        } catch (InvalidPath &) {
            jsonObject = nullptr;
        }

        jsonAllObjects[store.printStorePath(storePath)] = std::move(jsonObject);
    }
    return jsonAllObjects;
}


struct CmdPathInfo : StorePathsCommand, MixJSON
{
@@ -87,10 +156,11 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
                pathLen = std::max(pathLen, store->printStorePath(storePath).size());

        if (json) {
            std::cout << store->pathInfoToJSON(
            std::cout << pathInfoToJSON(
                *store,
                // FIXME: preserve order?
                StorePathSet(storePaths.begin(), storePaths.end()),
                true, showClosureSize, HashFormat::SRI, AllowInvalid).dump();
                showClosureSize).dump();
        }

        else {
@@ -107,8 +177,11 @@ struct CmdPathInfo : StorePathsCommand, MixJSON
                if (showSize)
                    printSize(info->narSize);

                if (showClosureSize)
                    printSize(store->getClosureSize(info->path).first);
                if (showClosureSize) {
                    StorePathSet closure;
                    store->computeFSClosure(storePath, closure, false, false);
                    printSize(getStoreObjectsTotalSize(*store, closure));
                }

                if (showSigs) {
                    std::cout << '\t';
@@ -43,7 +43,7 @@ R""(
  command):

  ```console
  # nix path-info --json --all | jq -r 'sort_by(.registrationTime)[-11:-1][].path'
  # nix path-info --json --all | jq -r 'to_entries | sort_by(.value.registrationTime) | .[-11:-1][] | .key'
  ```

* Show the size of the entire Nix store:
@@ -58,13 +58,13 @@ R""(

  ```console
  # nix path-info --json --all --closure-size \
    | jq 'map(select(.closureSize > 1e9)) | sort_by(.closureSize) | map([.path, .closureSize])'
    | jq 'map_values(.closureSize | select(. > 1e9)) | to_entries | sort_by(.value)'
  [
    …,
    [
      "/nix/store/zqamz3cz4dbzfihki2mk7a63mbkxz9xq-nixos-system-machine-20.09.20201112.3090c65",
      5887562256
    ]
    {
      .key = "/nix/store/zqamz3cz4dbzfihki2mk7a63mbkxz9xq-nixos-system-machine-20.09.20201112.3090c65",
      .value = 5887562256,
    }
  ]
  ```
@@ -120,6 +120,7 @@ nix_tests = \
  flakes/show.sh \
  impure-derivations.sh \
  path-from-hash-part.sh \
  path-info.sh \
  toString-path.sh \
  read-only-store.sh \
  nested-sandboxing.sh \
tests/functional/path-info.sh (new file, 23 lines)
@@ -0,0 +1,23 @@
source common.sh

echo foo > $TEST_ROOT/foo
foo=$(nix store add-file $TEST_ROOT/foo)

echo bar > $TEST_ROOT/bar
bar=$(nix store add-file $TEST_ROOT/bar)

echo baz > $TEST_ROOT/baz
baz=$(nix store add-file $TEST_ROOT/baz)
nix-store --delete "$baz"

diff --unified --color=always \
    <(nix path-info --json "$foo" "$bar" "$baz" |
        jq --sort-keys 'map_values(.narHash)') \
    <(jq --sort-keys <<-EOF
      {
        "$foo": "sha256-QvtAMbUl/uvi+LCObmqOhvNOapHdA2raiI4xG5zI5pA=",
        "$bar": "sha256-9fhYGu9fqxcQC2Kc81qh2RMo1QcLBUBo8U+pPn+jthQ=",
        "$baz": null
      }
EOF
    )
unit-test-data/libstore/nar-info/impure.json (new file, 20 lines)
@@ -0,0 +1,20 @@
{
  "ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh",
  "compression": "xz",
  "deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
  "downloadHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
  "downloadSize": 4029176,
  "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
  "narSize": 34878,
  "references": [
    "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
    "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
  ],
  "registrationTime": 23423,
  "signatures": [
    "asdf",
    "qwer"
  ],
  "ultimate": true,
  "url": "nar/1w1fff338fvdw53sqgamddn1b2xgds473pv6y13gizdbqjv4i5p3.nar.xz"
}
unit-test-data/libstore/nar-info/pure.json (new file, 9 lines)
@@ -0,0 +1,9 @@
{
  "ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh",
  "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
  "narSize": 34878,
  "references": [
    "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
    "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
  ]
}
unit-test-data/libstore/path-info/impure.json (new file, 16 lines)
@@ -0,0 +1,16 @@
{
  "ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh",
  "deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
  "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
  "narSize": 34878,
  "references": [
    "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
    "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
  ],
  "registrationTime": 23423,
  "signatures": [
    "asdf",
    "qwer"
  ],
  "ultimate": true
}
unit-test-data/libstore/path-info/pure.json (new file, 9 lines)
@@ -0,0 +1,9 @@
{
  "ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh",
  "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
  "narSize": 34878,
  "references": [
    "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
    "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
  ]
}