Make `nix path-info --json` return an object not array
Before, it returned a list of JSON objects with store object information, including the path in each object. Now, it maps the paths to JSON objects with the metadata sans path. This matches how `nix derivation show` works. Quite hilariously, none of our existing functional tests caught this change to `path-info --json`, though they did use it. So new functional tests just need to be added.
parent a7212e169b
commit cc46ea1630
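For comparison, `nix derivation show` already keys its JSON output by store path, which is the shape `nix path-info --json` now adopts. A rough console sketch (hypothetical paths and installables, not part of this commit):

```console
# nix derivation show nixpkgs#hello | jq 'keys'
[
  "/nix/store/…-hello-2.12.1.drv"
]
# nix path-info --json ./result | jq 'keys'
[
  "/nix/store/…-hello-2.12.1"
]
```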
````diff
@@ -30,3 +30,37 @@
 They are superceded by `nix flake update`.

 - Commit signature verification for the [`builtins.fetchGit`](@docroot@/language/builtins.md#builtins-fetchGit) is added as the new [`verified-fetches` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-verified-fetches).
+
+- [`nix path-info --json`](@docroot@/command-ref/new-cli/nix3-path-info.md)
+  (experimental) now returns a JSON map rather than JSON list.
+  The `path` field of each object has instead become the key in the outer map, since it is unique.
+  The `valid` field also goes away because we just use null instead.
+
+  - Old way:
+
+    ```json5
+    [
+      {
+        "path": "/nix/store/8fv91097mbh5049i9rglc73dx6kjg3qk-bash-5.2-p15",
+        "valid": true,
+        // ...
+      },
+      {
+        "path": "/nix/store/wffw7l0alvs3iw94cbgi1gmmbmw99sqb-home-manager-path",
+        "valid": false
+      }
+    ]
+    ```
+
+  - New way
+
+    ```json5
+    {
+      "/nix/store/8fv91097mbh5049i9rglc73dx6kjg3qk-bash-5.2-p15": {
+        // ...
+      },
+      "/nix/store/wffw7l0alvs3iw94cbgi1gmmbmw99sqb-home-manager-path": null,
+    }
+    ```
+
+  This makes it match `nix derivation show`, which also maps store paths to information.
````
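Since invalid or missing store objects are now represented by a `null` value rather than `"valid": false`, consumers that previously filtered on `.valid` can filter on `null` instead. A minimal sketch, assuming `$paths` holds some store paths (hypothetical, not part of this commit):

```console
# nix path-info --json $paths | jq -r 'to_entries[] | select(.value == null) | .key'
```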
```diff
@@ -164,8 +164,12 @@ NarInfo NarInfo::fromJSON(
 {
     using nlohmann::detail::value_t;

-    NarInfo res { ValidPathInfo::fromJSON(store, json) };
-    res.path = path;
+    NarInfo res {
+        ValidPathInfo {
+            path,
+            UnkeyedValidPathInfo::fromJSON(store, json),
+        }
+    };

     if (json.contains("url"))
         res.url = ensureType(valueAt(json, "url"), value_t::string);
```
```diff
@@ -148,6 +148,11 @@ static nlohmann::json pathInfoToJSON(
         auto & jsonPath = jsonList.emplace_back(
             info->toJSON(store, false, HashFormat::Base32));

+        // Add the path to the object whose metadata we are including.
+        jsonPath["path"] = store.printStorePath(storePath);
+
+        jsonPath["valid"] = true;
+
         jsonPath["closureSize"] = ({
             uint64_t totalNarSize = 0;
             StorePathSet closure;
```
```diff
@@ -148,7 +148,7 @@ ValidPathInfo::ValidPathInfo(
 }


-nlohmann::json ValidPathInfo::toJSON(
+nlohmann::json UnkeyedValidPathInfo::toJSON(
     const Store & store,
     bool includeImpureInfo,
     HashFormat hashFormat) const
@@ -157,8 +157,6 @@ nlohmann::json ValidPathInfo::toJSON(

     auto jsonObject = json::object();

-    jsonObject["path"] = store.printStorePath(path);
-    jsonObject["valid"] = true;
     jsonObject["narHash"] = narHash.to_string(hashFormat, true);
     jsonObject["narSize"] = narSize;

@@ -190,21 +188,17 @@ nlohmann::json ValidPathInfo::toJSON(
     return jsonObject;
 }

-ValidPathInfo ValidPathInfo::fromJSON(
+UnkeyedValidPathInfo UnkeyedValidPathInfo::fromJSON(
     const Store & store,
     const nlohmann::json & json)
 {
     using nlohmann::detail::value_t;

-    ValidPathInfo res {
-        StorePath(StorePath::dummy),
+    UnkeyedValidPathInfo res {
         Hash(Hash::dummy),
     };

     ensureType(json, value_t::object);
-    res.path = store.parseStorePath(
-        static_cast<const std::string &>(
-            ensureType(valueAt(json, "path"), value_t::string)));
     res.narHash = Hash::parseAny(
         static_cast<const std::string &>(
             ensureType(valueAt(json, "narHash"), value_t::string)),
```
```diff
@@ -78,6 +78,18 @@ struct UnkeyedValidPathInfo
     DECLARE_CMP(UnkeyedValidPathInfo);

+    virtual ~UnkeyedValidPathInfo() { }
+
+    /**
+     * @param includeImpureInfo If true, variable elements such as the
+     * registration time are included.
+     */
+    virtual nlohmann::json toJSON(
+        const Store & store,
+        bool includeImpureInfo,
+        HashFormat hashFormat) const;
+    static UnkeyedValidPathInfo fromJSON(
+        const Store & store,
+        const nlohmann::json & json);
 };

 struct ValidPathInfo : UnkeyedValidPathInfo {
@@ -125,18 +137,6 @@ struct ValidPathInfo : UnkeyedValidPathInfo {

     Strings shortRefs() const;

-    /**
-     * @param includeImpureInfo If true, variable elements such as the
-     * registration time are included.
-     */
-    virtual nlohmann::json toJSON(
-        const Store & store,
-        bool includeImpureInfo,
-        HashFormat hashFormat) const;
-    static ValidPathInfo fromJSON(
-        const Store & store,
-        const nlohmann::json & json);
-
     ValidPathInfo(const ValidPathInfo & other) = default;

     ValidPathInfo(StorePath && path, UnkeyedValidPathInfo info) : UnkeyedValidPathInfo(info), path(std::move(path)) { };
```
```diff
@@ -19,8 +19,8 @@ class PathInfoTest : public CharacterizationTest, public LibStoreTest
     }
 };

-static ValidPathInfo makePathInfo(const Store & store, bool includeImpureInfo) {
-    ValidPathInfo info {
+static UnkeyedValidPathInfo makePathInfo(const Store & store, bool includeImpureInfo) {
+    UnkeyedValidPathInfo info = ValidPathInfo {
         store,
         "foo",
         FixedOutputInfo {
@@ -54,7 +54,7 @@ static ValidPathInfo makePathInfo(const Store & store, bool includeImpureInfo) {
     TEST_F(PathInfoTest, PathInfo_ ## STEM ## _from_json) { \
         readTest(#STEM, [&](const auto & encoded_) { \
             auto encoded = json::parse(encoded_); \
-            ValidPathInfo got = ValidPathInfo::fromJSON( \
+            UnkeyedValidPathInfo got = UnkeyedValidPathInfo::fromJSON( \
                 *store, \
                 encoded); \
             auto expected = makePathInfo(*store, PURE); \
```
```diff
@@ -38,20 +38,21 @@ static json pathInfoToJSON(
     const StorePathSet & storePaths,
     bool showClosureSize)
 {
-    json::array_t jsonList = json::array();
+    json::object_t jsonAllObjects = json::object();

     for (auto & storePath : storePaths) {
+        json jsonObject;
+
         try {
             auto info = store.queryPathInfo(storePath);

-            auto & jsonPath = jsonList.emplace_back(
-                info->toJSON(store, true, HashFormat::SRI));
+            jsonObject = info->toJSON(store, true, HashFormat::SRI);

             if (showClosureSize) {
                 StorePathSet closure;
                 store.computeFSClosure(storePath, closure, false, false);

-                jsonPath["closureSize"] = getStoreObjectsTotalSize(store, closure);
+                jsonObject["closureSize"] = getStoreObjectsTotalSize(store, closure);

                 if (auto * narInfo = dynamic_cast<const NarInfo *>(&*info)) {
                     uint64_t totalDownloadSize = 0;
@@ -64,17 +65,17 @@ static json pathInfoToJSON(
                             store.printStorePath(p),
                             store.printStorePath(storePath));
                     }
-                    jsonPath["closureDownloadSize"] = totalDownloadSize;
+                    jsonObject["closureDownloadSize"] = totalDownloadSize;
                 }
             }

         } catch (InvalidPath &) {
-            auto & jsonPath = jsonList.emplace_back(json::object());
-            jsonPath["path"] = store.printStorePath(storePath);
-            jsonPath["valid"] = false;
+            jsonObject = nullptr;
         }

+        jsonAllObjects[store.printStorePath(storePath)] = std::move(jsonObject);
     }
-    return jsonList;
+    return jsonAllObjects;
 }

```
````diff
@@ -43,7 +43,7 @@ R""(
   command):

   ```console
-  # nix path-info --json --all | jq -r 'sort_by(.registrationTime)[-11:-1][].path'
+  # nix path-info --json --all | jq -r 'to_entries | sort_by(.value.registrationTime) | .[-11:-1][] | .key'
   ```

 * Show the size of the entire Nix store:

@@ -58,13 +58,13 @@ R""(

   ```console
   # nix path-info --json --all --closure-size \
-    | jq 'map(select(.closureSize > 1e9)) | sort_by(.closureSize) | map([.path, .closureSize])'
+    | jq 'map_values(.closureSize | select(. < 1e9)) | to_entries | sort_by(.value)'
   [
     …,
-    [
-      "/nix/store/zqamz3cz4dbzfihki2mk7a63mbkxz9xq-nixos-system-machine-20.09.20201112.3090c65",
-      5887562256
-    ]
+    {
+      .key = "/nix/store/zqamz3cz4dbzfihki2mk7a63mbkxz9xq-nixos-system-machine-20.09.20201112.3090c65",
+      .value = 5887562256,
+    }
   ]
   ```
````
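The same `to_entries` / `map_values` pattern applies to other jq pipelines over the new map-shaped output. For example, a rough sketch (not from this commit) of listing the ten largest store objects by `narSize`:

```console
# nix path-info --json --all | jq -r 'to_entries | sort_by(.value.narSize) | .[-10:][] | "\(.value.narSize)\t\(.key)"'
```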
```diff
@@ -120,6 +120,7 @@ nix_tests = \
   flakes/show.sh \
   impure-derivations.sh \
   path-from-hash-part.sh \
+  path-info.sh \
   toString-path.sh \
   read-only-store.sh \
   nested-sandboxing.sh \
```
tests/functional/path-info.sh (new file, 23 lines)
```diff
@@ -0,0 +1,23 @@
+source common.sh
+
+echo foo > $TEST_ROOT/foo
+foo=$(nix store add-file $TEST_ROOT/foo)
+
+echo bar > $TEST_ROOT/bar
+bar=$(nix store add-file $TEST_ROOT/bar)
+
+echo baz > $TEST_ROOT/baz
+baz=$(nix store add-file $TEST_ROOT/baz)
+nix-store --delete "$baz"
+
+diff --unified --color=always \
+    <(nix path-info --json "$foo" "$bar" "$baz" |
+        jq --sort-keys 'map_values(.narHash)') \
+    <(jq --sort-keys <<-EOF
+      {
+        "$foo": "sha256-QvtAMbUl/uvi+LCObmqOhvNOapHdA2raiI4xG5zI5pA=",
+        "$bar": "sha256-9fhYGu9fqxcQC2Kc81qh2RMo1QcLBUBo8U+pPn+jthQ=",
+        "$baz": null
+      }
+EOF
+    )
```
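The deleted `$baz` path exercises the invalid-path branch: it must map to `null` in the output. A quick manual spot-check in the same spirit (a sketch reusing the shell variables set up by the test above):

```console
# nix path-info --json "$foo" "$bar" "$baz" | jq --arg p "$baz" -e '.[$p] == null'
true
```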
```diff
@@ -6,7 +6,6 @@
   "downloadSize": 4029176,
   "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
   "narSize": 34878,
-  "path": "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo",
   "references": [
     "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
     "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
@@ -17,6 +16,5 @@
     "qwer"
   ],
   "ultimate": true,
-  "url": "nar/1w1fff338fvdw53sqgamddn1b2xgds473pv6y13gizdbqjv4i5p3.nar.xz",
-  "valid": true
+  "url": "nar/1w1fff338fvdw53sqgamddn1b2xgds473pv6y13gizdbqjv4i5p3.nar.xz"
 }
```
```diff
@@ -2,10 +2,8 @@
   "ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh",
   "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
   "narSize": 34878,
-  "path": "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo",
   "references": [
     "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
     "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
-  ],
-  "valid": true
+  ]
 }
```
```diff
@@ -3,7 +3,6 @@
   "deriver": "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar.drv",
   "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
   "narSize": 34878,
-  "path": "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo",
   "references": [
     "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
     "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
@@ -13,6 +12,5 @@
     "asdf",
     "qwer"
   ],
-  "ultimate": true,
-  "valid": true
+  "ultimate": true
 }
```
```diff
@@ -2,10 +2,8 @@
   "ca": "fixed:r:sha256:1lr187v6dck1rjh2j6svpikcfz53wyl3qrlcbb405zlh13x0khhh",
   "narHash": "sha256-FePFYIlMuycIXPZbWi7LGEiMmZSX9FMbaQenWBzm1Sc=",
   "narSize": 34878,
-  "path": "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo",
   "references": [
     "/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar",
     "/nix/store/n5wkd9frr45pa74if5gpz9j7mifg27fh-foo"
-  ],
-  "valid": true
+  ]
 }
```