Add jsondocck tool, and use it for rustdoc JSON

Rune Tynan 2021-01-15 20:34:15 -05:00
parent f09fb488f7
commit 7715656edd
25 changed files with 546 additions and 820 deletions

View File

@@ -101,6 +101,12 @@ version = "0.4.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4d25d88fd6b8041580a654f9d0c581a047baee2b3efee13275f2fc392fc75034"
+[[package]]
+name = "array_tool"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f8cb5d814eb646a863c4f24978cff2880c4be96ad8cde2c0f0678732902e271"
[[package]]
name = "arrayref"
version = "0.3.6"
@@ -1630,6 +1636,32 @@ version = "0.11.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92c245af8786f6ac35f95ca14feca9119e71339aaab41e878e7cdd655c97e9e5"
+[[package]]
+name = "jsondocck"
+version = "0.1.0"
+dependencies = [
+"getopts",
+"jsonpath_lib",
+"lazy_static",
+"regex",
+"serde",
+"serde_json",
+"shlex",
+]
+[[package]]
+name = "jsonpath_lib"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "61352ec23883402b7d30b3313c16cbabefb8907361c4eb669d990cbb87ceee5a"
+dependencies = [
+"array_tool",
+"env_logger 0.7.1",
+"log",
+"serde",
+"serde_json",
+]
[[package]]
name = "jsonrpc-client-transports"
version = "14.2.1"
@@ -2854,9 +2886,9 @@ dependencies = [
[[package]]
name = "regex"
-version = "1.3.9"
+version = "1.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c3780fcf44b193bc4d09f36d2a3c87b251da4a046c87795a0d35f4f927ad8e6"
+checksum = "d9251239e129e16308e70d853559389de218ac275b515068abc96829d05b948a"
dependencies = [
"aho-corasick",
"memchr",
@@ -2876,9 +2908,9 @@ dependencies = [
[[package]]
name = "regex-syntax"
-version = "0.6.18"
+version = "0.6.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "26412eb97c6b088a6997e05f69403a802a92d520de2f8e63c2b65f9e0f47c4e8"
+checksum = "b5eb417147ba9860a96cfe72a0b93bf88fee1744b5636ec99ab20c1aa9376581"
[[package]]
name = "remote-test-client"
@@ -4578,6 +4610,7 @@ version = "1.0.59"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dcac07dbffa1c65e7f816ab9eba78eb142c6d44410f4eeba1e26e4f5dfa56b95"
dependencies = [
+"indexmap",
"itoa",
"ryu",
"serde",

View File

@@ -31,6 +31,7 @@ members = [
"src/tools/rustdoc-themes",
"src/tools/unicode-table-generator",
"src/tools/expand-yaml-anchors",
+"src/tools/jsondocck",
]
exclude = [

View File

@@ -751,6 +751,11 @@ impl<'a> Builder<'a> {
cmd
}
+/// Gets a path to the jsondocck tool
+pub fn jsondocck(&self, compiler: Compiler, target: TargetSelection) -> PathBuf {
+self.ensure(tool::JsonDocCk { compiler, target })
+}
/// Return the path to `llvm-config` for the target, if it exists.
///
/// Note that this returns `None` if LLVM is disabled, or if we're in a

View File

@@ -1072,6 +1072,8 @@ note: if you're sure you want to do this, please open an issue as to why. In the
cmd.arg("--docck-python").arg(builder.python());
+cmd.arg("--jsondocck-path").arg(builder.jsondocck(compiler, target));
if builder.config.build.ends_with("apple-darwin") {
// Force /usr/bin/python3 on macOS for LLDB tests because we're loading the
// LLDB plugin's compiled module which only works with the system python

View File

@@ -367,6 +367,7 @@ bootstrap_tool!(
RustdocTheme, "src/tools/rustdoc-themes", "rustdoc-themes";
ExpandYamlAnchors, "src/tools/expand-yaml-anchors", "expand-yaml-anchors";
LintDocs, "src/tools/lint-docs", "lint-docs";
+JsonDocCk, "src/tools/jsondocck", "jsondocck", is_unstable_tool = true;
);
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)]

View File

@@ -4,6 +4,8 @@
# `index` or `paths`. It DOES NOT check that the structure of the produced json is actually in
# any way correct, for example an empty map would pass.
+# FIXME: Better error output
import sys
import json
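The hunk above only touches the script's header comment; the script itself is not shown in this commit. For orientation, the kind of referential check check_missing_items.py describes can be sketched in Rust roughly as follows. This is an illustration only, not the Python implementation; the key names (root, index, paths, inner.items) are taken from the rustdoc JSON samples elsewhere in this diff.

// Rough sketch: walk the ids reachable from `root` and flag any id that is in
// neither `index` nor `paths`. Structural correctness of entries is not checked,
// which matches the caveat in the script's comment above.
use serde_json::Value;

fn missing_ids(krate: &Value) -> Vec<String> {
    let index = &krate["index"];
    let paths = &krate["paths"];
    let mut missing = Vec::new();
    let mut todo: Vec<String> =
        krate["root"].as_str().map(str::to_string).into_iter().collect();
    while let Some(id) = todo.pop() {
        if index.get(&id).is_none() && paths.get(&id).is_none() {
            missing.push(id);
            continue;
        }
        // Modules list their children under `inner.items`; follow those references too.
        if let Some(items) = index[&id]["inner"]["items"].as_array() {
            todo.extend(items.iter().filter_map(|v| v.as_str().map(str::to_string)));
        }
    }
    missing
}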

View File

@@ -1,132 +0,0 @@
#!/usr/bin/env python
# This script can check that an expected json blob is a subset of what actually gets produced.
# The comparison is independent of the value of IDs (which are unstable) and instead uses their
# relative ordering to check them against eachother by looking them up in their respective blob's
# `index` or `paths` mappings. To add a new test run `rustdoc --output-format json -o . yourtest.rs`
# and then create `yourtest.expected` by stripping unnecessary details from `yourtest.json`. If
# you're on windows, replace `\` with `/`.
# WARNING: The error messages produced by this may be misleading, in the case of list re-ordering
# it may point to apparently unrelated keys.
import copy
import sys
import json
import types
# Used instead of the string ids when used as references.
# Not used as keys in `index` or `paths`
class ID(str):
pass
class SubsetException(Exception):
def __init__(self, msg, trace):
self.msg = msg
self.trace = msg
super().__init__("{}: {}".format(trace, msg))
def check_subset(expected_main, actual_main, base_dir):
expected_index = expected_main["index"]
expected_paths = expected_main["paths"]
actual_index = actual_main["index"]
actual_paths = actual_main["paths"]
already_checked = set()
def _check_subset(expected, actual, trace):
expected_type = type(expected)
actual_type = type(actual)
if actual_type is str:
actual = normalize(actual).replace(base_dir, "$TEST_BASE_DIR")
if expected_type is not actual_type:
raise SubsetException(
"expected type `{}`, got `{}`".format(expected_type, actual_type), trace
)
if expected_type in (int, bool, str) and expected != actual:
raise SubsetException("expected `{}`, got: `{}`".format(expected, actual), trace)
if expected_type is dict:
for key in expected:
if key not in actual:
raise SubsetException(
"Key `{}` not found in output".format(key), trace
)
new_trace = copy.deepcopy(trace)
new_trace.append(key)
_check_subset(expected[key], actual[key], new_trace)
elif expected_type is list:
expected_elements = len(expected)
actual_elements = len(actual)
if expected_elements != actual_elements:
raise SubsetException(
"Found {} items, expected {}".format(
expected_elements, actual_elements
),
trace,
)
for expected, actual in zip(expected, actual):
new_trace = copy.deepcopy(trace)
new_trace.append(expected)
_check_subset(expected, actual, new_trace)
elif expected_type is ID and expected not in already_checked:
already_checked.add(expected)
_check_subset(
expected_index.get(expected, {}), actual_index.get(actual, {}), trace
)
_check_subset(
expected_paths.get(expected, {}), actual_paths.get(actual, {}), trace
)
_check_subset(expected_main["root"], actual_main["root"], [])
def rustdoc_object_hook(obj):
# No need to convert paths, index and external_crates keys to ids, since
# they are the target of resolution, and never a source itself.
if "id" in obj and obj["id"]:
obj["id"] = ID(obj["id"])
if "root" in obj:
obj["root"] = ID(obj["root"])
if "items" in obj:
obj["items"] = [ID(id) for id in obj["items"]]
if "variants" in obj:
obj["variants"] = [ID(id) for id in obj["variants"]]
if "fields" in obj:
obj["fields"] = [ID(id) for id in obj["fields"]]
if "impls" in obj:
obj["impls"] = [ID(id) for id in obj["impls"]]
if "implementors" in obj:
obj["implementors"] = [ID(id) for id in obj["implementors"]]
if "links" in obj:
obj["links"] = {s: ID(id) for s, id in obj["links"]}
if "variant_kind" in obj and obj["variant_kind"] == "struct":
obj["variant_inner"] = [ID(id) for id in obj["variant_inner"]]
return obj
def main(expected_fpath, actual_fpath, base_dir):
print(
"checking that {} is a logical subset of {}".format(
expected_fpath, actual_fpath
)
)
with open(expected_fpath) as expected_file:
expected_main = json.load(expected_file, object_hook=rustdoc_object_hook)
with open(actual_fpath) as actual_file:
actual_main = json.load(actual_file, object_hook=rustdoc_object_hook)
check_subset(expected_main, actual_main, base_dir)
print("all checks passed")
def normalize(s):
return s.replace('\\', '/')
if __name__ == "__main__":
if len(sys.argv) < 4:
print("Usage: `compare.py expected.json actual.json test-dir`")
else:
main(sys.argv[1], sys.argv[2], normalize(sys.argv[3]))

View File

@@ -1,196 +0,0 @@
{
"crate_version": null,
"external_crates": {},
"format_version": 1,
"includes_private": false,
"index": {
"0:0": {
"attrs": [],
"crate_id": 0,
"deprecation": null,
"docs": "",
"id": "0:0",
"inner": {
"is_crate": true,
"items": [
"0:3"
]
},
"kind": "module",
"links": {},
"name": "nested",
"source": {
"begin": [
2,
0
],
"end": [
7,
1
],
"filename": "$TEST_BASE_DIR/nested.rs"
},
"visibility": "public"
},
"0:3": {
"attrs": [],
"crate_id": 0,
"deprecation": null,
"docs": "",
"id": "0:3",
"inner": {
"is_crate": false,
"items": [
"0:4",
"0:7"
]
},
"kind": "module",
"links": {},
"name": "l1",
"source": {
"begin": [
2,
0
],
"end": [
7,
1
],
"filename": "$TEST_BASE_DIR/nested.rs"
},
"visibility": "public"
},
"0:4": {
"attrs": [],
"crate_id": 0,
"deprecation": null,
"docs": "",
"id": "0:4",
"inner": {
"is_crate": false,
"items": [
"0:5"
]
},
"kind": "module",
"links": {},
"name": "l3",
"source": {
"begin": [
3,
4
],
"end": [
5,
5
],
"filename": "$TEST_BASE_DIR/nested.rs"
},
"visibility": "public"
},
"0:5": {
"attrs": [],
"crate_id": 0,
"deprecation": null,
"docs": "",
"id": "0:5",
"inner": {
"fields": [],
"fields_stripped": false,
"generics": {
"params": [],
"where_predicates": []
},
"impls": [
"0:10",
"0:11",
"0:12",
"0:14",
"0:15"
],
"struct_type": "unit"
},
"kind": "struct",
"links": {},
"name": "L4",
"source": {
"begin": [
4,
8
],
"end": [
4,
22
],
"filename": "$TEST_BASE_DIR/nested.rs"
},
"visibility": "public"
},
"0:7": {
"attrs": [],
"crate_id": 0,
"deprecation": null,
"docs": "",
"id": "0:7",
"inner": {
"glob": false,
"id": "0:5",
"name": "L4",
"span": "l3::L4"
},
"kind": "import",
"links": {},
"name": null,
"source": {
"begin": [
6,
4
],
"end": [
6,
19
],
"filename": "$TEST_BASE_DIR/nested.rs"
},
"visibility": "public"
}
},
"paths": {
"0:0": {
"crate_id": 0,
"kind": "module",
"path": [
"nested"
]
},
"0:3": {
"crate_id": 0,
"kind": "module",
"path": [
"nested",
"l1"
]
},
"0:4": {
"crate_id": 0,
"kind": "module",
"path": [
"nested",
"l1",
"l3"
]
},
"0:5": {
"crate_id": 0,
"kind": "struct",
"path": [
"nested",
"l1",
"l3",
"L4"
]
}
},
"root": "0:0"
}

View File

@@ -1,7 +1,25 @@
// edition:2018
+// @has nested.json "$.index.['0:0'].kind" \"module\"
+// @has - "$.index.['0:0'].inner.is_crate" true
+// @has - "$.index.['0:0'].inner.items[*]" \"0:3\"
+// @has nested.json "$.index.['0:3'].kind" \"module\"
+// @has - "$.index.['0:3'].inner.is_crate" false
+// @has - "$.index.['0:3'].inner.items[*]" \"0:4\"
+// @has - "$.index.['0:3'].inner.items[*]" \"0:7\"
pub mod l1 {
+// @has nested.json "$.index.['0:4'].kind" \"module\"
+// @has - "$.index.['0:4'].inner.is_crate" false
+// @has - "$.index.['0:4'].inner.items[*]" \"0:5\"
pub mod l3 {
+// @has nested.json "$.index.['0:5'].kind" \"struct\"
+// @has - "$.index.['0:5'].inner.struct_type" \"unit\"
pub struct L4;
}
+// @has nested.json "$.index.['0:7'].kind" \"import\"
+// @has - "$.index.['0:7'].inner.glob" false
pub use l3::L4;
}
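The directives in this test use `@has` with a file name, with `-` (shorthand for the file used by the previous command), and with an expected JSON value. For reference, here is a hypothetical test exercising every form the checker accepts; the crate name, ids, and JSONPaths are invented, and the semantics follow `check_command` in the tool's main.rs later in this commit.

// File existence only:
// @has some_crate.json
// JSONPath must match at least once ("-" reuses the file from the previous command):
// @has - "$.index.['0:0'].kind"
// Some match must equal the given JSON value:
// @has - "$.index.['0:0'].kind" \"module\"
// Negated form: the path/value must not match:
// @!has - "$.index.['0:0'].kind" \"enum\"
// The JSONPath must match exactly this many times:
// @count - "$.index.['0:0'].inner.items[*]" 1
pub struct SomeItem;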

View File

@@ -1,456 +0,0 @@
{
"root": "0:0",
"version": null,
"includes_private": false,
"index": {
"0:9": {
"crate_id": 0,
"name": "Unit",
"source": {
"filename": "$TEST_BASE_DIR/structs.rs",
"begin": [
7,
0
],
"end": [
7,
16
]
},
"visibility": "public",
"docs": "",
"links": {},
"attrs": [],
"deprecation": null,
"kind": "struct",
"inner": {
"struct_type": "unit",
"generics": {
"params": [],
"where_predicates": []
},
"fields_stripped": false,
"fields": []
}
},
"0:8": {
"crate_id": 0,
"name": "1",
"source": {
"filename": "$TEST_BASE_DIR/structs.rs",
"begin": [
5,
22
],
"end": [
5,
28
]
},
"visibility": "default",
"docs": "",
"links": {},
"attrs": [],
"deprecation": null,
"kind": "struct_field",
"inner": {
"kind": "resolved_path",
"inner": {
"name": "String",
"id": "5:5035",
"args": {
"angle_bracketed": {
"args": [],
"bindings": []
}
},
"param_names": []
}
}
},
"0:18": {
"crate_id": 0,
"name": "stuff",
"source": {
"filename": "$TEST_BASE_DIR/structs.rs",
"begin": [
15,
4
],
"end": [
15,
17
]
},
"visibility": "default",
"docs": "",
"links": {},
"attrs": [],
"deprecation": null,
"kind": "struct_field",
"inner": {
"kind": "resolved_path",
"inner": {
"name": "Vec",
"id": "5:4322",
"args": {
"angle_bracketed": {
"args": [
{
"type": {
"kind": "generic",
"inner": "T"
}
}
],
"bindings": []
}
},
"param_names": []
}
}
},
"0:11": {
"crate_id": 0,
"name": "WithPrimitives",
"source": {
"filename": "$TEST_BASE_DIR/structs.rs",
"begin": [
9,
0
],
"end": [
12,
1
]
},
"visibility": "public",
"docs": "",
"links": {},
"attrs": [],
"deprecation": null,
"kind": "struct",
"inner": {
"struct_type": "plain",
"generics": {
"params": [
{
"name": "'a",
"kind": "lifetime"
}
],
"where_predicates": []
},
"fields_stripped": true
}
},
"0:14": {
"crate_id": 0,
"name": "s",
"source": {
"filename": "$TEST_BASE_DIR/structs.rs",
"begin": [
11,
4
],
"end": [
11,
14
]
},
"visibility": "default",
"docs": "",
"links": {},
"attrs": [],
"deprecation": null,
"kind": "struct_field",
"inner": {
"kind": "borrowed_ref",
"inner": {
"lifetime": "'a",
"mutable": false,
"type": {
"kind": "primitive",
"inner": "str"
}
}
}
},
"0:19": {
"crate_id": 0,
"name": "things",
"source": {
"filename": "$TEST_BASE_DIR/structs.rs",
"begin": [
16,
4
],
"end": [
16,
25
]
},
"visibility": "default",
"docs": "",
"links": {},
"attrs": [],
"deprecation": null,
"kind": "struct_field",
"inner": {
"kind": "resolved_path",
"inner": {
"name": "HashMap",
"id": "1:6600",
"args": {
"angle_bracketed": {
"args": [
{
"type": {
"kind": "generic",
"inner": "U"
}
},
{
"type": {
"kind": "generic",
"inner": "U"
}
}
],
"bindings": []
}
},
"param_names": []
}
}
},
"0:15": {
"crate_id": 0,
"name": "WithGenerics",
"source": {
"filename": "$TEST_BASE_DIR/structs.rs",
"begin": [
14,
0
],
"end": [
17,
1
]
},
"visibility": "public",
"docs": "",
"links": {},
"attrs": [],
"deprecation": null,
"kind": "struct",
"inner": {
"struct_type": "plain",
"generics": {
"params": [
{
"name": "T",
"kind": {
"type": {
"bounds": [],
"default": null
}
}
},
{
"name": "U",
"kind": {
"type": {
"bounds": [],
"default": null
}
}
}
],
"where_predicates": []
},
"fields_stripped": true
}
},
"0:0": {
"crate_id": 0,
"name": "structs",
"source": {
"filename": "$TEST_BASE_DIR/structs.rs",
"begin": [
1,
0
],
"end": [
17,
1
]
},
"visibility": "public",
"docs": "",
"links": {},
"attrs": [],
"deprecation": null,
"kind": "module",
"inner": {
"is_crate": true,
"items": [
"0:4",
"0:5",
"0:9",
"0:11",
"0:15"
]
}
},
"0:13": {
"crate_id": 0,
"name": "num",
"source": {
"filename": "$TEST_BASE_DIR/structs.rs",
"begin": [
10,
4
],
"end": [
10,
12
]
},
"visibility": "default",
"docs": "",
"links": {},
"attrs": [],
"deprecation": null,
"kind": "struct_field",
"inner": {
"kind": "primitive",
"inner": "u32"
}
},
"0:5": {
"crate_id": 0,
"name": "Tuple",
"source": {
"filename": "$TEST_BASE_DIR/structs.rs",
"begin": [
5,
0
],
"end": [
5,
30
]
},
"visibility": "public",
"docs": "",
"links": {},
"attrs": [],
"deprecation": null,
"kind": "struct",
"inner": {
"struct_type": "tuple",
"generics": {
"params": [],
"where_predicates": []
},
"fields_stripped": true
}
},
"0:4": {
"crate_id": 0,
"name": "PlainEmpty",
"source": {
"filename": "$TEST_BASE_DIR/structs.rs",
"begin": [
3,
0
],
"end": [
3,
24
]
},
"visibility": "public",
"docs": "",
"links": {},
"attrs": [],
"deprecation": null,
"kind": "struct",
"inner": {
"struct_type": "plain",
"generics": {
"params": [],
"where_predicates": []
},
"fields_stripped": false,
"fields": []
}
},
"0:7": {
"crate_id": 0,
"name": "0",
"source": {
"filename": "$TEST_BASE_DIR/structs.rs",
"begin": [
5,
17
],
"end": [
5,
20
]
},
"visibility": "default",
"docs": "",
"links": {},
"attrs": [],
"deprecation": null,
"kind": "struct_field",
"inner": {
"kind": "primitive",
"inner": "u32"
}
}
},
"paths": {
"5:4322": {
"crate_id": 5,
"path": [
"alloc",
"vec",
"Vec"
],
"kind": "struct"
},
"5:5035": {
"crate_id": 5,
"path": [
"alloc",
"string",
"String"
],
"kind": "struct"
},
"1:6600": {
"crate_id": 1,
"path": [
"std",
"collections",
"hash",
"map",
"HashMap"
],
"kind": "struct"
}
},
"external_crates": {
"1": {
"name": "std"
},
"5": {
"name": "alloc"
}
},
"format_version": 1
}

View File

@@ -1,17 +0,0 @@
use std::collections::HashMap;
pub struct PlainEmpty {}
pub struct Tuple(u32, String);
pub struct Unit;
pub struct WithPrimitives<'a> {
num: u32,
s: &'a str,
}
pub struct WithGenerics<T, U> {
stuff: Vec<T>,
things: HashMap<U, U>,
}

View File

@@ -0,0 +1,7 @@
// @has plain_empty.json "$.index.['0:3'].name" \"PlainEmpty\"
// @has - "$.index.['0:3'].visibility" \"public\"
// @has - "$.index.['0:3'].kind" \"struct\"
// @has - "$.index.['0:3'].inner.struct_type" \"plain\"
// @has - "$.index.['0:3'].inner.fields_stripped" false
// @has - "$.index.['0:3'].inner.fields" []
pub struct PlainEmpty {}

View File

@@ -0,0 +1,6 @@
// @has tuple.json "$.index.['0:3'].name" \"Tuple\"
// @has - "$.index.['0:3'].visibility" \"public\"
// @has - "$.index.['0:3'].kind" \"struct\"
// @has - "$.index.['0:3'].inner.struct_type" \"tuple\"
// @has - "$.index.['0:3'].inner.fields_stripped" true
pub struct Tuple(u32, String);

View File

@@ -0,0 +1,6 @@
// @has unit.json "$.index.['0:3'].name" \"Unit\"
// @has - "$.index.['0:3'].visibility" \"public\"
// @has - "$.index.['0:3'].kind" \"struct\"
// @has - "$.index.['0:3'].inner.struct_type" \"unit\"
// @has - "$.index.['0:3'].inner.fields" []
pub struct Unit;

View File

@@ -0,0 +1,15 @@
use std::collections::HashMap;
// @has with_generics.json "$.index.['0:4'].name" \"WithGenerics\"
// @has - "$.index.['0:4'].visibility" \"public\"
// @has - "$.index.['0:4'].kind" \"struct\"
// @has - "$.index.['0:4'].inner.generics.params[0].name" \"T\"
// @has - "$.index.['0:4'].inner.generics.params[0].kind.type"
// @has - "$.index.['0:4'].inner.generics.params[1].name" \"U\"
// @has - "$.index.['0:4'].inner.generics.params[1].kind.type"
// @has - "$.index.['0:4'].inner.struct_type" \"plain\"
// @has - "$.index.['0:4'].inner.fields_stripped" true
pub struct WithGenerics<T, U> {
stuff: Vec<T>,
things: HashMap<U, U>,
}

View File

@@ -0,0 +1,11 @@
// @has with_primitives.json "$.index.['0:3'].name" \"WithPrimitives\"
// @has - "$.index.['0:3'].visibility" \"public\"
// @has - "$.index.['0:3'].kind" \"struct\"
// @has - "$.index.['0:3'].inner.generics.params[0].name" \"\'a\"
// @has - "$.index.['0:3'].inner.generics.params[0].kind" \"lifetime\"
// @has - "$.index.['0:3'].inner.struct_type" \"plain\"
// @has - "$.index.['0:3'].inner.fields_stripped" true
pub struct WithPrimitives<'a> {
num: u32,
s: &'a str,
}

View File

@@ -198,6 +198,9 @@ pub struct Config {
/// The Python executable to use for htmldocck.
pub docck_python: String,
+/// The jsondocck executable.
+pub jsondocck_path: String,
/// The LLVM `FileCheck` binary path.
pub llvm_filecheck: Option<PathBuf>,

View File

@@ -45,6 +45,7 @@ fn config() -> Config {
"--rustc-path=",
"--lldb-python=",
"--docck-python=",
+"--jsondocck-path=",
"--src-base=",
"--build-base=",
"--stage-id=stage2",

View File

@@ -60,6 +60,7 @@ pub fn parse_config(args: Vec<String>) -> Config {
.optopt("", "rust-demangler-path", "path to rust-demangler to use in tests", "PATH")
.reqopt("", "lldb-python", "path to python to use for doc tests", "PATH")
.reqopt("", "docck-python", "path to python to use for doc tests", "PATH")
+.reqopt("", "jsondocck-path", "path to jsondocck to use for doc tests", "PATH")
.optopt("", "valgrind-path", "path to Valgrind executable for Valgrind tests", "PROGRAM")
.optflag("", "force-valgrind", "fail if Valgrind tests cannot be run under Valgrind")
.optopt("", "run-clang-based-tests-with", "path to Clang executable", "PATH")
@@ -196,6 +197,7 @@ pub fn parse_config(args: Vec<String>) -> Config {
let has_tidy = Command::new("tidy")
.arg("--version")
.stdout(Stdio::null())
+.stderr(Stdio::null())
.status()
.map_or(false, |status| status.success());
Config {
@@ -207,6 +209,7 @@ pub fn parse_config(args: Vec<String>) -> Config {
rust_demangler_path: matches.opt_str("rust-demangler-path").map(PathBuf::from),
lldb_python: matches.opt_str("lldb-python").unwrap(),
docck_python: matches.opt_str("docck-python").unwrap(),
+jsondocck_path: matches.opt_str("jsondocck-path").unwrap(),
valgrind_path: matches.opt_str("valgrind-path"),
force_valgrind: matches.opt_present("force-valgrind"),
run_clang_based_tests_with: matches.opt_str("run-clang-based-tests-with"),

View File

@ -2487,31 +2487,31 @@ impl<'test> TestCx<'test> {
}
let root = self.config.find_rust_src_root().unwrap();
+let mut json_out = out_dir.join(self.testpaths.file.file_stem().unwrap());
+json_out.set_extension("json");
+let res = self.cmd2procres(
+Command::new(&self.config.jsondocck_path)
+.arg("--doc-dir")
+.arg(root.join(&out_dir))
+.arg("--template")
+.arg(&self.testpaths.file),
+);
+if !res.status.success() {
+self.fatal_proc_rec("jsondocck failed!", &res)
+}
let mut json_out = out_dir.join(self.testpaths.file.file_stem().unwrap());
json_out.set_extension("json");
let res = self.cmd2procres(
Command::new(&self.config.docck_python)
-.arg(root.join("src/test/rustdoc-json/check_missing_items.py"))
+.arg(root.join("src/etc/check_missing_items.py"))
.arg(&json_out),
);
if !res.status.success() {
self.fatal_proc_rec("check_missing_items failed!", &res);
}
-let mut expected = self.testpaths.file.clone();
-expected.set_extension("expected");
-let res = self.cmd2procres(
-Command::new(&self.config.docck_python)
-.arg(root.join("src/test/rustdoc-json/compare.py"))
-.arg(&expected)
-.arg(&json_out)
-.arg(&expected.parent().unwrap()),
-);
-if !res.status.success() {
-self.fatal_proc_rec("compare failed!", &res);
-}
}
fn get_lines<P: AsRef<Path>>(

View File

@@ -0,0 +1,14 @@
[package]
name = "jsondocck"
version = "0.1.0"
authors = ["Rune Tynan <runetynan@gmail.com>"]
edition = "2018"
[dependencies]
jsonpath_lib = "0.2"
getopts = "0.2"
regex = "1.4"
lazy_static = "1.4"
shlex = "0.1"
serde = "1.0"
serde_json = "1.0"

View File

@@ -0,0 +1,70 @@
use crate::error::CkError;
use serde_json::Value;
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};
#[derive(Debug)]
pub struct Cache {
root: PathBuf,
files: HashMap<PathBuf, String>,
values: HashMap<PathBuf, Value>,
last_path: Option<PathBuf>,
}
impl Cache {
pub fn new(doc_dir: &str) -> Cache {
Cache {
root: <str as AsRef<Path>>::as_ref(doc_dir).to_owned(),
files: HashMap::new(),
values: HashMap::new(),
last_path: None,
}
}
fn resolve_path(&mut self, path: &String) -> Result<PathBuf, CkError> {
if path != "-" {
let resolve = self.root.join(path);
self.last_path = Some(resolve.clone());
Ok(resolve)
} else {
match &self.last_path {
Some(p) => Ok(p.clone()),
None => unreachable!(),
}
}
}
pub fn get_file(&mut self, path: &String) -> Result<String, CkError> {
let path = self.resolve_path(path)?;
if let Some(f) = self.files.get(&path) {
return Ok(f.clone());
}
let file = fs::read_to_string(&path)?;
self.files.insert(path, file.clone());
Ok(file)
// Err(_) => Err(CkError::FailedCheck(format!("File {:?} does not exist / could not be opened", path)))
}
pub fn get_value(&mut self, path: &String) -> Result<Value, CkError> {
let path = self.resolve_path(path)?;
if let Some(v) = self.values.get(&path) {
return Ok(v.clone());
}
let file = fs::File::open(&path)?;
// Err(_) => return Err(CkError::FailedCheck(format!("File {:?} does not exist / could not be opened", path)))
let val = serde_json::from_reader::<_, Value>(file)?;
self.values.insert(path, val.clone());
Ok(val)
// Err(_) => Err(CkError::FailedCheck(format!("File {:?} did not contain valid JSON", path)))
}
}
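A small illustration of the `-` shorthand handled by `resolve_path` above. This is not part of the commit; it assumes it sits in the same crate as `Cache` and `CkError`, and the file name is made up.

// Sketch: the second lookup reuses whatever path the previous call resolved,
// which is what lets test directives write `@has - ...` after a named file.
fn demo(cache: &mut Cache) -> Result<(), CkError> {
    let first = cache.get_value(&"nested.json".to_string())?; // <doc-dir>/nested.json
    let again = cache.get_value(&"-".to_string())?;           // same file as the call above
    assert_eq!(first, again);
    Ok(())
}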

View File

@@ -0,0 +1,41 @@
use getopts::Options;
#[derive(Debug)]
pub struct Config {
/// The directory documentation output was generated in
pub doc_dir: String,
/// The file documentation was generated for, with docck commands to check
pub template: String,
}
pub fn parse_config(args: Vec<String>) -> Config {
let mut opts = Options::new();
opts.reqopt("", "doc-dir", "Path to the documentation directory", "PATH")
.reqopt("", "template", "Path to the template file", "PATH")
.optflag("h", "help", "show this message");
let (argv0, args_) = args.split_first().unwrap();
if args.len() == 1 || args[1] == "-h" || args[1] == "--help" {
let message = format!("Usage: {} <doc-dir> <template>", argv0);
println!("{}", opts.usage(&message));
println!();
panic!()
}
let matches = &match opts.parse(args_) {
Ok(m) => m,
Err(f) => panic!("{:?}", f),
};
if matches.opt_present("h") || matches.opt_present("help") {
let message = format!("Usage: {} <doc-dir> <template>", argv0);
println!("{}", opts.usage(&message));
println!();
panic!()
}
Config {
doc_dir: matches.opt_str("doc-dir").unwrap(),
template: matches.opt_str("template").unwrap(),
}
}

View File

@@ -0,0 +1,28 @@
use crate::Command;
use std::error::Error;
use std::fmt;
#[derive(Debug)]
pub enum CkError {
/// A check failed. File didn't exist or failed to match the command
FailedCheck(String, Command),
/// An error triggered by some other error
Induced(Box<dyn Error>),
}
impl fmt::Display for CkError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
CkError::FailedCheck(msg, cmd) => {
write!(f, "Failed check: {} on line {}", msg, cmd.lineno)
}
CkError::Induced(err) => write!(f, "Check failed: {}", err),
}
}
}
impl<T: Error + 'static> From<T> for CkError {
fn from(err: T) -> CkError {
CkError::Induced(Box::new(err))
}
}
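The blanket `From<T: Error>` impl above is what lets the rest of the tool use `?` directly on `std::io` and `serde_json` errors. A minimal sketch, not part of the commit:

// Both error types convert into CkError::Induced through the blanket impl,
// so `?` needs no explicit map_err calls.
fn read_json(path: &std::path::Path) -> Result<serde_json::Value, CkError> {
    let text = std::fs::read_to_string(path)?; // std::io::Error -> CkError
    Ok(serde_json::from_str(&text)?)           // serde_json::Error -> CkError
}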

View File

@@ -0,0 +1,260 @@
use jsonpath_lib::select;
use lazy_static::lazy_static;
use regex::{Regex, RegexBuilder};
use serde_json::Value;
use std::{env, fmt, fs};
mod cache;
mod config;
mod error;
use cache::Cache;
use config::parse_config;
use error::CkError;
fn main() -> Result<(), String> {
let config = parse_config(env::args().collect());
let mut failed = Vec::new();
let mut cache = Cache::new(&config.doc_dir);
let commands = get_commands(&config.template)
.map_err(|_| format!("Jsondocck failed for {}", &config.template))?;
for command in commands {
if let Err(e) = check_command(command, &mut cache) {
failed.push(e);
}
}
if failed.is_empty() {
Ok(())
} else {
for i in failed {
eprintln!("{}", i);
}
Err(format!("Jsondocck failed for {}", &config.template))
}
}
#[derive(Debug)]
pub struct Command {
negated: bool,
kind: CommandKind,
args: Vec<String>,
lineno: usize,
}
#[derive(Debug)]
pub enum CommandKind {
Has,
Count,
}
impl CommandKind {
fn validate(&self, args: &[String], command_num: usize, lineno: usize) -> bool {
let count = match self {
CommandKind::Has => (1..=3).contains(&args.len()),
CommandKind::Count => 3 == args.len(),
};
if !count {
print_err(&format!("Incorrect number of arguments to `@{}`", self), lineno);
return false;
}
match self {
CommandKind::Has => {
if args[0] == "-" && command_num == 0 {
print_err(
&format!("Tried to use the previous path in the first command"),
lineno,
);
return false;
}
}
CommandKind::Count => {
if args[0] == "-" && command_num == 0 {
print_err(
&format!("Tried to use the previous path in the first command"),
lineno,
);
return false;
}
if args[2].parse::<usize>().is_err() {
print_err(&format!("Third argument to @count must be a valid usize"), lineno);
return false;
}
}
}
true
}
}
impl fmt::Display for CommandKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let text = match self {
CommandKind::Has => "has",
CommandKind::Count => "count",
};
write!(f, "{}", text)
}
}
lazy_static! {
static ref LINE_PATTERN: Regex = RegexBuilder::new(
r#"
\s(?P<invalid>!?)@(?P<negated>!?)
(?P<cmd>[A-Za-z]+(?:-[A-Za-z]+)*)
(?P<args>.*)$
"#
)
.ignore_whitespace(true)
.unicode(true)
.build()
.unwrap();
}
fn print_err(msg: &str, lineno: usize) {
eprintln!("Invalid command: {} on line {}", msg, lineno)
}
fn get_commands(template: &str) -> Result<Vec<Command>, ()> {
let mut commands = Vec::new();
let mut errors = false;
let file = fs::read_to_string(template).unwrap();
for (lineno, line) in file.split('\n').enumerate() {
let lineno = lineno + 1;
let cap = match LINE_PATTERN.captures(line) {
Some(c) => c,
None => continue,
};
let negated = match cap.name("negated") {
Some(m) => m.as_str() == "!",
None => false,
};
let cmd = cap.name("cmd").unwrap().as_str();
let cmd = match cmd {
"has" => CommandKind::Has,
"count" => CommandKind::Count,
_ => {
print_err(&format!("Unrecognized command name `@{}`", cmd), lineno);
errors = true;
continue;
}
};
if let Some(m) = cap.name("invalid") {
if m.as_str() == "!" {
print_err(
&format!(
"`!@{0}{1}`, (help: try with `@!{1}`)",
if negated { "!" } else { "" },
cmd,
),
lineno,
);
errors = true;
continue;
}
}
let args = match cap.name("args") {
Some(m) => shlex::split(m.as_str()).unwrap(),
None => vec![],
};
if !cmd.validate(&args, commands.len(), lineno) {
errors = true;
continue;
}
commands.push(Command { negated, kind: cmd, args, lineno })
}
if !errors { Ok(commands) } else { Err(()) }
}
fn check_command(command: Command, cache: &mut Cache) -> Result<(), CkError> {
let result = match command.kind {
CommandKind::Has => {
match command.args.len() {
// @has <path> = file existence
1 => match cache.get_file(&command.args[0]) {
Ok(_) => true,
Err(_) => false,
},
// @has <path> <jsonpath> = check path exists
2 => {
let val = cache.get_value(&command.args[0])?;
match select(&val, &command.args[1]) {
Ok(results) => !results.is_empty(),
Err(_) => false,
}
}
// @has <path> <jsonpath> <value> = check *any* item matched by path equals value
3 => {
let val = cache.get_value(&command.args[0])?;
match select(&val, &command.args[1]) {
Ok(results) => {
let pat: Value = serde_json::from_str(&command.args[2]).unwrap();
!results.is_empty() && results.into_iter().any(|val| *val == pat)
}
Err(_) => false,
}
}
_ => {
unreachable!()
}
}
}
CommandKind::Count => {
match command.args.len() {
// @count <path> <jsonpath> <count> = Check that the jsonpath matches exactly [count] times
3 => {
let expected: usize = command.args[2].parse().unwrap();
let val = cache.get_value(&command.args[0])?;
match select(&val, &command.args[1]) {
Ok(results) => results.len() == expected,
Err(_) => false,
}
}
_ => {
unreachable!()
}
}
}
};
if result == command.negated {
if command.negated {
Err(CkError::FailedCheck(
format!(
"`@!{} {}` matched when it shouldn't",
command.kind,
command.args.join(" ")
),
command,
))
} else {
// FIXME: In the future, try 'peeling back' each step, and see at what level the match failed
Err(CkError::FailedCheck(
format!(
"`@{} {}` didn't match when it should",
command.kind,
command.args.join(" ")
),
command,
))
}
} else {
Ok(())
}
}
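To make the three-argument `@has` branch above concrete: it parses the third argument as JSON, runs the same `jsonpath_lib::select` call, and accepts if any selected value equals it. A standalone sketch with an invented document, not part of the commit:

use jsonpath_lib::select;
use serde_json::{json, Value};

fn main() {
    // Stand-in for a rustdoc JSON file that would normally come through the Cache.
    let doc = json!({ "index": { "0:0": { "kind": "module" } } });

    // `@has - "$.index.['0:0'].kind" \"module\"` boils down to:
    let pat: Value = serde_json::from_str("\"module\"").unwrap();
    let matches = select(&doc, "$.index.['0:0'].kind").unwrap();
    assert!(!matches.is_empty() && matches.into_iter().any(|v| *v == pat));
}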