Fix/explicit printing (#395)

* Extract pandoc function to own module

* Bump schema version

* Make string handling more explicit

* Update logging and pandoc crates

* Improve serializing error handling

* Serialize values as strings (for Elasticsearch)

* Perform option doc parsing entirely in Rust

* Show non-pandoc'ed results as code elements

* Parse the correct HTML string

* Change expected `option_type` type to String

* Treat unparsable strings as HTML, like non-DocBook strings

* Improve deserializing error reporting using serde_path_to_error (see the sketch after this list)

* Format code
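
Several of these bullets revolve around swapping bare `serde_json` deserialization for `serde_path_to_error`. As a minimal sketch of that pattern — not part of this commit; the `Config` struct and input are hypothetical:

use serde::Deserialize;

// Hypothetical target type, for illustration only.
#[derive(Debug, Deserialize)]
struct Config {
    name: String,
    threads: u32,
}

fn main() {
    // `threads` is deliberately wrong so the error path is exercised.
    let input = r#"{ "name": "flake-info", "threads": "not-a-number" }"#;
    let de = &mut serde_json::Deserializer::from_str(input);
    match serde_path_to_error::deserialize::<_, Config>(de) {
        Ok(config) => println!("parsed: {:?}", config),
        // Unlike a bare serde_json error, this names the failing field
        // (here `threads`), not just a line/column offset.
        Err(err) => eprintln!("error at `{}`: {}", err.path(), err),
    }
}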
Yannik Sander 2021-12-26 16:03:09 +01:00 committed by GitHub
parent a4d8421312
commit 3dfcf4fd45
13 changed files with 202 additions and 89 deletions

View file

@@ -1 +1 @@
-24
+25

flake-info/Cargo.lock (generated, 19 lines changed)
View file

@@ -273,9 +273,9 @@ dependencies = [
 [[package]]
 name = "env_logger"
-version = "0.8.4"
+version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3"
+checksum = "0b2cf0344971ee6c64c31be0d530793fba457d322dfec2810c453d0ef228f9c3"
 dependencies = [
  "atty",
  "humantime",
@@ -310,6 +310,7 @@ dependencies = [
  "reqwest",
  "serde",
  "serde_json",
+ "serde_path_to_error",
  "sha2",
  "structopt",
  "tempfile",
@@ -822,8 +823,9 @@ dependencies = [
 [[package]]
 name = "pandoc"
-version = "0.8.6"
-source = "git+https://github.com/ysndr/rust-pandoc#c16ba426cdea58084be731ef8028ba58ca670b40"
+version = "0.8.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0eac785b7de8de25c5ec48b3a9df1be552de03906f99145ed6d7da3d696c0dbb"
 dependencies = [
  "itertools",
 ]
@@ -1197,6 +1199,15 @@ dependencies = [
  "serde",
 ]

+[[package]]
+name = "serde_path_to_error"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d0421d4f173fab82d72d6babf36d57fae38b994ca5c2d78e704260ba6d12118b"
+dependencies = [
+ "serde",
+]
+
 [[package]]
 name = "serde_urlencoded"
 version = "0.7.0"

View file

@@ -10,12 +10,13 @@ edition = "2018"
 clap = "^2.33"
 serde = {version="1.0", features = ["derive"]}
 serde_json = "1.0"
+serde_path_to_error = "0.1.5"
 toml = "0.5"
 anyhow = "1.0"
 thiserror = "1.0"
 structopt = "0.3"
 command-run = "0.13"
-env_logger = "0.8"
+env_logger = "0.9"
 log = "0.4"
 tempfile = "3"
 lazy_static = "1.4"
@@ -23,7 +24,7 @@ fancy-regex = "0.6"
 tokio = { version = "*", features = ["full"] }
 reqwest = { version = "0.11", features = ["json", "blocking"] }
 sha2 = "0.9"
-pandoc = { git = "https://github.com/ysndr/rust-pandoc" }
+pandoc = "0.8"
 elasticsearch = {git = "https://github.com/elastic/elasticsearch-rs", features = ["rustls-tls"]}

View file

@@ -6,7 +6,6 @@ rustPlatform.buildRustPackage rec {
     lockFile = ./Cargo.lock;
     outputHashes = {
       "elasticsearch-8.0.0-alpha.1" = "sha256-gjmk3Q3LTAvLhzQ+k1knSp1HBwtqNiubjXNnLy/cS5M=";
-      "pandoc-0.8.6" = "sha256-NsHDzqWjQ17cznjOSpXOdUOhJjAO28Z6QZ6Mn6afVVs=";
     };
   };
   nativeBuildInputs = [ pkg-config ];

View file

@@ -273,14 +273,15 @@ async fn run_command(
         .collect::<Vec<_>>();

     if !errors.is_empty() {
         if exports.is_empty() {
             return Err(FlakeInfoError::Group(errors));
         }

         warn!("=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=");
-        warn!("Some group members could not be evaluated: {}", FlakeInfoError::Group(errors));
+        warn!(
+            "Some group members could not be evaluated: {}",
+            FlakeInfoError::Group(errors)
+        );
         warn!("=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=");
     }

     let hash = {

View file

@@ -72,7 +72,7 @@ let
   cleanUpOption = module: opt:
     let
-      applyOnAttr = n: f: lib.optionalAttrs (lib.hasAttr n opt) { ${n} = f opt.${n}; };
+      applyOnAttr = n: f: lib.optionalAttrs (builtins.hasAttr n opt) { ${n} = f opt.${n}; };
       mkDeclaration = decl:
         let
           discard = lib.concatStringsSep "/" (lib.take 4 (lib.splitString "/" decl)) + "/";
@@ -82,20 +82,18 @@ let
       # Replace functions by the string <function>
       substFunction = x:
-        if x ? _type && (x._type == "literalExample" || x._type == "literalExpression" || x._type == "literalDocBook") then
-          x.text
-        else if builtins.isAttrs x then
-          lib.mapAttrs (name: substFunction) x
+        if builtins.isAttrs x then
+          lib.mapAttrs (_:substFunction ) x
         else if builtins.isList x then
           map substFunction x
         else if lib.isFunction x then
-          "<function>"
+          "function"
         else
           x;
     in
     opt
-    // applyOnAttr "example" substFunction
     // applyOnAttr "default" substFunction
+    // applyOnAttr "example" substFunction # (_: { __type = "function"; })
     // applyOnAttr "type" substFunction
     // applyOnAttr "declarations" (map mkDeclaration)
     // lib.optionalAttrs (!isNixOS) { flake = [ flake module ]; };

View file

@@ -2,6 +2,7 @@ use crate::data::import::{FlakeEntry, Kind};
 use anyhow::{Context, Result};
 use command_run::{Command, LogTo};
 use log::debug;
+use serde_json::Deserializer;
 use std::fmt::Display;
 use std::fs::File;
 use std::io::Write;
@@ -43,8 +44,9 @@ pub fn get_derivation_info<T: AsRef<str> + Display>(
         .run()
         .with_context(|| format!("Failed to gather information about {}", flake_ref))
         .and_then(|o| {
-            debug!("stderr: {}", o.stderr_string_lossy());
-            serde_json::de::from_str(&o.stdout_string_lossy())
+            let output = &*o.stdout_string_lossy();
+            let de = &mut Deserializer::from_str(output);
+            serde_path_to_error::deserialize(de)
                 .with_context(|| format!("Failed to analyze flake {}", flake_ref))
         });
     parsed

View file

@@ -1,4 +1,5 @@
 use anyhow::{Context, Result};
+use serde_json::Deserializer;
 use std::io::Write;
 use std::{collections::HashMap, fmt::Display, fs::File};
@@ -38,9 +39,10 @@ pub fn get_nixpkgs_info<T: AsRef<str> + Display>(nixpkgs_channel: T) -> Result<V
             )
         })
         .and_then(|o| {
-            debug!("stderr: {}", o.stderr_string_lossy());
+            let output = &*o.stdout_string_lossy();
+            let de = &mut Deserializer::from_str(output);
             let attr_set: HashMap<String, Package> =
-                serde_json::de::from_str(&o.stdout_string_lossy())?;
+                serde_path_to_error::deserialize(de).with_context(|| "Could not parse packages")?;
             Ok(attr_set
                 .into_iter()
                 .map(|(attribute, package)| NixpkgsEntry::Derivation { attribute, package })
@@ -91,8 +93,10 @@ pub fn get_nixpkgs_options<T: AsRef<str> + Display>(
     }

     parsed.and_then(|o| {
-        debug!("stderr: {}", o.stderr_string_lossy());
-        let attr_set: Vec<NixOption> = serde_json::de::from_str(&o.stdout_string_lossy())?;
+        let output = &*o.stdout_string_lossy();
+        let de = &mut Deserializer::from_str(output);
+        let attr_set: Vec<NixOption> =
+            serde_path_to_error::deserialize(de).with_context(|| "Could not parse options")?;
         Ok(attr_set.into_iter().map(NixpkgsEntry::Option).collect())
     })
 }

View file

@@ -1,14 +1,13 @@
 /// This module defines the unified output format as expected by the elastic search
 /// Additionally, we implement conversions from the two possible input formats, i.e.
 /// Flakes, or Nixpkgs.
-use std::path::PathBuf;
+use std::{convert::TryInto, path::PathBuf};

+use super::{import::DocValue, pandoc::PandocExt};
 use crate::data::import::NixOption;
 use log::error;
-use pandoc::{
-    InputFormat, InputKind, OutputFormat, OutputKind, PandocError, PandocOption, PandocOutput,
-};
 use serde::{Deserialize, Serialize};
-use serde_json::Value;

 use super::{
     import,
@@ -107,9 +106,9 @@ pub enum Derivation {
         option_type: Option<String>,
-        option_default: Option<String>,
-        option_example: Option<String>,
+        option_default: Option<DocValue>,
+        option_example: Option<DocValue>,
         option_flake: Option<(String, String)>,
     },
@@ -290,49 +289,23 @@ impl From<import::NixOption> for Derivation {
             flake,
         }: import::NixOption,
     ) -> Self {
-        let citeref_filter = {
-            let mut p = FILTERS_PATH.clone();
-            p.push("docbook-reader/citerefentry-to-rst-role.lua");
-            p
-        };
-        let man_filter = {
-            let mut p = FILTERS_PATH.clone();
-            p.push("link-unix-man-references.lua");
-            p
-        };
-
-        let description = if let Some(description) = description {
-            let mut pandoc = pandoc::new();
-            let description_xml = format!(
-                "
-                <xml xmlns:xlink=\"http://www.w3.org/1999/xlink\">
-                <para>{}</para>
-                </xml>
-                ",
-                description
-            );
-            pandoc.set_input(InputKind::Pipe(description_xml));
-            pandoc.set_input_format(InputFormat::DocBook, Vec::new());
-            pandoc.set_output(OutputKind::Pipe);
-            pandoc.set_output_format(OutputFormat::Html, Vec::new());
-            pandoc.add_options(&[
-                PandocOption::LuaFilter(citeref_filter),
-                PandocOption::LuaFilter(man_filter),
-            ]);
-            let result = pandoc.execute().expect(&format!(
-                "Pandoc could not parse documentation of '{}'",
-                name
-            ));
-            match result {
-                PandocOutput::ToBuffer(description) => Some(description),
-                _ => unreachable!(),
-            }
-        } else {
-            description
-        };
+        let description = description
+            .as_ref()
+            .map(PandocExt::render)
+            .transpose()
+            .expect(&format!("Could not render descript of `{}`", name));
+        let option_default = default;
+        // .map(TryInto::try_into)
+        // .transpose()
+        // .expect(&format!("Could not render option_default of `{}`", name));
+        let option_example = example;
+        // .map(TryInto::try_into)
+        // .transpose()
+        // .expect(&format!("Could not render option_example of `{}`", name));
+        let option_type = option_type;
+        // .map(TryInto::try_into)
+        // .transpose()
+        // .expect(&format!("Could not render option_type of `{}`", name));

         Derivation::Option {
             option_source: declarations.get(0).map(Clone::clone),
@@ -340,8 +313,8 @@ impl From<import::NixOption> for Derivation {
             option_name_reverse: Reverse(name.clone()),
             option_description: description.clone(),
             option_description_reverse: description.map(Reverse),
-            option_default: default.map(print_value),
-            option_example: example.map(print_value),
+            option_default,
+            option_example,
             option_flake: flake,
             option_type,
             option_name_query: AttributeQuery::new(&name),
@@ -435,14 +408,15 @@ mod tests {
         let option: NixOption = serde_json::from_str(r#"
         {
             "declarations":["/nix/store/s1q1238ahiks5a4g6j6qhhfb3rlmamvz-source/nixos/modules/system/boot/luksroot.nix"],
-            "default":"",
+            "default": {"one": 1, "two" : { "three": "tree", "four": []}},
             "description":"Commands that should be run right after we have mounted our LUKS device.\n",
-            "example":"oneline\ntwoline\nthreeline\n",
+            "example":null,
             "internal":false,
             "loc":["boot","initrd","luks","devices","<name>","postOpenCommands"],
             "name":"boot.initrd.luks.devices.<name>.postOpenCommands",
-            "readOnly":false,"type":
-            "strings concatenated with \"\\n\"","visible":true
+            "readOnly":false,
+            "type": "boolean",
+            "visible":true
         }"#).unwrap();
         let option: Derivation = option.into();

View file

@@ -1,13 +1,19 @@
+use std::collections::{BTreeMap, HashMap};
+use std::convert::TryInto;
 use std::fmt::{self, write, Display};
 use std::marker::PhantomData;
 use std::{path::PathBuf, str::FromStr};

 use clap::arg_enum;
+use log::warn;
+use pandoc::PandocError;
 use serde::de::{self, MapAccess, Visitor};
 use serde::{Deserialize, Deserializer, Serialize, Serializer};
 use serde_json::Value;
 use thiserror::Error;

+use super::pandoc::PandocExt;
+use super::prettyprint::{self, print_value};
 use super::system::System;
 use super::utility::{Flatten, OneOrMany};
@@ -50,16 +56,55 @@ pub struct NixOption {
     pub description: Option<String>,
     pub name: String,
     #[serde(rename = "type")]
     /// Nix generated description of the options type
     pub option_type: Option<String>,
-    pub default: Option<Value>,
-    pub example: Option<Value>,
+    pub default: Option<DocValue>,
+    pub example: Option<DocValue>,
     /// If defined in a flake, contains defining flake and module
     pub flake: Option<(String, String)>,
 }

+#[derive(Debug, Clone, PartialEq, Deserialize)]
+#[serde(untagged)]
+pub enum DocValue {
+    Literal(Literal),
+    Value(Value),
+}
+
+#[derive(Debug, Clone, PartialEq, Deserialize)]
+#[serde(tag = "_type", content = "text")]
+pub enum Literal {
+    #[serde(rename = "literalExpression")]
+    LiteralExpression(Value),
+    #[serde(rename = "literalExample")]
+    LiteralExample(Value),
+    #[serde(rename = "literalDocBook")]
+    LiteralDocBook(String),
+}
+
+impl Serialize for DocValue {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        match self {
+            DocValue::Literal(Literal::LiteralExample(s) | Literal::LiteralExpression(s)) => {
+                return serializer.serialize_some(&s);
+            }
+            DocValue::Literal(Literal::LiteralDocBook(doc_book)) => {
+                return serializer.serialize_str(&doc_book.render().unwrap_or_else(|e| {
+                    warn!("Could not render docbook content: {}", e);
+                    doc_book.to_owned()
+                }));
+            }
+            DocValue::Value(v) => serializer.serialize_str(&print_value(v.to_owned())),
+        }
+    }
+}
+
 /// Package as defined in nixpkgs
 /// These packages usually have a "more" homogenic structure that is given by
 /// nixpkgs
@@ -299,4 +344,7 @@ mod tests {
         .map(|(attribute, package)| NixpkgsEntry::Derivation { attribute, package })
         .collect();
     }
+
+    #[test]
+    fn test_option_parsing() {}
 }
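
For context, a short sketch (not part of the commit; it assumes the `DocValue` and `Literal` definitions from the diff above are in scope) of the two JSON shapes the untagged enum accepts:

// serde tries untagged variants in order: an object carrying a `_type`
// tag deserializes as a Literal, anything else falls through to Value.
#[test]
fn docvalue_shapes() {
    let tagged = r#"{ "_type": "literalExpression", "text": "pkgs.hello" }"#;
    let plain = r#"{ "one": 1, "two": [2, 3] }"#;

    let lit: DocValue = serde_json::from_str(tagged).unwrap();
    let val: DocValue = serde_json::from_str(plain).unwrap();

    assert!(matches!(lit, DocValue::Literal(Literal::LiteralExpression(_))));
    assert!(matches!(val, DocValue::Value(_)));
}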

View file

@@ -1,6 +1,7 @@
 mod export;
 mod flake;
 pub mod import;
+mod pandoc;
 mod prettyprint;
 mod source;
 mod system;

View file

@@ -0,0 +1,64 @@
use std::path::PathBuf;

use lazy_static::lazy_static;
use log::debug;
use pandoc::{
    InputFormat, InputKind, OutputFormat, OutputKind, PandocError, PandocOption, PandocOutput,
};

lazy_static! {
    static ref FILTERS_PATH: PathBuf = std::env::var("NIXPKGS_PANDOC_FILTERS_PATH")
        .unwrap_or("".into())
        .into();
}

pub trait PandocExt {
    fn render(&self) -> Result<String, PandocError>;
}

impl<T: AsRef<str>> PandocExt for T {
    fn render(&self) -> Result<String, PandocError> {
        if !self.as_ref().contains("</") {
            return Ok(format!(
                "<rendered-docbook>{}</rendered-docbook>",
                self.as_ref()
            ));
        }

        let citeref_filter = {
            let mut p = FILTERS_PATH.clone();
            p.push("docbook-reader/citerefentry-to-rst-role.lua");
            p
        };
        let man_filter = {
            let mut p = FILTERS_PATH.clone();
            p.push("link-unix-man-references.lua");
            p
        };

        let mut pandoc = pandoc::new();
        let wrapper_xml = format!(
            "
            <xml xmlns:xlink=\"http://www.w3.org/1999/xlink\">
            <para>{}</para>
            </xml>
            ",
            self.as_ref()
        );
        pandoc.set_input(InputKind::Pipe(wrapper_xml));
        pandoc.set_input_format(InputFormat::DocBook, Vec::new());
        pandoc.set_output(OutputKind::Pipe);
        pandoc.set_output_format(OutputFormat::Html, Vec::new());
        pandoc.add_options(&[
            PandocOption::LuaFilter(citeref_filter),
            PandocOption::LuaFilter(man_filter),
        ]);

        pandoc.execute().map(|result| match result {
            PandocOutput::ToBuffer(description) => {
                format!("<rendered-docbook>{}</rendered-docbook>", description)
            }
            _ => unreachable!(),
        })
    }
}
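
A hedged usage sketch of the `PandocExt` blanket impl above (assumes `use super::pandoc::PandocExt;` from a sibling module, plus a `pandoc` binary and the nixpkgs Lua filters available at runtime; the input strings are illustrative):

// Both branches of render(): plain strings are wrapped without spawning
// pandoc, while anything containing a closing tag goes through DocBook -> HTML.
fn render_examples() -> Result<(), pandoc::PandocError> {
    let plain = "just plain text".render()?;
    assert_eq!(plain, "<rendered-docbook>just plain text</rendered-docbook>");

    // Contains "</", so this one is piped through pandoc with the
    // citerefentry and man-page Lua filters applied.
    let html = "<literal>boot.loader</literal> references".render()?;
    println!("{}", html);
    Ok(())
}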

View file

@@ -171,12 +171,14 @@ viewResultItem :
 viewResultItem channel _ show item =
     let
         showHtml value =
-            case Html.Parser.run value of
-                Ok nodes ->
-                    Html.Parser.Util.toVirtualDom nodes
+            case Html.Parser.run <| String.trim value of
+                Ok [ Html.Parser.Element "rendered-docbook" _ nodes ] ->
+                    Just <| Html.Parser.Util.toVirtualDom nodes
+
+                Ok _ ->
+                    Nothing

                 Err _ ->
-                    []
+                    Nothing

         default =
             "Not given"
@@ -214,15 +216,23 @@ viewResultItem channel _ show item =
             , div [] [ text "Description" ]
             , div [] <|
                 (item.source.description
-                    |> Maybe.map showHtml
+                    |> Maybe.andThen showHtml
                     |> Maybe.withDefault []
                 )
             , div [] [ text "Default value" ]
-            , div [] [ withEmpty (wrapped asPreCode) item.source.default ]
+            , div [] <|
+                (item.source.default
+                    |> Maybe.map (\value -> Maybe.withDefault [ asPreCode value ] (showHtml value))
+                    |> Maybe.withDefault [ asPre default ]
+                )
             , div [] [ text "Type" ]
             , div [] [ withEmpty asPre item.source.type_ ]
             , div [] [ text "Example" ]
-            , div [] [ withEmpty (wrapped asPreCode) item.source.example ]
+            , div [] <|
+                (item.source.example
+                    |> Maybe.map (\value -> Maybe.withDefault [ asPreCode value ] (showHtml value))
+                    |> Maybe.withDefault [ asPre default ]
+                )
             , div [] [ text "Declared in" ]
             , div [] <| findSource channel item.source