Add pandoc support back to rust script (#349)

* Add pandoc support back to rust script

* Don't render invalid HTML

* Bump schema version

* Enable debug build workflow

* Don't wrap script

* Provide pandoc in check phase

* Update Elasticsearch hash

* Add _forked_ rust-pandoc

* Execute Lua filters

* Update nixpkgs containing lua filters

* Expose filter path in dev Shell

* Don't check pandoc think

(for now)

* Provide filter path at build time

* Enable debug builds

* Enable debug builds

* Rename main.yaml to frontend.yaml

* Wrap flake-info with pandoc

* Add makeWrapper input

* Add pandoc filter path to wrapper

* Wrap descriptions in XML frame

* Render HTML

* Disable import on PR
Yannik Sander, 2021-09-14 09:49:33 +02:00 (committed by GitHub)
parent fff4e39add
commit 5cfde22eb7
10 changed files with 289 additions and 223 deletions


@@ -1 +1 @@
-22
+23

flake-info/Cargo.lock (generated)

File diff suppressed because it is too large.


@@ -23,6 +23,7 @@ fancy-regex = "0.6"
 tokio = { version = "*", features = ["full"] }
 reqwest = { version = "0.11", features = ["json", "blocking"] }
 sha2 = "0.9"
+pandoc = { git = "https://github.com/ysndr/rust-pandoc" }
 elasticsearch = {git = "https://github.com/elastic/elasticsearch-rs", features = ["rustls-tls"]}


@@ -5,13 +5,26 @@ rustPlatform.buildRustPackage {
   cargoLock = {
     lockFile = ./Cargo.lock;
     outputHashes = {
-      "elasticsearch-8.0.0-alpha.1" = "0x8iw4m16vy6i28mj30aqdwfw4a3hd174l8l9yigddn3cr53cagx";
+      "elasticsearch-8.0.0-alpha.1" = "sha256-gjmk3Q3LTAvLhzQ+k1knSp1HBwtqNiubjXNnLy/cS5M=";
+      "pandoc-0.8.6" = "sha256-NsHDzqWjQ17cznjOSpXOdUOhJjAO28Z6QZ6Mn6afVVs=";
     };
   };
   nativeBuildInputs = [ pkg-config ];
-  buildInputs = [ openssl openssl.dev ] ++ lib.optional pkgs.stdenv.isDarwin [ libiconv darwin.apple_sdk.frameworks.Security ];
+  buildInputs = [ openssl openssl.dev makeWrapper ]
+    ++ lib.optional pkgs.stdenv.isDarwin [ libiconv darwin.apple_sdk.frameworks.Security ];
+  checkInputs = [ pandoc ];
+  NIXPKGS_PANDOC_FILTERS_PATH = "${pkgs.path + "/doc/build-aux/pandoc-filters"}";
   checkFlags = [
     "--skip elastic::tests"
     "--skip nix_gc::tests"
   ];
+  postInstall = ''
+    wrapProgram $out/bin/flake-info \
+      --set NIXPKGS_PANDOC_FILTERS_PATH "${pkgs.path + "/doc/build-aux/pandoc-filters"}" \
+      --prefix PATH : ${pandoc}/bin
+  '';
 }
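
Note: pandoc now has to be resolvable both while the tests run (hence checkInputs) and when the installed binary is executed (hence the PATH prefix added by wrapProgram). As a minimal sketch, not taken from the repository, a check-phase test relying on this could look like:

    // Hypothetical test: pandoc must be on PATH when the test suite runs.
    #[test]
    fn pandoc_is_available() {
        let status = std::process::Command::new("pandoc")
            .arg("--version")
            .status()
            .expect("failed to spawn pandoc");
        assert!(status.success());
    }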


@@ -208,7 +208,7 @@ async fn run_command(
             let info = flake_info::get_flake_info(source.to_flake_ref(), temp_store, extra)
                 .map_err(FlakeInfoError::Flake)?;
-            let ident = ("flake".to_owned(), info.name, info.revision);
+            let ident = ("flake".to_owned(), info.name, info.revision.unwrap_or("latest".into()));
             Ok((exports, ident))
         }
@@ -241,7 +241,7 @@ async fn run_command(
                 _ => flake_info::process_flake(source, &kind, temp_store, &extra).and_then(
                     |result| {
                         flake_info::get_flake_info(source.to_flake_ref(), temp_store, extra)
-                            .map(|info| (result, info.revision))
+                            .map(|info| (result, info.revision.unwrap_or("latest".into())))
                     },
                 ),
             })
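
The revision field is now an Option<String> (see the Flake struct change further down), so the identifier falls back to "latest" when a flake has no pinned revision. A self-contained sketch of that fallback, using hypothetical values:

    // Assumption for illustration: a flake without a pinned revision.
    fn main() {
        let revision: Option<String> = None;
        let ident = ("flake".to_owned(), "example-flake".to_owned(), revision.unwrap_or("latest".into()));
        assert_eq!(ident.2, "latest");
        println!("{:?}", ident);
    }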


@@ -4,6 +4,7 @@
 use std::path::PathBuf;
 use crate::data::import::NixOption;
+use pandoc::{InputFormat, InputKind, OutputFormat, OutputKind, PandocOption, PandocOutput};
 use serde::{Deserialize, Serialize};
 use super::{
@@ -11,6 +12,13 @@ use super::{
     system::System,
     utility::{AttributeQuery, Flatten, OneOrMany, Reverse},
 };
+use lazy_static::lazy_static;
+lazy_static! {
+    static ref FILTERS_PATH: PathBuf = std::env::var("NIXPKGS_PANDOC_FILTERS_PATH")
+        .unwrap_or("".into())
+        .into();
+}
 type Flake = super::Flake;
@@ -275,6 +283,46 @@ impl From<import::NixOption> for Derivation {
             flake,
         }: import::NixOption,
     ) -> Self {
+        let citeref_filter = {
+            let mut p = FILTERS_PATH.clone();
+            p.push("docbook-reader/citerefentry-to-rst-role.lua");
+            p
+        };
+        let man_filter = {
+            let mut p = FILTERS_PATH.clone();
+            p.push("link-unix-man-references.lua");
+            p
+        };
+        let description = if let Some(description) = description {
+            let mut pandoc = pandoc::new();
+            let description_xml = format!(
+                "
+                <xml xmlns:xlink=\"http://www.w3.org/1999/xlink\">
+                <para>{}</para>
+                </xml>
+                ", description
+            );
+            pandoc.set_input(InputKind::Pipe(description_xml));
+            pandoc.set_input_format(InputFormat::DocBook, Vec::new());
+            pandoc.set_output(OutputKind::Pipe);
+            pandoc.set_output_format(OutputFormat::Html, Vec::new());
+            pandoc.add_options(&[
+                PandocOption::LuaFilter(citeref_filter),
+                PandocOption::LuaFilter(man_filter),
+            ]);
+            let result = pandoc.execute().unwrap();
+            match result {
+                PandocOutput::ToBuffer(description) => Some(description),
+                _ => unreachable!(),
+            }
+        } else {
+            description
+        };
         Derivation::Option {
             option_source: declarations.get(0).map(Clone::clone),
             option_name: name.clone(),
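
This conversion is the core of the PR: an option's DocBook description is wrapped in a small XML frame, piped through pandoc with the two nixpkgs Lua filters, and stored as HTML. A stripped-down, standalone sketch of the same pipeline (Lua filters omitted, hypothetical input; assumes the pandoc binary is on PATH and the forked pandoc crate added above is used):

    use pandoc::{InputFormat, InputKind, OutputFormat, OutputKind, PandocOutput};

    fn main() {
        // Hypothetical DocBook description, framed the same way as in the diff above.
        let description_xml = "<xml xmlns:xlink=\"http://www.w3.org/1999/xlink\">\
            <para>Whether to enable the <literal>foo</literal> service.</para></xml>"
            .to_string();

        let mut pandoc = pandoc::new();
        pandoc.set_input(InputKind::Pipe(description_xml));
        pandoc.set_input_format(InputFormat::DocBook, Vec::new());
        pandoc.set_output(OutputKind::Pipe);
        pandoc.set_output_format(OutputFormat::Html, Vec::new());

        match pandoc.execute().expect("pandoc failed") {
            // Expected output is roughly: <p>Whether to enable the <code>foo</code> service.</p>
            PandocOutput::ToBuffer(html) => println!("{}", html),
            _ => unreachable!("OutputKind::Pipe always yields ToBuffer"),
        }
    }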


@@ -17,7 +17,7 @@ pub struct Flake {
     #[serde(rename(serialize = "flake_name"), skip_deserializing)]
     pub name: String,
-    pub revision: String,
+    pub revision: Option<String>,
     #[serde(
         skip_deserializing,
@@ -69,7 +69,7 @@ mod tests {
             },
             name: "".into(),
             source: None,
-            revision: "9e2f634ffa45da3f5feb158a12ee32e1673bfe35".into()
+            revision: Some("9e2f634ffa45da3f5feb158a12ee32e1673bfe35".into())
         }
     );
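
Making revision an Option also changes deserialization: flake metadata without a revision key now parses instead of failing. A minimal sketch with a hypothetical stub struct (not the real Flake), assuming serde and serde_json are available:

    use serde::Deserialize;

    #[derive(Deserialize, Debug)]
    struct FlakeStub {
        revision: Option<String>,
    }

    fn main() {
        // serde deserializes a missing Option field to None by default.
        let pinned: FlakeStub = serde_json::from_str(r#"{"revision": "9e2f634"}"#).unwrap();
        let unpinned: FlakeStub = serde_json::from_str("{}").unwrap();
        assert_eq!(pinned.revision.as_deref(), Some("9e2f634"));
        assert_eq!(unpinned.revision, None);
    }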


@@ -2,11 +2,11 @@
   "nodes": {
     "nixpkgs": {
       "locked": {
-        "lastModified": 1629618782,
-        "narHash": "sha256-2K8SSXu3alo/URI3MClGdDSns6Gb4ZaW4LET53UWyKk=",
+        "lastModified": 1631118067,
+        "narHash": "sha256-tEcFvm3a6ToeBGwHdjfB2mVQwa4LZCZTQYE2LnY3ycA=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "870959c7fb3a42af1863bed9e1756086a74eb649",
+        "rev": "09cd65b33c5653d7d2954fef4b9f0e718c899743",
         "type": "github"
       },
       "original": {


@@ -26,6 +26,7 @@
       devShell = system:
         nixpkgs.legacyPackages.${system}.mkShell {
           inputsFrom = builtins.attrValues (packages system);
+          NIXPKGS_PANDOC_FILTERS_PATH = "${nixpkgs + "/doc/build-aux/pandoc-filters"}";
         };
     in
     {
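
With the filter path exported in the dev shell as well, a locally built flake-info (for example via cargo run inside the dev shell) should pick up the same Lua filters through the FILTERS_PATH lookup shown above, without needing the installed wrapper.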