Translate example/default values from JSON to Nix (#357)

Yannik Sander 2021-11-11 20:29:44 +01:00 committed by GitHub
parent 22f10dc4e6
commit f3571efdef
9 changed files with 246 additions and 34 deletions
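
Illustration, not part of the diff below: a minimal sketch of the behaviour change, assuming only serde_json and the print_value helper introduced in flake-info/src/data/prettyprint.rs further down. Previously, option defaults and examples were rendered with serde_json::to_string_pretty (plain strings were passed through unchanged); after this commit they are printed as Nix expressions. The example value is made up.

use serde_json::json;

fn main() {
    let default = json!({ "enable": true, "ports": [80, 443] });

    // Old rendering: JSON syntax via serde_json::to_string_pretty, i.e.
    // {
    //   "enable": true,
    //   "ports": [
    //     80,
    //     443
    //   ]
    // }
    println!("{}", serde_json::to_string_pretty(&default).unwrap());

    // New rendering: the same value printed as a Nix expression by print_value
    // (defined in prettyprint.rs below), i.e.
    // {
    //   enable = true;
    //   ports = [
    //     80
    //     443
    //   ];
    // }
    // print_value is not importable from this standalone sketch, so the call
    // is shown for comparison only:
    // println!("{}", print_value(default));
}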

View file

@@ -1,7 +1,7 @@
 name: "Flakes: Hourly import to Elasticsearch"
 on:
+  workflow_dispatch:
   schedule:
     - cron: '0 * * * *'

View file

@@ -1,7 +1,7 @@
 name: "Nixpkgs: Hourly import to Elasticsearch"
 on:
+  workflow_dispatch:
   schedule:
     - cron: '0 * * * *'

View file

@@ -1 +1 @@
-23
+24

View file

@@ -57,7 +57,9 @@ enum Command {
         channel: String,
     },
     Group {
-        #[structopt(help = "Points to a TOML or JSON file containing info targets. If file does not end in 'toml' json is assumed")]
+        #[structopt(
+            help = "Points to a TOML or JSON file containing info targets. If file does not end in 'toml' json is assumed"
+        )]
         targets: PathBuf,
         name: String,
@@ -208,7 +210,11 @@ async fn run_command(
         let info = flake_info::get_flake_info(source.to_flake_ref(), temp_store, extra)
             .map_err(FlakeInfoError::Flake)?;

-        let ident = ("flake".to_owned(), info.name, info.revision.unwrap_or("latest".into()));
+        let ident = (
+            "flake".to_owned(),
+            info.name,
+            info.revision.unwrap_or("latest".into()),
+        );

         Ok((exports, ident))
     }

View file

@@ -9,6 +9,7 @@ use serde::{Deserialize, Serialize};
 use super::{
     import,
+    prettyprint::print_value,
     system::System,
     utility::{AttributeQuery, Flatten, OneOrMany, Reverse},
 };
@@ -301,10 +302,10 @@ impl From<import::NixOption> for Derivation {
                 <xml xmlns:xlink=\"http://www.w3.org/1999/xlink\">
                 <para>{}</para>
                 </xml>
-                ", description
+                ",
+                description
             );
             pandoc.set_input(InputKind::Pipe(description_xml));
             pandoc.set_input_format(InputFormat::DocBook, Vec::new());
             pandoc.set_output(OutputKind::Pipe);
@@ -329,18 +330,8 @@ impl From<import::NixOption> for Derivation {
             option_name_reverse: Reverse(name.clone()),
             option_description: description.clone(),
             option_description_reverse: description.map(Reverse),
-            option_default: default.map(|v| {
-                v.as_str().map_or_else(
-                    || serde_json::to_string_pretty(&v).unwrap(),
-                    |s| s.to_owned(),
-                )
-            }),
-            option_example: example.map(|v| {
-                v.as_str().map_or_else(
-                    || serde_json::to_string_pretty(&v).unwrap(),
-                    |s| s.to_owned(),
-                )
-            }),
+            option_default: default.map(print_value),
+            option_example: example.map(print_value),
             option_flake: flake,
             option_type,
             option_name_query: AttributeQuery::new(&name),

View file

@@ -1,6 +1,7 @@
 mod export;
 mod flake;
 pub mod import;
+mod prettyprint;
 mod source;
 mod system;
 mod utility;

View file

@@ -0,0 +1,197 @@
use std::fmt::Display;

use serde_json::Value;

struct Indent(usize);

impl Indent {
    fn next(&self) -> Indent {
        Indent(self.0 + 1)
    }
}

impl Display for Indent {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{:width$}", "", width = self.0 * 2)
    }
}

pub fn print_value(value: Value) -> String {
    print_value_indent(value, Indent(0))
}

fn print_value_indent(value: Value, indent: Indent) -> String {
    match value {
        Value::Null => "null".to_owned(),
        Value::Bool(b) => format!("{}", b),
        Value::Number(n) => format!("{}", n),
        Value::String(s) => {
            let lines: Vec<&str> = s.lines().collect();
            if lines.len() > 1 {
                let lines = lines.join(&format!("\n{}", indent.next()));
                return format!(
                    r#"''
{next_indent}{lines}
{indent}''"#,
                    indent = indent,
                    next_indent = indent.next(),
                    lines = lines
                );
            }

            format!("{:?}", s)
        }
        Value::Array(a) => {
            if a.is_empty() {
                return "[ ]".to_owned();
            }

            let items = a
                .into_iter()
                .map(|v| print_value_indent(v, indent.next()))
                .collect::<Vec<_>>()
                .join(&format!("\n{}", indent.next()));

            return format!(
                "[
{next_indent}{items}
{indent}]",
                indent = indent,
                next_indent = indent.next(),
                items = items
            );
        }
        Value::Object(o) => {
            if o.is_empty() {
                return "{ }".to_owned();
            }

            let items = o
                .into_iter()
                .map(|(k, v)| format!("{} = {}", k, print_value_indent(v, indent.next())))
                .collect::<Vec<_>>()
                .join(&format!(";\n{}", indent.next()));

            return format!(
                "{{
{next_indent}{items};
{indent}}}",
                indent = indent,
                next_indent = indent.next(),
                items = items
            );
        }
    }
}

#[cfg(test)]
mod tests {
    use serde_json::json;

    use super::*;

    #[test]
    fn test_string() {
        let json = json!("Hello World");
        assert_eq!(print_value(json), "\"Hello World\"");
    }

    #[test]
    fn test_multi_line_string() {
        let json = json!(
            r#" Hello
World
!!!"#
        );
        assert_eq!(
            print_value(json),
            r#"''
   Hello
  World
  !!!
''"#
        );
    }

    #[test]
    fn test_num() {
        let json = json!(1);
        assert_eq!(print_value(json), "1");
    }

    #[test]
    fn test_bool() {
        let json = json!(true);
        assert_eq!(print_value(json), "true");
    }

    #[test]
    fn test_empty_list() {
        let json = json!([]);
        assert_eq!(print_value(json), "[ ]");
    }

    #[test]
    fn test_filled_list() {
        let json = json!([1, "hello", true, null]);
        assert_eq!(
            print_value(json),
            r#"[
  1
  "hello"
  true
  null
]"#
        );
    }

    #[test]
    fn test_empty_set() {
        let json = json!({});
        assert_eq!(print_value(json), "{ }");
    }

    #[test]
    fn test_filled_set() {
        let json = json!({"hello": "world"});
        assert_eq!(
            print_value(json),
            "{
  hello = \"world\";
}"
        );
    }

    #[test]
    fn test_nested() {
        let json = json!([
            "HDMI-0",
            {
                "output": "DVI-0",
                "primary": true
            },
            {
                "monitorConfig": "Option \"Rotate\" \"left\"",
                "output": "DVI-1"
            },
            ["hello", "word"]
        ]);
        assert_eq!(
            print_value(json),
            r#"[
  "HDMI-0"
  {
    output = "DVI-0";
    primary = true;
  }
  {
    monitorConfig = "Option \"Rotate\" \"left\"";
    output = "DVI-1";
  }
  [
    "hello"
    "word"
  ]
]"#
        );
    }
}

View file

@@ -1,10 +1,10 @@
 use anyhow::{Context, Result};
 use serde::{Deserialize, Serialize};
 use std::{
+    ffi::OsStr,
     fs::{self, File},
     io::Read,
     path::Path,
-    ffi::OsStr,
 };

 pub type Hash = String;
@@ -35,10 +35,9 @@ pub enum Source {
 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
 struct TomlDocument {
-    sources: Vec<Source>
+    sources: Vec<Source>,
 }

 impl Source {
     pub fn to_flake_ref(&self) -> FlakeRef {
         match self {
@@ -76,7 +75,6 @@ impl Source {
     }

     pub fn read_sources_file(path: &Path) -> Result<Vec<Source>> {
         let mut file = File::open(path).with_context(|| "Failed to open input file")?;
         let mut buf = String::new();
@@ -85,8 +83,7 @@ impl Source {
         if path.extension() == Some(OsStr::new("toml")) {
             let document: TomlDocument = toml::from_str(&buf)?;
             Ok(document.sources)
-        }
-        else {
+        } else {
             Ok(serde_json::from_str(&buf)?)
         }
     }

View file

@@ -2,7 +2,14 @@ use std::{borrow::Borrow, collections::HashMap};
 use clap::arg_enum;
 pub use elasticsearch::http::transport::Transport;
-use elasticsearch::{BulkOperation, Elasticsearch as Client, http::response::{self, Response}, indices::{IndicesCreateParts, IndicesDeleteAliasParts, IndicesDeleteParts, IndicesExistsParts, IndicesGetAliasParts, IndicesPutAliasParts, IndicesUpdateAliasesParts}};
+use elasticsearch::{
+    http::response::{self, Response},
+    indices::{
+        IndicesCreateParts, IndicesDeleteAliasParts, IndicesDeleteParts, IndicesExistsParts,
+        IndicesGetAliasParts, IndicesPutAliasParts, IndicesUpdateAliasesParts,
+    },
+    BulkOperation, Elasticsearch as Client,
+};
 use lazy_static::lazy_static;
 use log::{info, warn};
 use serde_json::{json, Value};
@@ -376,14 +383,27 @@ impl Elasticsearch {
     ) -> Result<(), ElasticsearchError> {
         // delete old alias
         info!("Try deletig old alias");
-        let response = self.client.indices().get_alias(IndicesGetAliasParts::Name(&[alias])).send().await
-            .map_err(ElasticsearchError::InitIndexError)?;
-        let indices = response.json::<HashMap<String,Value>>().await.map_err(ElasticsearchError::InitIndexError)?.keys().cloned().collect::<Vec<String>>();
-        self
-            .client
-            .indices()
-            .delete_alias(IndicesDeleteAliasParts::IndexName(&indices.iter().map(AsRef::as_ref).collect::<Vec<_>>(), &[alias]))
+        let response = self
+            .client
+            .indices()
+            .get_alias(IndicesGetAliasParts::Name(&[alias]))
+            .send()
+            .await
+            .map_err(ElasticsearchError::InitIndexError)?;
+        let indices = response
+            .json::<HashMap<String, Value>>()
+            .await
+            .map_err(ElasticsearchError::InitIndexError)?
+            .keys()
+            .cloned()
+            .collect::<Vec<String>>();
+        self.client
+            .indices()
+            .delete_alias(IndicesDeleteAliasParts::IndexName(
+                &indices.iter().map(AsRef::as_ref).collect::<Vec<_>>(),
+                &[alias],
+            ))
             .send()
             .await
             .map_err(ElasticsearchError::InitIndexError)?;