Translate example/default values from JSON to Nix (#357)
This commit is contained in:
parent
22f10dc4e6
commit
f3571efdef
2
.github/workflows/cron-flakes.yml
vendored
2
.github/workflows/cron-flakes.yml
vendored
|
@ -1,7 +1,7 @@
|
|||
name: "Flakes: Hourly import to Elasticsearch"
|
||||
|
||||
on:
|
||||
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '0 * * * *'
|
||||
|
||||
|
|
2
.github/workflows/cron-nixpkgs.yml
vendored
2
.github/workflows/cron-nixpkgs.yml
vendored
|
@ -1,7 +1,7 @@
|
|||
name: "Nixpkgs: Hourly import to Elasticsearch"
|
||||
|
||||
on:
|
||||
|
||||
workflow_dispatch:
|
||||
schedule:
|
||||
- cron: '0 * * * *'
|
||||
|
||||
|
|
|
@ -57,7 +57,9 @@ enum Command {
|
|||
channel: String,
|
||||
},
|
||||
Group {
|
||||
#[structopt(help = "Points to a TOML or JSON file containing info targets. If file does not end in 'toml' json is assumed")]
|
||||
#[structopt(
|
||||
help = "Points to a TOML or JSON file containing info targets. If file does not end in 'toml' json is assumed"
|
||||
)]
|
||||
targets: PathBuf,
|
||||
|
||||
name: String,
|
||||
|
@ -208,7 +210,11 @@ async fn run_command(
|
|||
let info = flake_info::get_flake_info(source.to_flake_ref(), temp_store, extra)
|
||||
.map_err(FlakeInfoError::Flake)?;
|
||||
|
||||
let ident = ("flake".to_owned(), info.name, info.revision.unwrap_or("latest".into()));
|
||||
let ident = (
|
||||
"flake".to_owned(),
|
||||
info.name,
|
||||
info.revision.unwrap_or("latest".into()),
|
||||
);
|
||||
|
||||
Ok((exports, ident))
|
||||
}
|
||||
|
|
|
@ -9,6 +9,7 @@ use serde::{Deserialize, Serialize};
|
|||
|
||||
use super::{
|
||||
import,
|
||||
prettyprint::print_value,
|
||||
system::System,
|
||||
utility::{AttributeQuery, Flatten, OneOrMany, Reverse},
|
||||
};
|
||||
|
@ -301,10 +302,10 @@ impl From<import::NixOption> for Derivation {
|
|||
<xml xmlns:xlink=\"http://www.w3.org/1999/xlink\">
|
||||
<para>{}</para>
|
||||
</xml>
|
||||
", description
|
||||
",
|
||||
description
|
||||
);
|
||||
|
||||
|
||||
pandoc.set_input(InputKind::Pipe(description_xml));
|
||||
pandoc.set_input_format(InputFormat::DocBook, Vec::new());
|
||||
pandoc.set_output(OutputKind::Pipe);
|
||||
|
@ -329,18 +330,8 @@ impl From<import::NixOption> for Derivation {
|
|||
option_name_reverse: Reverse(name.clone()),
|
||||
option_description: description.clone(),
|
||||
option_description_reverse: description.map(Reverse),
|
||||
option_default: default.map(|v| {
|
||||
v.as_str().map_or_else(
|
||||
|| serde_json::to_string_pretty(&v).unwrap(),
|
||||
|s| s.to_owned(),
|
||||
)
|
||||
}),
|
||||
option_example: example.map(|v| {
|
||||
v.as_str().map_or_else(
|
||||
|| serde_json::to_string_pretty(&v).unwrap(),
|
||||
|s| s.to_owned(),
|
||||
)
|
||||
}),
|
||||
option_default: default.map(print_value),
|
||||
option_example: example.map(print_value),
|
||||
option_flake: flake,
|
||||
option_type,
|
||||
option_name_query: AttributeQuery::new(&name),
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
mod export;
|
||||
mod flake;
|
||||
pub mod import;
|
||||
mod prettyprint;
|
||||
mod source;
|
||||
mod system;
|
||||
mod utility;
|
||||
|
|
197
flake-info/src/data/prettyprint.rs
Normal file
197
flake-info/src/data/prettyprint.rs
Normal file
|
@ -0,0 +1,197 @@
|
|||
use std::fmt::Display;
|
||||
|
||||
use serde_json::Value;
|
||||
|
||||
/// Nesting depth used while pretty-printing; one level renders as two spaces.
struct Indent(usize);

impl Indent {
    /// Returns the indentation one level deeper than `self`.
    fn next(&self) -> Indent {
        Indent(self.0 + 1)
    }
}

impl Display for Indent {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Emit two spaces per nesting level.
        f.write_str(&" ".repeat(self.0 * 2))
    }
}
|
||||
|
||||
pub fn print_value(value: Value) -> String {
|
||||
print_value_indent(value, Indent(0))
|
||||
}
|
||||
|
||||
fn print_value_indent(value: Value, indent: Indent) -> String {
|
||||
match value {
|
||||
Value::Null => "null".to_owned(),
|
||||
Value::Bool(b) => format!("{}", b),
|
||||
Value::Number(n) => format!("{}", n),
|
||||
Value::String(s) => {
|
||||
let lines: Vec<&str> = s.lines().collect();
|
||||
if lines.len() > 1 {
|
||||
let lines = lines.join(&format!("\n{}", indent.next()));
|
||||
return format!(
|
||||
r#"''
|
||||
{next_indent}{lines}
|
||||
{indent}''"#,
|
||||
indent = indent,
|
||||
next_indent = indent.next(),
|
||||
lines = lines
|
||||
);
|
||||
}
|
||||
|
||||
format!("{:?}", s)
|
||||
}
|
||||
Value::Array(a) => {
|
||||
if a.is_empty() {
|
||||
return "[ ]".to_owned();
|
||||
}
|
||||
let items = a
|
||||
.into_iter()
|
||||
.map(|v| print_value_indent(v, indent.next()))
|
||||
.collect::<Vec<_>>()
|
||||
.join(&format!("\n{}", indent.next()));
|
||||
|
||||
return format!(
|
||||
"[
|
||||
{next_indent}{items}
|
||||
{indent}]",
|
||||
indent = indent,
|
||||
next_indent = indent.next(),
|
||||
items = items
|
||||
);
|
||||
}
|
||||
Value::Object(o) => {
|
||||
if o.is_empty() {
|
||||
return "{ }".to_owned();
|
||||
}
|
||||
let items = o
|
||||
.into_iter()
|
||||
.map(|(k, v)| format!("{} = {}", k, print_value_indent(v, indent.next())))
|
||||
.collect::<Vec<_>>()
|
||||
.join(&format!(";\n{}", indent.next()));
|
||||
|
||||
return format!(
|
||||
"{{
|
||||
{next_indent}{items};
|
||||
{indent}}}",
|
||||
indent = indent,
|
||||
next_indent = indent.next(),
|
||||
items = items
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use serde_json::json;

    use super::*;

    // Expected values below are written with explicit `\n` escapes so the
    // indentation contract (two spaces per nesting level) is visible.

    #[test]
    fn test_string() {
        assert_eq!(print_value(json!("Hello World")), "\"Hello World\"");
    }

    #[test]
    fn test_multi_line_string() {
        let json = json!(" Hello\nWorld\n!!!");
        // Multi-line strings use '' quoting; each line gains one level of indent.
        assert_eq!(print_value(json), "''\n   Hello\n  World\n  !!!\n''");
    }

    #[test]
    fn test_num() {
        assert_eq!(print_value(json!(1)), "1");
    }

    #[test]
    fn test_bool() {
        assert_eq!(print_value(json!(true)), "true");
    }

    #[test]
    fn test_empty_list() {
        assert_eq!(print_value(json!([])), "[ ]");
    }

    #[test]
    fn test_filled_list() {
        let json = json!([1, "hello", true, null]);
        assert_eq!(print_value(json), "[\n  1\n  \"hello\"\n  true\n  null\n]");
    }

    #[test]
    fn test_empty_set() {
        assert_eq!(print_value(json!({})), "{ }");
    }

    #[test]
    fn test_filled_set() {
        let json = json!({"hello": "world"});
        assert_eq!(print_value(json), "{\n  hello = \"world\";\n}");
    }

    #[test]
    fn test_nested() {
        let json = json!([
            "HDMI-0",
            {
                "output": "DVI-0",
                "primary": true
            },
            {
                "monitorConfig": "Option \"Rotate\" \"left\"",
                "output": "DVI-1"
            },
            ["hello", "word"]
        ]);

        let expected = concat!(
            "[\n",
            "  \"HDMI-0\"\n",
            "  {\n",
            "    output = \"DVI-0\";\n",
            "    primary = true;\n",
            "  }\n",
            "  {\n",
            "    monitorConfig = \"Option \\\"Rotate\\\" \\\"left\\\"\";\n",
            "    output = \"DVI-1\";\n",
            "  }\n",
            "  [\n",
            "    \"hello\"\n",
            "    \"word\"\n",
            "  ]\n",
            "]"
        );
        assert_eq!(print_value(json), expected);
    }
}
|
|
@ -1,10 +1,10 @@
|
|||
use anyhow::{Context, Result};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
ffi::OsStr,
|
||||
fs::{self, File},
|
||||
io::Read,
|
||||
path::Path,
|
||||
ffi::OsStr,
|
||||
};
|
||||
|
||||
pub type Hash = String;
|
||||
|
@ -35,10 +35,9 @@ pub enum Source {
|
|||
|
||||
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
|
||||
struct TomlDocument {
|
||||
sources: Vec<Source>
|
||||
sources: Vec<Source>,
|
||||
}
|
||||
|
||||
|
||||
impl Source {
|
||||
pub fn to_flake_ref(&self) -> FlakeRef {
|
||||
match self {
|
||||
|
@ -76,7 +75,6 @@ impl Source {
|
|||
}
|
||||
|
||||
pub fn read_sources_file(path: &Path) -> Result<Vec<Source>> {
|
||||
|
||||
let mut file = File::open(path).with_context(|| "Failed to open input file")?;
|
||||
|
||||
let mut buf = String::new();
|
||||
|
@ -85,8 +83,7 @@ impl Source {
|
|||
if path.extension() == Some(OsStr::new("toml")) {
|
||||
let document: TomlDocument = toml::from_str(&buf)?;
|
||||
Ok(document.sources)
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
Ok(serde_json::from_str(&buf)?)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,7 +2,14 @@ use std::{borrow::Borrow, collections::HashMap};
|
|||
|
||||
use clap::arg_enum;
|
||||
pub use elasticsearch::http::transport::Transport;
|
||||
use elasticsearch::{BulkOperation, Elasticsearch as Client, http::response::{self, Response}, indices::{IndicesCreateParts, IndicesDeleteAliasParts, IndicesDeleteParts, IndicesExistsParts, IndicesGetAliasParts, IndicesPutAliasParts, IndicesUpdateAliasesParts}};
|
||||
use elasticsearch::{
|
||||
http::response::{self, Response},
|
||||
indices::{
|
||||
IndicesCreateParts, IndicesDeleteAliasParts, IndicesDeleteParts, IndicesExistsParts,
|
||||
IndicesGetAliasParts, IndicesPutAliasParts, IndicesUpdateAliasesParts,
|
||||
},
|
||||
BulkOperation, Elasticsearch as Client,
|
||||
};
|
||||
use lazy_static::lazy_static;
|
||||
use log::{info, warn};
|
||||
use serde_json::{json, Value};
|
||||
|
@ -376,14 +383,27 @@ impl Elasticsearch {
|
|||
) -> Result<(), ElasticsearchError> {
|
||||
// delete old alias
|
||||
info!("Try deletig old alias");
|
||||
let response = self.client.indices().get_alias(IndicesGetAliasParts::Name(&[alias])).send().await
|
||||
.map_err(ElasticsearchError::InitIndexError)?;
|
||||
let indices = response.json::<HashMap<String,Value>>().await.map_err(ElasticsearchError::InitIndexError)?.keys().cloned().collect::<Vec<String>>();
|
||||
|
||||
self
|
||||
let response = self
|
||||
.client
|
||||
.indices()
|
||||
.delete_alias(IndicesDeleteAliasParts::IndexName(&indices.iter().map(AsRef::as_ref).collect::<Vec<_>>(), &[alias]))
|
||||
.get_alias(IndicesGetAliasParts::Name(&[alias]))
|
||||
.send()
|
||||
.await
|
||||
.map_err(ElasticsearchError::InitIndexError)?;
|
||||
let indices = response
|
||||
.json::<HashMap<String, Value>>()
|
||||
.await
|
||||
.map_err(ElasticsearchError::InitIndexError)?
|
||||
.keys()
|
||||
.cloned()
|
||||
.collect::<Vec<String>>();
|
||||
|
||||
self.client
|
||||
.indices()
|
||||
.delete_alias(IndicesDeleteAliasParts::IndexName(
|
||||
&indices.iter().map(AsRef::as_ref).collect::<Vec<_>>(),
|
||||
&[alias],
|
||||
))
|
||||
.send()
|
||||
.await
|
||||
.map_err(ElasticsearchError::InitIndexError)?;
|
||||
|
|
Loading…
Reference in a new issue