flake-info: abort early if index exists (#609)

Make the exports lazy using a closure, so that if the index already exists
we can abort early, before doing the expensive work.
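
The idea, as a minimal self-contained sketch (stand-in types, not the real flake-info ones): wrap the expensive evaluation in a boxed `FnOnce` closure and only call it after the cheap "does the index already exist?" check.

```rust
#[derive(Debug)]
struct Export(String); // stand-in for flake_info's Export type

type LazyExports = Box<dyn FnOnce() -> Result<Vec<Export>, String>>;

fn push_to_elastic(exports: LazyExports, index_exists: bool) -> Result<(), String> {
    if index_exists {
        // Abort early: the closure is never invoked, so no expensive work runs.
        return Ok(());
    }
    let exports = exports()?; // the evaluation only happens here
    println!("pushing {} exports", exports.len());
    Ok(())
}

fn main() -> Result<(), String> {
    let exports: LazyExports = Box::new(|| {
        // In the real code this would be e.g. flake_info::process_nixpkgs(...).
        Ok(vec![Export("example".into())])
    });
    push_to_elastic(exports, /* index_exists */ true)
}
```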

Raise an error if neither `--json` nor `--push` is specified.
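
The flag check uses `anyhow::ensure!`, which returns an error from the surrounding function when the condition is false; a minimal sketch of the same check in isolation:

```rust
use anyhow::Result;

fn validate(push: bool, json: bool) -> Result<()> {
    // Same shape as the check added to main(): require at least one output mode.
    anyhow::ensure!(
        push || json,
        "at least one of --push or --json must be specified"
    );
    Ok(())
}

fn main() {
    assert!(validate(false, false).is_err());
    assert!(validate(true, false).is_ok());
}
```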

Flakes are staying strict for now because determining the index name is
too tightly coupled with the actual exports.
Author: Naïm Favier, 2023-02-07 16:30:51 +01:00 (committed by GitHub)
parent eb9fa64c33
commit 32097fc62e

@@ -153,31 +153,30 @@ struct ElasticOpts {
     #[structopt(
         long,
         help = "Whether to disable `latest` alias creation",
-        env = "FI_ES_VERSION"
+        env = "FI_ES_NO_ALIAS"
     )]
     no_alias: bool,
 }
 
+type LazyExports = Box<dyn FnOnce() -> Result<Vec<Export>, FlakeInfoError>>;
+
 #[tokio::main]
 async fn main() -> Result<()> {
     env_logger::init();
     let args = Args::from_args();
 
-    let command_result = run_command(args.command, args.kind, &args.extra).await;
+    anyhow::ensure!(
+        args.elastic.enable || args.elastic.json,
+        "at least one of --push or --json must be specified"
+    );
 
-    if let Err(error) = command_result {
-        return Err(error.into());
-    }
-    let (successes, ident) = command_result.unwrap();
+    let (exports, ident) = run_command(args.command, args.kind, &args.extra).await?;
 
     if args.elastic.enable {
-        push_to_elastic(&args.elastic, &successes, ident).await?;
-    }
-
-    if args.elastic.json {
-        println!("{}", serde_json::to_string(&successes)?);
+        push_to_elastic(&args.elastic, exports, ident).await?;
+    } else if args.elastic.json {
+        println!("{}", serde_json::to_string(&exports()?)?);
     }
 
     Ok(())
 }
@@ -201,7 +200,7 @@ async fn run_command(
     command: Command,
     kind: Kind,
     extra: &[String],
-) -> Result<(Vec<Export>, (String, String, String)), FlakeInfoError> {
+) -> Result<(LazyExports, (String, String, String)), FlakeInfoError> {
     flake_info::commands::check_nix_version(env!("MIN_NIX_VERSION"))?;
 
     match command {
@@ -216,7 +215,7 @@ async fn run_command(
                 info.revision.unwrap_or("latest".into()),
             );
 
-            Ok((exports, ident))
+            Ok((Box::new(|| Ok(exports)), ident))
         }
         Command::Nixpkgs { channel } => {
             let nixpkgs = Source::nixpkgs(channel)
@@ -227,10 +226,14 @@ async fn run_command(
                 nixpkgs.channel.to_owned(),
                 nixpkgs.git_ref.to_owned(),
             );
 
-            let exports = flake_info::process_nixpkgs(&Source::Nixpkgs(nixpkgs), &kind)
-                .map_err(FlakeInfoError::Nixpkgs)?;
-            Ok((exports, ident))
+            Ok((
+                Box::new(move || {
+                    flake_info::process_nixpkgs(&Source::Nixpkgs(nixpkgs), &kind)
+                        .map_err(FlakeInfoError::Nixpkgs)
+                }),
+                ident,
+            ))
         }
         Command::NixpkgsArchive { source, channel } => {
             let ident = (
@@ -238,10 +241,14 @@ async fn run_command(
                 channel.to_owned(),
                 "latest".to_string(),
             );
 
-            let exports = flake_info::process_nixpkgs(&Source::Git { url: source }, &kind)
-                .map_err(FlakeInfoError::Nixpkgs)?;
-            Ok((exports, ident))
+            Ok((
+                Box::new(move || {
+                    flake_info::process_nixpkgs(&Source::Git { url: source }, &kind)
+                        .map_err(FlakeInfoError::Nixpkgs)
+                }),
+                ident,
+            ))
         }
         Command::Group {
            targets,
@@ -314,14 +321,14 @@ async fn run_command(
             let ident = ("group".to_owned(), name, hash);
 
-            Ok((exports, ident))
+            Ok((Box::new(|| Ok(exports)), ident))
         }
     }
 }
 
 async fn push_to_elastic(
     elastic: &ElasticOpts,
-    successes: &[Export],
+    exports: LazyExports,
     ident: (String, String, String),
 ) -> Result<()> {
     let (index, alias) = elastic
@@ -354,7 +361,6 @@ async fn push_to_elastic(
         })
         .unwrap();
-    info!("Pushing to elastic");
 
     let es = elastic::Elasticsearch::new(elastic.elastic_url.as_str())?;
     let config = elastic::Config {
         index: &index,
@@ -371,7 +377,10 @@ async fn push_to_elastic(
         ensure?;
     }
 
-    es.push_exports(&config, successes)
+    let successes = exports()?;
+
+    info!("Pushing to elastic");
+    es.push_exports(&config, &successes)
         .await
         .with_context(|| "Failed to push results to elasticsearch".to_string())?;