use anyhow::{Context, Result};
use flake_info::commands::NixCheckError;
use flake_info::data::import::Kind;
use flake_info::data::{self, Export, Source};
use flake_info::elastic::{self, ElasticsearchError, ExistsStrategy};
use log::{error, info, warn};
use sha2::Digest;
use std::io;
use std::path::PathBuf;
use structopt::{clap::ArgGroup, StructOpt};
use thiserror::Error;
use tokio::fs::File;
use tokio::io::AsyncWriteExt;
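/// Top-level CLI arguments: the subcommand to run, the kind of data to
/// extract, output/Elasticsearch options, and extra arguments forwarded to nix.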
#[derive(StructOpt, Debug)]
#[structopt(
    name = "flake-info",
    about = "Extracts various information from a given flake",
    group = ArgGroup::with_name("sources").required(false)
)]
struct Args {
    #[structopt(subcommand)]
    command: Command,

    #[structopt(
        short,
        long,
        help = "Kind of data to extract",
        possible_values = &data::import::Kind::variants(),
        case_insensitive = true,
        default_value
    )]
    kind: data::import::Kind,

    #[structopt(flatten)]
    elastic: ElasticOpts,

    #[structopt(help = "Extra arguments that are passed to nix as is")]
    extra: Vec<String>,
}
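/// Import sources: a single flake, an official nixpkgs channel, a nixpkgs
/// archive or local checkout, or a group of targets read from a file.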
#[derive(StructOpt, Debug)]
enum Command {
    #[structopt(about = "Import a flake")]
    Flake {
        #[structopt(help = "Flake identifier passed to nix to gather information about")]
        flake: String,

        #[structopt(
            long,
            help = "Whether to use a temporary store or not. Located at /tmp/flake-info-store"
        )]
        temp_store: bool,
    },
    #[structopt(about = "Import official nixpkgs channel")]
    Nixpkgs {
        #[structopt(help = "Nixpkgs channel to import")]
        channel: String,
    },
    #[structopt(about = "Import nixpkgs channel from archive or local git path")]
    NixpkgsArchive {
        #[structopt(help = "Nixpkgs archive to import")]
        source: String,

        #[structopt(
            help = "Which channel to assign nixpkgs to",
            default_value = "unstable"
        )]
        channel: String,
    },
    #[structopt(about = "Load and import a group of flakes from a file")]
    Group {
        #[structopt(
            help = "Points to a TOML or JSON file containing info targets. If the file does not end in 'toml', JSON is assumed"
        )]
        targets: PathBuf,

        name: String,

        #[structopt(
            long,
            help = "Whether to use a temporary store or not. Located at /tmp/flake-info-store"
        )]
        temp_store: bool,

        #[structopt(long, help = "Whether to write an error report about failed packages")]
        report: bool,
    },
}
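/// Output options shared by all subcommands: JSON printing and Elasticsearch
/// connection, index, and alias settings (configurable via FI_ES_* variables).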
#[derive(StructOpt, Debug)]
struct ElasticOpts {
    #[structopt(long = "json", help = "Print Elasticsearch compatible JSON output")]
    json: bool,

    #[structopt(
        long = "push",
        help = "Push to Elasticsearch (Configure using FI_ES_* environment variables)",
        requires("elastic-schema-version")
    )]
    enable: bool,

    // #[structopt(
    //     long,
    //     short = "u",
    //     env = "FI_ES_USER",
    //     help = "Elasticsearch username (unimplemented)"
    // )]
    // elastic_user: Option<String>,

    // #[structopt(
    //     long,
    //     short = "p",
    //     env = "FI_ES_PASSWORD",
    //     help = "Elasticsearch password (unimplemented)"
    // )]
    // elastic_pw: Option<String>,

    #[structopt(
        long,
        env = "FI_ES_URL",
        default_value = "http://localhost:9200",
        help = "Elasticsearch instance url"
    )]
    elastic_url: String,

    #[structopt(
        long,
        help = "Name of the index to store results to",
        env = "FI_ES_INDEX",
        required_if("enable", "true")
    )]
    elastic_index_name: Option<String>,

    #[structopt(
        long,
        help = "How to react to existing indices",
        possible_values = &ExistsStrategy::variants(),
        case_insensitive = true,
        default_value = "abort",
        env = "FI_ES_EXISTS_STRATEGY"
    )]
    elastic_exists: ExistsStrategy,

    #[structopt(
        long,
        help = "Which schema version to associate with the operation",
        env = "FI_ES_VERSION"
    )]
    elastic_schema_version: Option<usize>,

    #[structopt(
        long,
        help = "Whether to disable `latest` alias creation",
        env = "FI_ES_NO_ALIAS"
    )]
    no_alias: bool,
}
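/// Exports are produced lazily: the boxed closure is only invoked once the
/// results are actually needed, e.g. after the Elasticsearch index has been
/// prepared, so no evaluation happens if the index already exists and the
/// abort strategy is in effect.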
type LazyExports = Box<dyn FnOnce() -> Result<Vec<Export>, FlakeInfoError>>;
#[tokio::main]
async fn main() -> Result<()> {
    env_logger::init();

    let args = Args::from_args();

    anyhow::ensure!(
        args.elastic.enable || args.elastic.json,
        "at least one of --push or --json must be specified"
    );

    let (exports, ident) = run_command(args.command, args.kind, &args.extra).await?;

    if args.elastic.enable {
        push_to_elastic(&args.elastic, exports, ident).await?;
    } else if args.elastic.json {
        println!("{}", serde_json::to_string(&exports()?)?);
    }

    Ok(())
}
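/// Errors that can occur while gathering flake, nixpkgs, or group information.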
#[derive(Debug, Error)]
enum FlakeInfoError {
    #[error("Nix check failed: {0}")]
    NixCheck(#[from] NixCheckError),

    #[error("Getting flake info caused an error: {0:?}")]
    Flake(anyhow::Error),

    #[error("Getting nixpkgs info caused an error: {0:?}")]
    Nixpkgs(anyhow::Error),

    #[error("Some members of the group '{0}' could not be processed: \n{}", .1.iter().enumerate().map(|(n, e)| format!("{}: {:?}", n+1, e)).collect::<Vec<String>>().join("\n\n"))]
    Group(String, Vec<anyhow::Error>),

    #[error("Couldn't perform IO: {0}")]
    IO(#[from] io::Error),
}
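/// Runs the given subcommand and returns the (deferred) exports together with
/// a `(kind, name, revision)` triple used to derive the Elasticsearch index name.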
async fn run_command(
    command: Command,
    kind: Kind,
    extra: &[String],
) -> Result<(LazyExports, (String, String, String)), FlakeInfoError> {
    flake_info::commands::check_nix_version(env!("MIN_NIX_VERSION"))?;

    match command {
        Command::Flake { flake, temp_store } => {
            let source = Source::Git { url: flake };
            let (info, exports) = flake_info::process_flake(&source, &kind, temp_store, extra)
                .map_err(FlakeInfoError::Flake)?;

            let ident = (
                "flake".to_owned(),
                info.name,
                info.revision.unwrap_or("latest".into()),
            );

            Ok((Box::new(|| Ok(exports)), ident))
        }
        Command::Nixpkgs { channel } => {
            let nixpkgs = Source::nixpkgs(channel)
                .await
                .map_err(FlakeInfoError::Nixpkgs)?;
            let ident = (
                "nixos".to_owned(),
                nixpkgs.channel.to_owned(),
                nixpkgs.git_ref.to_owned(),
            );

            Ok((
                Box::new(move || {
                    flake_info::process_nixpkgs(&Source::Nixpkgs(nixpkgs), &kind)
                        .map_err(FlakeInfoError::Nixpkgs)
                }),
                ident,
            ))
        }
        Command::NixpkgsArchive { source, channel } => {
            let ident = (
                "nixos".to_string(),
                channel.to_owned(),
                "latest".to_string(),
            );

            Ok((
                Box::new(move || {
                    flake_info::process_nixpkgs(&Source::Git { url: source }, &kind)
                        .map_err(FlakeInfoError::Nixpkgs)
                }),
                ident,
            ))
        }
        Command::Group {
            targets,
            temp_store,
            name,
            report,
        } => {
            // if reporting is enabled delete old report
            if report && tokio::fs::metadata("report.txt").await.is_ok() {
                tokio::fs::remove_file("report.txt").await?;
            }

            let sources = Source::read_sources_file(&targets)?;
            let (exports_and_hashes, errors) = sources
                .iter()
                .map(|source| match source {
                    Source::Nixpkgs(nixpkgs) => flake_info::process_nixpkgs(source, &kind)
                        .with_context(|| {
                            format!("While processing nixpkgs archive {}", source.to_flake_ref())
                        })
                        .map(|result| (result, nixpkgs.git_ref.to_owned())),
                    _ => flake_info::process_flake(source, &kind, temp_store, &extra)
                        .with_context(|| {
                            format!("While processing flake {}", source.to_flake_ref())
                        })
                        .map(|(info, result)| (result, info.revision.unwrap_or("latest".into()))),
                })
                .partition::<Vec<_>, _>(Result::is_ok);

            let (exports, hashes) = exports_and_hashes
                .into_iter()
                .map(|result| result.unwrap()) // each result is_ok
                .fold(
                    (Vec::new(), Vec::new()),
                    |(mut exports, mut hashes), (export, hash)| {
                        exports.extend(export);
                        hashes.push(hash);
                        (exports, hashes)
                    },
                );

            let errors = errors
                .into_iter()
                .map(Result::unwrap_err) // each result is_err
                .collect::<Vec<_>>();

            if !errors.is_empty() {
                let error = FlakeInfoError::Group(name.clone(), errors);
                if exports.is_empty() {
                    return Err(error);
                }

                warn!("=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=");
                warn!("{}", error);
                warn!("=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=");

                if report {
                    let mut file = File::create("report.txt").await?;
                    file.write_all(format!("{}", error).as_bytes()).await?;
                }
            }

            let hash = {
                let mut sha = sha2::Sha256::new();
                for hash in hashes {
                    sha.update(hash);
                }
                format!("{:08x}", sha.finalize())
            };

            let ident = ("group".to_owned(), name, hash);

            Ok((Box::new(|| Ok(exports)), ident))
        }
    }
}
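/// Ensures the target index exists, evaluates the deferred exports, pushes them
/// to Elasticsearch, and updates the `latest` alias unless aliasing is disabled
/// or an explicit index name was given.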
async fn push_to_elastic(
    elastic: &ElasticOpts,
    exports: LazyExports,
    ident: (String, String, String),
) -> Result<()> {
    let (index, alias) = elastic
        .elastic_index_name
        .to_owned()
        .map(|ident| {
            (
                format!("{}-{}", elastic.elastic_schema_version.unwrap(), ident),
                None,
            )
        })
        .or_else(|| {
            let (kind, name, hash) = ident;
            let ident = format!(
                "{}-{}-{}-{}",
                kind,
                elastic.elastic_schema_version.unwrap(),
                &name,
                hash
            );
            let alias = format!(
                "latest-{}-{}-{}",
                elastic.elastic_schema_version.unwrap(),
                kind,
                &name
            );

            warn!("Using automatic index identifier: {}", ident);
            Some((ident, Some(alias)))
        })
        .unwrap();

    let es = elastic::Elasticsearch::new(elastic.elastic_url.as_str())?;
    let config = elastic::Config {
        index: &index,
        exists_strategy: elastic.elastic_exists,
    };

    // catch error variant if abort strategy was triggered
    let ensure = es.ensure_index(&config).await;
    if let Err(ElasticsearchError::IndexExistsError(_)) = ensure {
        // abort on abort
        return Ok(());
    } else {
        // throw error if present
        ensure?;
    }

    let successes = exports()?;

    info!("Pushing to elastic");
    es.push_exports(&config, &successes)
        .await
        .with_context(|| "Failed to push results to elasticsearch".to_string())?;

    if let Some(alias) = alias {
        if !elastic.no_alias {
            es.write_alias(&config, &index, &alias)
                .await
                .with_context(|| "Failed to create alias".to_string())?;
        } else {
            warn!("Creating alias disabled")
        }
    }

    Ok(())
}