flake-info: fix compilation warnings (#503)
Removes some dead code, including the unimplemented `--elastic-user`, `--elastic-pw`, and `--gc` flags. The crate now builds and tests with no warnings.
This commit is contained in:
parent
a76f3f779c
commit
b8518f6d48
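
The cleanup relies on two warning-silencing patterns rather than a blanket `#[allow(dead_code)]`: intentionally unused bindings are renamed with a leading underscore (`_config`, `_info`), and `#[allow(non_snake_case)]` is scoped to the serde variants that must keep their upstream field names (`fullName`). Below is a minimal standalone sketch of both patterns; the `License` type and `write_alias` signature are simplified stand-ins for illustration, not the crate's actual definitions.

```rust
use serde::Deserialize;

// Variants deserialized from JSON keep their upstream camelCase field names;
// scoping the lint to the variant avoids a crate-wide allow.
#[derive(Debug, Deserialize)]
#[serde(tag = "type")]
enum License {
    Simple { license: String },
    #[allow(non_snake_case)]
    Full { fullName: String },
}

// A parameter that is accepted but currently unused is prefixed with `_`,
// which silences the unused-variable warning without changing call sites.
fn write_alias(_config: &str, index: &str, alias: &str) -> String {
    format!("alias {} -> index {}", alias, index)
}

fn main() {
    // Hypothetical input, chosen only to exercise the non-snake_case variant.
    let license: License =
        serde_json::from_str(r#"{ "type": "Full", "fullName": "MIT License" }"#).unwrap();
    println!("{:?}", license);
    println!("{}", write_alias("ignored", "example-index", "example-alias"));
}
```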
@@ -1,19 +1,15 @@
 use anyhow::{Context, Result};
-use commands::run_gc;
 use flake_info::commands::NixCheckError;
-use flake_info::data::import::{Kind, NixOption};
-use flake_info::data::{self, Export, Nixpkgs, Source};
-use flake_info::elastic::{ElasticsearchError, ExistsStrategy};
-use flake_info::{commands, elastic};
+use flake_info::data::import::Kind;
+use flake_info::data::{self, Export, Source};
+use flake_info::elastic::{self, ElasticsearchError, ExistsStrategy};
 use lazy_static::lazy_static;
-use log::{debug, error, info, warn};
-use semver::VersionReq;
+use log::{error, info, warn};
 use serde::Deserialize;
 use sha2::Digest;
-use std::path::{Path, PathBuf};
-use std::ptr::hash;
+use std::io;
+use std::path::PathBuf;
 use std::str::FromStr;
-use std::{fs, io};
 use structopt::{clap::ArgGroup, StructOpt};
 use thiserror::Error;
 use tokio::fs::File;

@@ -58,9 +54,6 @@ enum Command {
 help = "Whether to use a temporary store or not. Located at /tmp/flake-info-store"
 )]
 temp_store: bool,
-
-#[structopt(long, help = "Whether to gc the store after info or not")]
-gc: bool,
 },
 #[structopt(about = "Import official nixpkgs channel")]
 Nixpkgs {

@@ -95,9 +88,6 @@ enum Command {
 )]
 temp_store: bool,
 
-#[structopt(long, help = "Whether to gc the store after info or not")]
-gc: bool,
-
 #[structopt(long, help = "Whether write an error report about failed packages")]
 report: bool,
 },

@@ -115,22 +105,21 @@ struct ElasticOpts {
 )]
 enable: bool,
 
-#[structopt(
-long,
-short = "u",
-env = "FI_ES_USER",
-help = "Elasticsearch username (unimplemented)"
-)]
-elastic_user: Option<String>,
+// #[structopt(
+// long,
+// short = "u",
+// env = "FI_ES_USER",
+// help = "Elasticsearch username (unimplemented)"
+// )]
+// elastic_user: Option<String>,
 
-#[structopt(
-long,
-short = "p",
-env = "FI_ES_PASSWORD",
-help = "Elasticsearch password (unimplemented)"
-)]
-elastic_pw: Option<String>,
-
+// #[structopt(
+// long,
+// short = "p",
+// env = "FI_ES_PASSWORD",
+// help = "Elasticsearch password (unimplemented)"
+// )]
+// elastic_pw: Option<String>,
 #[structopt(
 long,
 env = "FI_ES_URL",

@@ -222,11 +211,7 @@ async fn run_command(
 flake_info::commands::check_nix_version(env!("MIN_NIX_VERSION"))?;
 
 match command {
-Command::Flake {
-flake,
-temp_store,
-gc,
-} => {
+Command::Flake { flake, temp_store } => {
 let source = Source::Git { url: flake };
 let (info, exports) = flake_info::process_flake(&source, &kind, temp_store, extra)
 .map_err(FlakeInfoError::Flake)?;

@@ -271,7 +256,6 @@ async fn run_command(
 Command::Group {
 targets,
 temp_store,
-gc,
 name,
 report,
 } => {

@@ -1,10 +1,8 @@
 mod nix_check_version;
 mod nix_flake_attrs;
 mod nix_flake_info;
-mod nix_gc;
 mod nixpkgs_info;
 pub use nix_check_version::{check_nix_version, NixCheckError};
 pub use nix_flake_attrs::get_derivation_info;
 pub use nix_flake_info::get_flake_info;
-pub use nix_gc::run_gc;
 pub use nixpkgs_info::{get_nixpkgs_info, get_nixpkgs_options};

@@ -1,7 +1,6 @@
 use crate::data::import::{FlakeEntry, Kind};
 use anyhow::{Context, Result};
 use command_run::{Command, LogTo};
-use log::debug;
 use serde_json::Deserializer;
 use std::fmt::Display;
 use std::fs::File;

@@ -1,54 +0,0 @@
-use anyhow::{Context, Result};
-use log::warn;
-use std::{
-path::{self, PathBuf},
-process::Command,
-};
-use thiserror::Error;
-
-#[derive(Error, Debug)]
-pub enum GCError {
-#[error("Unexpected exit status: {0}")]
-ExitStatusError(String),
-}
-
-pub fn run_gc() -> Result<()> {
-let temp_store_path = PathBuf::from("/tmp/flake-info-store");
-if !temp_store_path.exists() {
-warn!("Temporary store path does not exist, was a temporary store used?");
-return Ok(());
-}
-
-let mut command = Command::new("nix-store");
-command.args(&[
-"--gc",
-"--store",
-temp_store_path.canonicalize()?.to_str().unwrap(),
-]);
-
-dbg!(&command);
-
-let mut child = command
-.spawn()
-.with_context(|| "failed to start `nix-store gc` subprocess")?;
-
-let result = child.wait()?;
-
-if !result.success() {
-return Err(GCError::ExitStatusError(format!("Code: {}", result.code().unwrap())).into());
-}
-
-std::fs::remove_dir_all(temp_store_path).with_context(|| "failed to clean up temp dir")?;
-
-Ok(())
-}
-
-#[cfg(test)]
-mod tests {
-use super::*;
-
-#[test]
-fn test_gc() {
-run_gc().unwrap();
-}
-}

@@ -4,7 +4,7 @@ use std::io::Write;
 use std::{collections::HashMap, fmt::Display, fs::File};
 
 use command_run::{Command, LogTo};
-use log::{debug, error};
+use log::error;
 
 use crate::data::import::{NixOption, NixpkgsEntry, Package};
 

@@ -12,13 +12,10 @@ use super::{
 };
 use crate::data::import::NixOption;
 use anyhow::Context;
-use log::error;
 use serde::{Deserialize, Serialize};
-use serde_json::Value;
 
 use super::{
 import,
-prettyprint::print_value,
 system::System,
 utility::{AttributeQuery, Flatten, OneOrMany, Reverse},
 };

@@ -66,6 +63,7 @@ impl From<import::License> for License {
 #[serde(tag = "type")]
 pub enum Derivation {
 #[serde(rename = "package")]
+#[allow(non_snake_case)]
 Package {
 package_attr_name: String,
 package_attr_name_reverse: Reverse<String>,

@@ -1,19 +1,16 @@
-use std::collections::{BTreeMap, HashMap};
-use std::convert::TryInto;
-use std::fmt::{self, write, Display};
+use std::collections::HashMap;
+use std::fmt;
 use std::marker::PhantomData;
 use std::{path::PathBuf, str::FromStr};
 
 use clap::arg_enum;
 use log::warn;
-use pandoc::PandocError;
 use serde::de::{self, MapAccess, Visitor};
 use serde::{Deserialize, Deserializer, Serialize, Serializer};
 use serde_json::Value;
-use thiserror::Error;
 
 use super::pandoc::PandocExt;
-use super::prettyprint::{self, print_value};
+use super::prettyprint::print_value;
 use super::system::System;
 use super::utility::{Flatten, OneOrMany};
 

@@ -226,6 +223,7 @@ pub enum License {
 Simple {
 license: String,
 },
+#[allow(non_snake_case)]
 Full {
 fullName: String,
 // shortName: String,

@@ -313,8 +311,6 @@ where
 mod tests {
 use std::collections::HashMap;
 
-use serde_json::Value;
-
 use super::*;
 
 #[test]

@@ -3,7 +3,6 @@ use std::io::Write;
 use std::path::PathBuf;
 
 use lazy_static::lazy_static;
-use log::debug;
 use pandoc::{
 InputFormat, InputKind, OutputFormat, OutputKind, PandocError, PandocOption, PandocOutput,
 };

@@ -1,8 +1,8 @@
-use anyhow::{Context, Result};
+use anyhow::Result;
 use serde::{Deserialize, Serialize};
 use std::{
 ffi::OsStr,
-fs::{self, File},
+fs::File,
 io::{self, Read},
 path::Path,
 };

@@ -31,8 +31,3 @@ pub struct InstancePlatform {
 system: System,
 version: String,
 }
-
-#[cfg(test)]
-mod tests {
-use super::*;
-}

@@ -1,15 +1,8 @@
-use std::{borrow::Borrow, collections::HashMap};
+use std::collections::HashMap;
 
 use clap::arg_enum;
 pub use elasticsearch::http::transport::Transport;
-use elasticsearch::{
-http::response::{self, Response},
-indices::{
-IndicesCreateParts, IndicesDeleteAliasParts, IndicesDeleteParts, IndicesExistsParts,
-IndicesGetAliasParts, IndicesPutAliasParts, IndicesUpdateAliasesParts,
-},
-BulkOperation, Elasticsearch as Client,
-};
+use elasticsearch::{http::response, indices::*, BulkOperation, Elasticsearch as Client};
 use lazy_static::lazy_static;
 use log::{info, warn};
 use serde_json::{json, Value};

@@ -380,7 +373,7 @@ impl Elasticsearch {
 
 pub async fn write_alias(
 &self,
-config: &Config<'_>,
+_config: &Config<'_>,
 index: &str,
 alias: &str,
 ) -> Result<(), ElasticsearchError> {

@@ -499,7 +492,7 @@ mod tests {
 let exports = sources
 .iter()
 .flat_map(|s| process_flake(s, &Kind::All, false, &[]))
-.map(|(info, exports)| exports)
+.map(|(_info, exports)| exports)
 .flatten()
 .collect::<Vec<Export>>();
 println!("{}", serde_json::to_string(&exports[1]).unwrap());

@@ -1,7 +1,5 @@
 #![recursion_limit = "256"]
 
-use std::path::PathBuf;
-
 use anyhow::Result;
 use data::{import::Kind, Export, Flake, Source};
 