Diffstat (limited to 'rust/src')
-rw-r--r--  rust/src/auth.rs              | 233
-rw-r--r--  rust/src/bin/fatcat-auth.rs   |   2
-rw-r--r--  rust/src/bin/fatcat-doctor.rs |   1
-rw-r--r--  rust/src/bin/fatcat-export.rs |  14
-rw-r--r--  rust/src/bin/fatcatd.rs       |   2
-rw-r--r--  rust/src/database_models.rs   |   8
-rw-r--r--  rust/src/endpoint_handlers.rs |  10
-rw-r--r--  rust/src/entity_crud.rs       |  38
-rw-r--r--  rust/src/errors.rs            |   8
-rw-r--r--  rust/src/identifiers.rs       |   4
10 files changed, 190 insertions, 130 deletions
diff --git a/rust/src/auth.rs b/rust/src/auth.rs
index b88a72b0..3a6271e5 100644
--- a/rust/src/auth.rs
+++ b/rust/src/auth.rs
@@ -5,7 +5,7 @@
 //! role-based authentication (RBAC).
 
 use data_encoding::BASE64;
-use macaroon::{Format, Macaroon, Verifier};
+use macaroon::{ByteString, Caveat, Format, Macaroon, MacaroonError, MacaroonKey, Verifier};
 use std::fmt;
 use swagger::auth::{AuthData, Authorization, Scopes};
 
@@ -23,10 +23,10 @@ use std::collections::HashMap;
 use std::env;
 use std::str::FromStr;
 
-// 32 bytes max (!)
-static DUMMY_KEY: &[u8] = b"dummy-key-a-one-two-three-a-la";
+// 32 bytes exactly
+static DUMMY_KEY_BYTES: &[u8; 32] = b"dummy-key-a-one-two-three-a-la!!";
 
-#[derive(Clone, Copy, Debug, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
 pub enum FatcatRole {
     Public,
     Editor,
@@ -178,8 +178,23 @@ impl iron::middleware::BeforeMiddleware for MacaroonAuthMiddleware {
 pub struct AuthConfectionary {
     pub location: String,
     pub identifier: String,
-    pub key: Vec<u8>,
-    pub root_keys: HashMap<String, Vec<u8>>,
+    pub key: MacaroonKey,
+    pub root_keys: HashMap<String, MacaroonKey>,
+}
+
+fn parse_macaroon_key(key_base64: &str) -> Result<MacaroonKey> {
+    // instead of creating a [u8; 32], we decode into an arbitrary Vec (after checking the input
+    // length first), because the MacaroonKey 'From' trait is implemented differently for [u8] and
+    // [u8; 32] (sigh).
+    if key_base64.len() != 44 {
+        bail!("bad base64-padded-encoded key for macaroons");
+    }
+    let key_bytes = BASE64
+        .decode(key_base64.as_bytes())
+        .expect("base64 key decode");
+    let bytes_ref: &[u8] = key_bytes.as_ref();
+    let key = MacaroonKey::from(bytes_ref);
+    Ok(key)
 }
 
 impl AuthConfectionary {
@@ -188,10 +203,10 @@ impl AuthConfectionary {
         identifier: String,
         key_base64: &str,
     ) -> Result<AuthConfectionary> {
-        macaroon::initialize().unwrap();
-        let key = BASE64.decode(key_base64.as_bytes())?;
+        macaroon::initialize().expect("initializing macaroon library");
+        let key = parse_macaroon_key(key_base64)?;
         let mut root_keys = HashMap::new();
-        root_keys.insert(identifier.clone(), key.clone());
+        root_keys.insert(identifier.clone(), key);
         Ok(AuthConfectionary {
             location,
             identifier,
@@ -204,13 +219,13 @@ impl AuthConfectionary {
         AuthConfectionary::new(
             "test.fatcat.wiki".to_string(),
             "dummy".to_string(),
-            &BASE64.encode(DUMMY_KEY),
+            &BASE64.encode(DUMMY_KEY_BYTES),
         )
-        .unwrap()
+        .expect("creating dummy AuthConfectionary")
     }
 
     pub fn add_keypair(&mut self, identifier: String, key_base64: &str) -> Result<()> {
-        let key = BASE64.decode(key_base64.as_bytes())?;
+        let key = parse_macaroon_key(key_base64)?;
         self.root_keys.insert(identifier, key);
         Ok(())
     }
@@ -220,23 +235,31 @@ impl AuthConfectionary {
         editor_id: FatcatId,
         duration: Option<chrono::Duration>,
     ) -> Result<String> {
-        let mut mac = Macaroon::create(&self.location, &self.key, &self.identifier)
-            .expect("Macaroon creation");
-        mac.add_first_party_caveat(&format!("editor_id = {}", editor_id.to_string()));
+        let mut mac = Macaroon::create(
+            Some(self.location.clone()),
+            &self.key,
+            self.identifier.clone().into(),
+        )
+        .expect("Macaroon creation");
+        mac.add_first_party_caveat(format!("editor_id = {}", editor_id).into());
         let now_utc = Utc::now();
         let now = now_utc.to_rfc3339_opts(SecondsFormat::Secs, true);
-        mac.add_first_party_caveat(&format!("time > {}", now));
+        mac.add_first_party_caveat(format!("time > {}", now).into());
         if let Some(duration) = duration {
             let expires = now_utc + duration;
-            mac.add_first_party_caveat(&format!(
-                "time < {}",
-                &expires.to_rfc3339_opts(SecondsFormat::Secs, true)
-            ));
+            mac.add_first_party_caveat(
+                format!(
+                    "time < {}",
+                    &expires.to_rfc3339_opts(SecondsFormat::Secs, true)
+                )
+                .into(),
+            );
         };
         let raw = mac.serialize(Format::V2).expect("macaroon serialization");
         Ok(BASE64.encode(&raw))
     }
 
+    /// Takes a macaroon as a base64-encoded string, deserializes it
     pub fn parse_macaroon_token(
         &self,
         conn: &DbConn,
@@ -255,23 +278,19 @@ impl AuthConfectionary {
                 .into());
             }
         };
-        let mac = match mac.validate() {
-            Ok(m) => m,
-            Err(e) => {
-                // TODO: should be "chaining" here
-                return Err(FatcatError::InvalidCredentials(format!(
-                    "macaroon validate error: {:?}",
-                    e
-                ))
-                .into());
-            }
-        };
-        let mut verifier = Verifier::new();
+        let mut verifier = Verifier::default();
         let mut editor_id: Option<FatcatId> = None;
         for caveat in mac.first_party_caveats() {
-            if caveat.predicate().starts_with("editor_id = ") {
-                editor_id = Some(FatcatId::from_str(caveat.predicate().get(12..).unwrap())?);
-                break;
+            if let Caveat::FirstParty(fp) = caveat {
+                let predicate_str = String::from_utf8(fp.predicate().as_ref().to_vec())?;
+                if predicate_str.starts_with("editor_id = ") {
+                    editor_id = Some(FatcatId::from_str(
+                        predicate_str
+                            .get(12..)
+                            .expect("parsing editor_id from macaroon"),
+                    )?);
+                    break;
+                }
             }
         }
         let editor_id = match editor_id {
@@ -283,25 +302,28 @@
                 .into());
             }
         };
-        verifier.satisfy_exact(&format!("editor_id = {}", editor_id.to_string()));
+        verifier.satisfy_exact(format!("editor_id = {}", editor_id).into());
         if let Some(endpoint) = endpoint {
             // API endpoint
-            verifier.satisfy_exact(&format!("endpoint = {}", endpoint));
+            verifier.satisfy_exact(format!("endpoint = {}", endpoint).into());
         }
         let mut created: Option<DateTime<Utc>> = None;
         for caveat in mac.first_party_caveats() {
-            if caveat.predicate().starts_with("time > ") {
-                let ts: chrono::ParseResult<DateTime<Utc>> =
-                    DateTime::parse_from_rfc3339(caveat.predicate().get(7..).unwrap())
-                        .map(|x| x.with_timezone(&Utc));
-                if let Ok(ts) = ts {
-                    created = Some(ts);
-                    break;
-                } else {
-                    info!(
-                        "couldn't parse macaroon time constraint: {}",
-                        caveat.predicate()
-                    );
+            if let Caveat::FirstParty(fp) = caveat {
+                let predicate_str = String::from_utf8(fp.predicate().as_ref().to_vec())?;
+                if predicate_str.starts_with("time > ") {
+                    let ts: chrono::ParseResult<DateTime<Utc>> = DateTime::parse_from_rfc3339(
+                        predicate_str
+                            .get(7..)
+                            .expect("parsing timestamp from macaroon"),
+                    )
+                    .map(|x| x.with_timezone(&Utc));
+                    if let Ok(ts) = ts {
+                        created = Some(ts);
+                        break;
+                    } else {
+                        info!("couldn't parse macaroon time constraint: {}", predicate_str);
+                    }
                 }
             }
         }
@@ -314,23 +336,25 @@
                 .into());
             }
         };
-        verifier.satisfy_exact(&format!(
-            "time > {}",
-            created.to_rfc3339_opts(SecondsFormat::Secs, true)
-        ));
+        verifier.satisfy_exact(
+            format!(
+                "time > {}",
+                created.to_rfc3339_opts(SecondsFormat::Secs, true)
+            )
+            .into(),
+        );
         // not finding the editor_id is an auth issue, not a 404
-        let editor: EditorRow =
-            match Editor::db_get(conn, editor_id).map_err(|e| FatcatError::from(e)) {
-                Ok(ed) => ed,
-                Err(FatcatError::NotFound(_, _)) => {
-                    return Err(FatcatError::InvalidCredentials(format!(
-                        "editor_id not found: {}",
-                        editor_id
-                    ))
-                    .into());
-                }
-                other_db_err => other_db_err?,
-            };
+        let editor: EditorRow = match Editor::db_get(conn, editor_id).map_err(FatcatError::from) {
+            Ok(ed) => ed,
+            Err(FatcatError::NotFound(_, _)) => {
+                return Err(FatcatError::InvalidCredentials(format!(
+                    "editor_id not found: {}",
+                    editor_id
+                ))
+                .into());
+            }
+            other_db_err => other_db_err?,
+        };
         let auth_epoch = DateTime::<Utc>::from_utc(editor.auth_epoch, Utc);
         // allow a second of wiggle room for precision and, eg, tests
         if created < (auth_epoch - chrono::Duration::seconds(1)) {
@@ -340,39 +364,48 @@
             )
             .into());
         }
-        verifier.satisfy_general(|p: &str| -> bool {
-            // not expired (based on time)
-            if p.starts_with("time < ") {
-                let expires: chrono::ParseResult<DateTime<Utc>> =
-                    DateTime::parse_from_rfc3339(p.get(7..).unwrap())
-                        .map(|x| x.with_timezone(&Utc));
-                if let Ok(when) = expires {
-                    //info!("checking time constraint: {} < {}", Utc::now(), when);
-                    Utc::now() < when
+        verifier.satisfy_general(|p_bstr: &ByteString| -> bool {
+            if let Ok(p) = String::from_utf8(p_bstr.as_ref().to_vec()) {
+                // not expired (based on time)
+                if p.starts_with("time < ") {
+                    let expires: chrono::ParseResult<DateTime<Utc>> = DateTime::parse_from_rfc3339(
+                        p.get(7..).expect("parsing datetime from macaroon"),
+                    )
+                    .map(|x| x.with_timezone(&Utc));
+                    if let Ok(when) = expires {
+                        //info!("checking time constraint: {} < {}", Utc::now(), when);
+                        Utc::now() < when
+                    } else {
+                        info!("couldn't parse macaroon time constraint: {}", p);
+                        false
+                    }
                 } else {
-                    info!("couldn't parse macaroon time constraint: {}", p);
                     false
                 }
             } else {
                 false
            }
        });
-        let verify_key = match self.root_keys.get(mac.identifier()) {
+        let verify_key = match self
+            .root_keys
+            .get(std::str::from_utf8(mac.identifier().as_ref())?)
+        {
             Some(key) => key,
             None => {
                 return Err(FatcatError::InvalidCredentials(format!(
                     "no valid auth signing key for identifier: {}",
-                    mac.identifier()
+                    std::str::from_utf8(mac.identifier().as_ref())?
                 ))
                 .into());
             }
         };
-        match mac.verify(verify_key, &mut verifier) {
-            Ok(true) => (),
-            Ok(false) => {
-                return Err(FatcatError::InvalidCredentials(
-                    "auth token (macaroon) not valid (signature and/or caveats failed)".to_string(),
-                )
+        match verifier.verify(&mac, verify_key, Default::default()) {
+            Ok(()) => (),
+            Err(MacaroonError::InvalidMacaroon(em)) => {
+                return Err(FatcatError::InvalidCredentials(format!(
+                    "auth token (macaroon) not valid (signature and/or caveats failed): {}",
+                    em
+                ))
                 .into());
             }
             Err(e) => {
@@ -448,9 +481,15 @@ impl AuthConfectionary {
         let now = Utc::now().to_rfc3339_opts(SecondsFormat::Secs, true);
         println!("current time: {}", now);
         println!("domain (location): {:?}", mac.location());
-        println!("signing key name (identifier): {}", mac.identifier());
+        println!(
+            "signing key name (identifier): {}",
+            std::str::from_utf8(mac.identifier().as_ref())?
+        );
         for caveat in mac.first_party_caveats() {
-            println!("caveat: {}", caveat.predicate());
+            if let Caveat::FirstParty(fp) = caveat {
+                let predicate_str = String::from_utf8(fp.predicate().as_ref().to_vec())?;
+                println!("caveat: {}", predicate_str);
+            }
         }
         println!("verify: {:?}", self.parse_macaroon_token(conn, token, None));
         Ok(())
@@ -488,7 +527,7 @@ pub fn print_editors(conn: &DbConn) -> Result<()> {
     for e in all_editors {
         println!(
             "{}\t{}/{}/{}\t{}\t{}\t{:?}",
-            FatcatId::from_uuid(&e.id).to_string(),
+            FatcatId::from_uuid(&e.id),
             e.is_superuser,
             e.is_admin,
             e.is_bot,
@@ -519,3 +558,25 @@ pub fn env_confectionary() -> Result<AuthConfectionary> {
     };
     Ok(confectionary)
 }
+
+#[test]
+fn test_macaroon_keys() {
+    assert!(parse_macaroon_key("blah").is_err());
+
+    let key_bytes: [u8; 32] = [
+        231, 158, 121, 231, 158, 121, 231, 158, 121, 231, 158, 121, 231, 158, 121, 231, 158, 121,
+        231, 158, 121, 231, 158, 121, 231, 158, 121, 231, 158, 121, 198, 107,
+    ];
+
+    // old way of parsing keys
+    let old_key = BASE64
+        .decode(b"5555555555555555555555555555555555555555xms=")
+        .unwrap();
+    assert_eq!(old_key.len(), 32);
+    assert_eq!(old_key, key_bytes);
+    let old_macaroon_key: MacaroonKey = old_key.as_slice().into();
+
+    // new (2022) way of parsing keys
+    let key = parse_macaroon_key("5555555555555555555555555555555555555555xms=").unwrap();
+    assert_eq!(old_macaroon_key, key);
+}
diff --git a/rust/src/bin/fatcat-auth.rs b/rust/src/bin/fatcat-auth.rs
index 0b5c05b0..c351848e 100644
--- a/rust/src/bin/fatcat-auth.rs
+++ b/rust/src/bin/fatcat-auth.rs
@@ -89,7 +89,7 @@ fn main() -> Result<()> {
         };
         let editor_row = editor.db_create(&db_conn)?;
         //println!("{:?}", editor);
-        println!("{}", FatcatId::from_uuid(&editor_row.id).to_string());
+        println!("{}", FatcatId::from_uuid(&editor_row.id));
     }
     ("create-token", Some(subm)) => {
         let editor_id = FatcatId::from_str(subm.value_of("editor-id").unwrap())?;
diff --git a/rust/src/bin/fatcat-doctor.rs b/rust/src/bin/fatcat-doctor.rs
index 6e869634..7255a22f 100644
--- a/rust/src/bin/fatcat-doctor.rs
+++ b/rust/src/bin/fatcat-doctor.rs
@@ -11,7 +11,6 @@ use fatcat::server::DbConn;
 use std::process;
 use std::str::FromStr;
 
-use diesel;
 use diesel::prelude::*;
 
 fn backfill_changelog_gap(conn: &DbConn, last_good: i64, max_index: i64) -> Result<()> {
diff --git a/rust/src/bin/fatcat-export.rs b/rust/src/bin/fatcat-export.rs
index 7d671b9a..9ac977aa 100644
--- a/rust/src/bin/fatcat-export.rs
+++ b/rust/src/bin/fatcat-export.rs
@@ -28,7 +28,7 @@ use std::thread;
 const CHANNEL_BUFFER_LEN: usize = 200;
 
 arg_enum! {
-    #[derive(PartialEq, Debug, Clone, Copy)]
+    #[derive(PartialEq, Debug, Clone, Copy, Eq)]
     pub enum ExportEntityType {
         Creator,
         Container,
@@ -163,7 +163,7 @@ fn parse_line(s: &str) -> Result<IdentRow> {
     let group_id: Option<FatcatId> = if fields.len() == 4 {
         match fields[3].as_ref() {
             "" => None,
-            val => Some(FatcatId::from_uuid(&Uuid::from_str(&val)?)),
+            val => Some(FatcatId::from_uuid(&Uuid::from_str(val)?)),
         }
     } else if fields.len() == 3 {
         None
@@ -174,11 +174,11 @@
         ident_id: FatcatId::from_uuid(&Uuid::from_str(&fields[0])?),
         rev_id: match fields[1].as_ref() {
             "" => None,
-            val => Some(Uuid::from_str(&val)?),
+            val => Some(Uuid::from_str(val)?),
         },
         redirect_id: match fields[2].as_ref() {
             "" => None,
-            val => Some(FatcatId::from_uuid(&Uuid::from_str(&val)?)),
+            val => Some(FatcatId::from_uuid(&Uuid::from_str(val)?)),
         },
         group_id,
     })
@@ -331,7 +331,7 @@ pub fn do_export_batch(
         (Some(_), Some(_), false) => (),
         _ => {
             if row.group_id == None || row.group_id != last_group_id {
-                if batch.len() > 0 {
+                if !batch.is_empty() {
                     row_sender.send(batch);
                     batch = vec![];
                 }
@@ -345,7 +345,7 @@
             info!("processed {} lines...", count);
         }
     }
-    if batch.len() > 0 {
+    if !batch.is_empty() {
         row_sender.send(batch);
     }
     drop(row_sender);
@@ -385,7 +385,7 @@
         None => std::cmp::min(1, num_cpus::get() / 2) as usize,
     };
     let expand = match m.value_of("expand") {
-        Some(s) => Some(ExpandFlags::from_str(&s)?),
+        Some(s) => Some(ExpandFlags::from_str(s)?),
         None => None,
     };
     let log_level = if m.is_present("quiet") {
diff --git a/rust/src/bin/fatcatd.rs b/rust/src/bin/fatcatd.rs
index 52c2bc25..bfa41805 100644
--- a/rust/src/bin/fatcatd.rs
+++ b/rust/src/bin/fatcatd.rs
@@ -48,7 +48,7 @@ fn main() -> Result<()> {
     let drain = slog_async::Async::new(drain).build().fuse();
     let logger = Logger::root(drain, o!());
     let _scope_guard = slog_scope::set_global_logger(logger.clone());
-    let _log_guard = slog_stdlog::init().unwrap();
+    slog_stdlog::init().unwrap();
     let formatter = DefaultLogFormatter;
 
     // sentry exception handling
diff --git a/rust/src/database_models.rs b/rust/src/database_models.rs
index 0427f9c8..b3b64126 100644
--- a/rust/src/database_models.rs
+++ b/rust/src/database_models.rs
@@ -13,7 +13,7 @@ use uuid::Uuid;
 
 // Ugh. I thought the whole point was to *not* do this, but:
 // https://github.com/diesel-rs/diesel/issues/1589
-#[derive(Clone, Copy, Debug, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
 pub enum EntityState {
     WorkInProgress,
     Active(Uuid),
@@ -552,7 +552,7 @@ pub struct RefsBlobRow {
     pub refs_json: serde_json::Value,
 }
 
-#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, Eq)]
 /// This model is a stable representation of what goes in a RefsBlobRow `refs_json` field (an array
 /// of this model). We could rely on the `ReleaseRef` API spec model directly, but that would lock
 /// the database contents to the API spec rigidly; by defining this struct independently, we can
@@ -684,8 +684,8 @@ impl EditgroupRow {
         Editgroup {
             editgroup_id: Some(uuid2fcid(&self.id)),
             editor_id: Some(uuid2fcid(&self.editor_id)),
-            editor: editor,
-            changelog_index: changelog_index,
+            editor,
+            changelog_index,
             created: Some(chrono::DateTime::from_utc(self.created, chrono::Utc)),
             submitted: self
                 .submitted
diff --git a/rust/src/endpoint_handlers.rs b/rust/src/endpoint_handlers.rs
index 9a76652b..ed6f6641 100644
--- a/rust/src/endpoint_handlers.rs
+++ b/rust/src/endpoint_handlers.rs
@@ -171,7 +171,7 @@ impl Server {
         };
 
         let mut entity = ContainerEntity::db_from_row(conn, rev, Some(ident), hide_flags)?;
-        entity.db_expand(&conn, expand_flags)?;
+        entity.db_expand(conn, expand_flags)?;
         Ok(entity)
     }
 
@@ -210,7 +210,7 @@
         };
 
         let mut entity = CreatorEntity::db_from_row(conn, rev, Some(ident), hide_flags)?;
-        entity.db_expand(&conn, expand_flags)?;
+        entity.db_expand(conn, expand_flags)?;
         Ok(entity)
     }
 
@@ -280,7 +280,7 @@
         };
 
         let mut entity = FileEntity::db_from_row(conn, rev, Some(ident), hide_flags)?;
-        entity.db_expand(&conn, expand_flags)?;
+        entity.db_expand(conn, expand_flags)?;
         Ok(entity)
     }
 
@@ -705,7 +705,7 @@
         };
 
         let mut entity = ReleaseEntity::db_from_row(conn, rev, Some(ident), hide_flags)?;
-        entity.db_expand(&conn, expand_flags)?;
+        entity.db_expand(conn, expand_flags)?;
         Ok(entity)
     }
 
@@ -906,7 +906,7 @@
         username.truncate(24);
         let editor = Editor {
             editor_id: None,
-            username: username,
+            username,
             is_admin: Some(false),
             is_bot: Some(false),
             is_active: Some(true),
diff --git a/rust/src/entity_crud.rs b/rust/src/entity_crud.rs
index f48246a5..51c89108 100644
--- a/rust/src/entity_crud.rs
+++ b/rust/src/entity_crud.rs
@@ -96,7 +96,7 @@ where
     fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result<Vec<Uuid>>;
 }
 
-#[derive(Clone, Copy, PartialEq)]
+#[derive(Clone, Copy, PartialEq, Eq)]
 pub struct ExpandFlags {
     pub files: bool,
     pub filesets: bool,
@@ -187,7 +187,7 @@ fn test_expand_flags() {
     );
 }
 
-#[derive(Clone, Copy, PartialEq)]
+#[derive(Clone, Copy, PartialEq, Eq)]
 pub struct HideFlags {
     // release
     pub abstracts: bool,
@@ -1194,7 +1194,7 @@ impl EntityCrud for FileEntity {
             None => (),
             Some(url_list) => {
                 let these_url_rows: Vec<FileRevUrlNewRow> = url_list
-                    .into_iter()
+                    .iter()
                     .map(|u| FileRevUrlNewRow {
                         file_rev: *rev_id,
                         rel: u.rel.clone(),
@@ -1398,7 +1398,7 @@ impl EntityCrud for FilesetEntity {
             None => (),
             Some(file_list) => {
                 let these_file_rows: Vec<FilesetRevFileNewRow> = file_list
-                    .into_iter()
+                    .iter()
                     .map(|f| FilesetRevFileNewRow {
                         fileset_rev: *rev_id,
                         path_name: f.path.clone(),
@@ -1418,7 +1418,7 @@
             None => (),
             Some(url_list) => {
                 let these_url_rows: Vec<FilesetRevUrlNewRow> = url_list
-                    .into_iter()
+                    .iter()
                     .map(|u| FilesetRevUrlNewRow {
                         fileset_rev: *rev_id,
                         rel: u.rel.clone(),
@@ -1653,11 +1653,11 @@ impl EntityCrud for WebcaptureEntity {
             None => (),
             Some(cdx_list) => {
                 let these_cdx_rows: Vec<WebcaptureRevCdxNewRow> = cdx_list
-                    .into_iter()
+                    .iter()
                     .map(|c| WebcaptureRevCdxNewRow {
                         webcapture_rev: *rev_id,
                         surt: c.surt.clone(),
-                        timestamp: c.timestamp.clone(),
+                        timestamp: c.timestamp,
                         url: c.url.clone(),
                         mimetype: c.mimetype.clone(),
                         status_code: c.status_code,
@@ -1674,7 +1674,7 @@ impl EntityCrud for WebcaptureEntity {
             None => (),
             Some(url_list) => {
                 let these_url_rows: Vec<WebcaptureRevUrlNewRow> = url_list
-                    .into_iter()
+                    .iter()
                     .map(|u| WebcaptureRevUrlNewRow {
                         webcapture_rev: *rev_id,
                         rel: u.rel.clone(),
@@ -1817,8 +1817,8 @@ impl EntityCrud for ReleaseEntity {
             Some(ident) => match &self.redirect {
                 // If we're a redirect, then expand for the *target* identifier, not *our*
                 // identifier. Tricky!
-                None => FatcatId::from_str(&ident)?,
-                Some(redir) => FatcatId::from_str(&redir)?,
+                None => FatcatId::from_str(ident)?,
+                Some(redir) => FatcatId::from_str(redir)?,
             },
         };
         self.files = Some(get_release_files(conn, ident, HideFlags::none())?);
@@ -1827,8 +1827,8 @@
             let ident = match &self.ident {
                 None => bail!("Can't expand filesets on a non-concrete entity"), // redundant with above is_some()
                 Some(ident) => match &self.redirect {
-                    None => FatcatId::from_str(&ident)?,
-                    Some(redir) => FatcatId::from_str(&redir)?,
+                    None => FatcatId::from_str(ident)?,
+                    Some(redir) => FatcatId::from_str(redir)?,
                 },
             };
             self.filesets = Some(get_release_filesets(conn, ident, HideFlags::none())?);
@@ -1837,8 +1837,8 @@
             let ident = match &self.ident {
                 None => bail!("Can't expand webcaptures on a non-concrete entity"), // redundant with above is_some()
                 Some(ident) => match &self.redirect {
-                    None => FatcatId::from_str(&ident)?,
-                    Some(redir) => FatcatId::from_str(&redir)?,
+                    None => FatcatId::from_str(ident)?,
+                    Some(redir) => FatcatId::from_str(redir)?,
                 },
             };
             self.webcaptures = Some(get_release_webcaptures(conn, ident, HideFlags::none())?);
@@ -1847,7 +1847,7 @@
         if let Some(ref cid) = self.container_id {
             self.container = Some(ContainerEntity::db_get(
                 conn,
-                FatcatId::from_str(&cid)?,
+                FatcatId::from_str(cid)?,
                 HideFlags::none(),
             )?);
         }
@@ -1928,7 +1928,7 @@
                 model
             })
             .collect();
-        let model_refs: Vec<&Self> = models_with_work_ids.iter().map(|s| s).collect();
+        let model_refs: Vec<&Self> = models_with_work_ids.iter().collect();
         let models = model_refs.as_slice();
 
         // The rest here is copy/pasta from the generic (how to avoid copypasta?)
@@ -1951,7 +1951,7 @@
         rev_ids
             .into_iter()
             .zip(ident_ids.into_iter())
-            .zip(models.into_iter().map(|m| m.edit_extra.clone()))
+            .zip(models.iter().map(|m| m.edit_extra.clone()))
             .map(|((rev_id, ident_id), edit_extra)| Self::EditNewRow {
                 editgroup_id: edit_context.editgroup_id.to_uuid(),
                 rev_id: Some(rev_id),
@@ -2100,7 +2100,7 @@
             withdrawn_status: rev_row.withdrawn_status,
             withdrawn_date: rev_row.withdrawn_date,
             withdrawn_year: rev_row.withdrawn_year,
-            ext_ids: ext_ids,
+            ext_ids,
             volume: rev_row.volume,
             issue: rev_row.issue,
             pages: rev_row.pages,
@@ -2459,7 +2459,7 @@
                     .collect();
                 abstract_rows.extend(new_abstracts);
                 let new_release_abstract_rows: Vec<ReleaseRevAbstractNewRow> = abstract_list
-                    .into_iter()
+                    .iter()
                     .map(|c| {
                         Ok(ReleaseRevAbstractNewRow {
                             release_rev: *rev_id,
diff --git a/rust/src/errors.rs b/rust/src/errors.rs
index ea0f9646..38429802 100644
--- a/rust/src/errors.rs
+++ b/rust/src/errors.rs
@@ -173,16 +173,16 @@ impl From<data_encoding::DecodeError> for FatcatError {
 impl From<failure::Error> for FatcatError {
     fn from(error: failure::Error) -> FatcatError {
         // TODO: I think it should be possible to match here? regardless, this is *super* janky
-        if let Some(_) = error.downcast_ref::<FatcatError>() {
+        if error.downcast_ref::<FatcatError>().is_some() {
             return error.downcast::<FatcatError>().unwrap();
         }
-        if let Some(_) = error.downcast_ref::<std::fmt::Error>() {
+        if error.downcast_ref::<std::fmt::Error>().is_some() {
             return error.downcast::<std::fmt::Error>().unwrap().into();
         }
-        if let Some(_) = error.downcast_ref::<diesel::result::Error>() {
+        if error.downcast_ref::<diesel::result::Error>().is_some() {
             return error.downcast::<diesel::result::Error>().unwrap().into();
         }
-        if let Some(_) = error.downcast_ref::<uuid::ParseError>() {
+        if error.downcast_ref::<uuid::ParseError>().is_some() {
             return error.downcast::<uuid::ParseError>().unwrap().into();
         }
         FatcatError::InternalError(error.to_string())
diff --git a/rust/src/identifiers.rs b/rust/src/identifiers.rs
index e9baf7b8..56174be1 100644
--- a/rust/src/identifiers.rs
+++ b/rust/src/identifiers.rs
@@ -5,7 +5,7 @@ use std::str::FromStr;
 use std::{convert, fmt};
 use uuid::Uuid;
 
-#[derive(Clone, Copy, PartialEq, Debug)]
+#[derive(Clone, Copy, PartialEq, Debug, Eq)]
 pub struct FatcatId(Uuid);
 
 impl fmt::Display for FatcatId {
@@ -51,7 +51,7 @@ impl FatcatId {
 
 /// Convert fatcat IDs (base32 strings) to UUID
 pub fn fcid2uuid(fcid: &str) -> Result<Uuid> {
-    if fcid.is_ascii() == false || fcid.len() != 26 {
+    if !fcid.is_ascii() || fcid.len() != 26 {
         return Err(FatcatError::InvalidFatcatId(fcid.to_string()).into());
    }
    let mut raw = vec![0; 16];
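Note on the auth.rs hunks: the newer macaroon crate API works in terms of ByteString predicates and a Verifier that is handed the macaroon explicitly, instead of string predicates and mac.verify(). A minimal, self-contained sketch of that flow using only the calls that appear in the diff above; the key, location, identifier, and editor_id values here are made up for illustration:

    use macaroon::{Format, Macaroon, MacaroonKey, Verifier};

    fn main() {
        macaroon::initialize().expect("initializing macaroon library");

        // 32-byte signing key, same shape as DUMMY_KEY_BYTES above
        let key_bytes: &[u8] = b"dummy-key-a-one-two-three-a-la!!";
        let key = MacaroonKey::from(key_bytes);

        // mint a token with a single first-party caveat
        let mut mac = Macaroon::create(
            Some("test.fatcat.wiki".to_string()),
            &key,
            "dummy".to_string().into(),
        )
        .expect("Macaroon creation");
        mac.add_first_party_caveat("editor_id = aaaaaaaaaaaabkvkaaaaaaaaae".to_string().into());

        // this is what create_token() base64-encodes and hands out
        let _raw = mac.serialize(Format::V2).expect("macaroon serialization");

        // verification: declare which exact caveats are satisfied, then check the signature
        let mut verifier = Verifier::default();
        verifier.satisfy_exact("editor_id = aaaaaaaaaaaabkvkaaaaaaaaae".to_string().into());
        verifier
            .verify(&mac, &key, Default::default())
            .expect("signature and caveats should check out");
    }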
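The 44-character check in parse_macaroon_key() is just the length of a 32-byte key in padded base64 (32 bytes encode to 11 groups of 4 characters = 44), so any input that passes the check decodes to exactly 32 bytes before being handed to MacaroonKey::from. A quick standalone check of that arithmetic with data_encoding (illustrative only):

    use data_encoding::BASE64;

    fn main() {
        let raw = [0x55u8; 32];
        let encoded = BASE64.encode(&raw);
        // padded base64 of 32 raw bytes is always 44 characters
        assert_eq!(encoded.len(), 44);
        assert_eq!(BASE64.decode(encoded.as_bytes()).unwrap().len(), 32);
    }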
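On the errors.rs TODO ("should be possible to match here"): one possible shape, not part of this change, is to use failure's consuming downcast(), which hands the original error back on a miss, so the downcast_ref()/unwrap() pairs disappear. Sketch only, assuming the same From impls the existing code already relies on:

    impl From<failure::Error> for FatcatError {
        fn from(error: failure::Error) -> FatcatError {
            // each downcast either returns early with the concrete error type,
            // or gives the original failure::Error back for the next attempt
            let error = match error.downcast::<FatcatError>() {
                Ok(e) => return e,
                Err(e) => e,
            };
            let error = match error.downcast::<std::fmt::Error>() {
                Ok(e) => return e.into(),
                Err(e) => e,
            };
            let error = match error.downcast::<diesel::result::Error>() {
                Ok(e) => return e.into(),
                Err(e) => e,
            };
            let error = match error.downcast::<uuid::ParseError>() {
                Ok(e) => return e.into(),
                Err(e) => e,
            };
            FatcatError::InternalError(error.to_string())
        }
    }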