author     Bryan Newbold <bnewbold@robocracy.org>  2019-01-28 20:39:34 -0800
committer  Bryan Newbold <bnewbold@robocracy.org>  2019-01-28 20:39:34 -0800
commit     dff2e45c51404713460080e7d42b9d7c7869f91c (patch)
tree       2dec36b536588c5f980cf288bec5e700e67947c6 /rust
parent     c3f90b450ee4449d6b6f44bc690e0cdbca3ba428 (diff)
download   fatcat-dff2e45c51404713460080e7d42b9d7c7869f91c.tar.gz
           fatcat-dff2e45c51404713460080e7d42b9d7c7869f91c.zip
WIP on a hack-y changelog export (x-attic-changelog-export)
Diffstat (limited to 'rust')
-rw-r--r--  rust/src/bin/fatcat-export.rs  |  95
1 file changed, 84 insertions(+), 11 deletions(-)
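In outline, the new Changelog export mode consumes a TSV dump of the changelog table (one `index<TAB>editgroup_id<TAB>timestamp` row per line, matching the test cases further down) and writes one JSON object per line. The following is a minimal, self-contained sketch of that input/output shape only; the struct and function names are illustrative, not fatcat's models, and the real worker looks each row up in the database rather than echoing the TSV fields:

    use serde::Serialize;

    // Hypothetical stand-in for the exported changelog record; not fatcat's model.
    #[derive(Serialize)]
    struct ChangelogLine {
        index: u64,
        editgroup_id: String,
        timestamp: String,
    }

    // Parse one assumed "index \t editgroup_id \t timestamp" line and re-emit it as JSON.
    fn tsv_to_json(line: &str) -> Result<String, Box<dyn std::error::Error>> {
        let fields: Vec<&str> = line.split('\t').collect();
        if fields.len() != 3 {
            return Err("expected 3 tab-separated fields".into());
        }
        let entry = ChangelogLine {
            index: fields[0].parse()?,
            editgroup_id: fields[1].to_string(),
            timestamp: fields[2].to_string(),
        };
        Ok(serde_json::to_string(&entry)?)
    }

    fn main() {
        let line = "8\t94f11d0d-32c0-49fb-a71e-f3835af56582\t2019-01-23 20:51:50.321105-08";
        println!("{}", tsv_to_json(line).unwrap());
    }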
diff --git a/rust/src/bin/fatcat-export.rs b/rust/src/bin/fatcat-export.rs
index 6d2d396c..182c8228 100644
--- a/rust/src/bin/fatcat-export.rs
+++ b/rust/src/bin/fatcat-export.rs
@@ -36,7 +36,8 @@ arg_enum! {
Fileset,
Webcapture,
Release,
- Work
+ Work,
+ Changelog,
}
}
@@ -44,6 +45,7 @@ struct IdentRow {
ident_id: FatcatId,
rev_id: Option<Uuid>,
redirect_id: Option<FatcatId>,
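+    // changelog index; set only by parse_line_changelog, None for regular ident dump lines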
+ index: Option<u64>,
}
macro_rules! generic_loop_work {
@@ -87,6 +89,39 @@ generic_loop_work!(loop_work_webcapture, WebcaptureEntity);
generic_loop_work!(loop_work_release, ReleaseEntity);
generic_loop_work!(loop_work_work, WorkEntity);
+fn loop_work_changelog(
+    row_receiver: channel::Receiver<IdentRow>,
+    output_sender: channel::Sender<String>,
+    db_conn: &DbConn,
+    _expand: Option<ExpandFlags>,
+) {
+    let result: Result<()> = (|| {
+        for row in row_receiver {
+            // changelog rows are keyed by their index (bigserial primary key), not by
+            // ident/rev like the other entity types
+            let index = row.index.expect("changelog index column") as i64;
+            let cl_row: ChangelogRow = changelog::table.find(index).first(db_conn)?;
+            let entry = cl_row.into_model();
+            // TODO: inline the editgroup as well; the API server does this in its
+            // changelog handler, but that helper isn't reachable from this binary yet
+            // (which is also why `_expand` goes unused for now)
+            output_sender.send(serde_json::to_string(&entry)?);
+        }
+        Ok(())
+    })();
+    if let Err(ref e) = result {
+        error!("{}", e); // e.display_chain())
+    }
+    result.unwrap()
+}
+
fn loop_printer(
output_receiver: channel::Receiver<String>,
done_sender: channel::Sender<()>,
@@ -119,6 +154,20 @@ fn parse_line(s: &str) -> Result<IdentRow> {
"" => None,
val => Some(FatcatId::from_uuid(&Uuid::from_str(&val)?)),
},
+ index: None,
+ })
+}
+
+fn parse_line_changelog(s: &str) -> Result<IdentRow> {
+    // changelog dump lines are "index \t editgroup_id \t timestamp"
+    let fields: Vec<String> = s.split('\t').map(|v| v.to_string()).collect();
+    if fields.len() != 3 || fields[2].is_empty() {
+        bail!("Invalid input line");
+    }
+    Ok(IdentRow {
+        // hack: stash the editgroup UUID in ident_id; only `index` is used downstream
+        ident_id: FatcatId::from_uuid(&Uuid::from_str(&fields[1])?),
+        rev_id: None,
+        redirect_id: None,
+        index: Some(fields[0].parse::<u64>()?),
})
}
@@ -139,6 +188,22 @@ fn test_parse_line() {
assert!(parse_line("00000000-0000-0000-3333-000000000002\t00000000-0000-0000-3333-fff000000001\t00000000-0000-0000-3333-000000000001").is_ok());
}
+#[test]
+fn test_parse_line_changelog() {
+    assert!(parse_line_changelog(
+        "8\t94f11d0d-32c0-49fb-a71e-f3835af56582\t2019-01-23 20:51:50.321105-08"
+    )
+    .is_ok());
+    assert!(parse_line_changelog(
+        "\t94f11d0d-32c0-49fb-a71e-f3835af56582\t2019-01-23 20:51:50.321105-08"
+    )
+    .is_err());
+    assert!(parse_line_changelog("8\t94f11d0d-32c0-49fb-a71e-f3835af56582\t").is_err());
+}
+
// Use num_cpus/2, or CLI arg for worker count
//
// 1. open buffered reader, buffered writer, and database pool. create channels
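As an aside, the comment above describes the pipeline this binary is built around: one reader feeding ident rows into a channel, several worker threads doing database lookups, and a single printer draining an output channel. Below is a minimal standalone sketch of that channel topology with crossbeam-channel; the worker count, capacities, and the String payloads (standing in for IdentRow and the JSON output) are arbitrary here, not fatcat's actual values:

    use crossbeam_channel as channel;
    use std::thread;

    fn main() {
        let (row_sender, row_receiver) = channel::bounded::<String>(1000);
        let (output_sender, output_receiver) = channel::bounded::<String>(1000);

        // Worker threads: each pulls rows off the shared channel and pushes results.
        let mut workers = Vec::new();
        for _ in 0..4 {
            let rx = row_receiver.clone();
            let tx = output_sender.clone();
            workers.push(thread::spawn(move || {
                for row in rx {
                    // fatcat-export does a database fetch + JSON serialization here
                    tx.send(format!("processed: {}", row)).unwrap();
                }
            }));
        }
        // Drop the main thread's sender so the printer exits once all workers hang up.
        drop(output_sender);

        // Single printer thread serializes all output to stdout.
        let printer = thread::spawn(move || {
            for line in output_receiver {
                println!("{}", line);
            }
        });

        // "Reader": send some rows, then hang up so the workers terminate.
        for i in 0..10 {
            row_sender.send(format!("row {}", i)).unwrap();
        }
        drop(row_sender);

        for w in workers {
            w.join().unwrap();
        }
        printer.join().unwrap();
    }

Dropping the senders is what ends the receiver loops; the real do_export relies on the same disconnect-driven shutdown (see the drop(output_sender) below).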
@@ -177,8 +242,8 @@ pub fn do_export(
ExportEntityType::Creator => thread::spawn(move || {
loop_work_creator(row_receiver, output_sender, &db_conn, expand)
}),
ExportEntityType::File => {
    thread::spawn(move || loop_work_file(row_receiver, output_sender, &db_conn, expand))
}
ExportEntityType::Fileset => thread::spawn(move || {
loop_work_fileset(row_receiver, output_sender, &db_conn, expand)
@@ -189,9 +254,12 @@ pub fn do_export(
ExportEntityType::Release => thread::spawn(move || {
loop_work_release(row_receiver, output_sender, &db_conn, expand)
}),
ExportEntityType::Work => {
    thread::spawn(move || loop_work_work(row_receiver, output_sender, &db_conn, expand))
}
+ ExportEntityType::Changelog => thread::spawn(move || {
+     loop_work_changelog(row_receiver, output_sender, &db_conn, expand)
+ }),
};
}
drop(output_sender);
@@ -201,11 +269,16 @@ pub fn do_export(
let mut count = 0;
for line in buf_input.lines() {
let line = line?;
- let row = parse_line(&line)?;
- match (row.rev_id, row.redirect_id, redirects) {
- (None, _, _) => (),
- (Some(_), Some(_), false) => (),
- _ => row_sender.send(row),
+ if let ExportEntityType::Changelog = entity_type {
+     // changelog dump lines have their own format and no redirect/rev filtering
+     let change_row = parse_line_changelog(&line)?;
+     row_sender.send(change_row);
+ } else {
+     let row = parse_line(&line)?;
+     match (row.rev_id, row.redirect_id, redirects) {
+         (None, _, _) => (),
+         (Some(_), Some(_), false) => (),
+         _ => row_sender.send(row),
+     }
}
count += 1;
if count % 1000 == 0 {