Pull private messages only to self.
- I am experimenting with using these as private notes.
parent 6bc62f52cd
commit 8a9a3174ab

4 changed files with 220 additions and 149 deletions

src/main.rs | 153
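The "only to self" rule the commit title describes is the filter added to fetch_dm_page below. Restated as a standalone predicate, as a sketch using the megalodon Status fields the diff touches (this helper is not part of the commit):

use megalodon::entities::{Account, Status, StatusVisibility};

// A status counts as a note to self when it is a direct message, mentions
// nobody, and is either not a reply or replies only to the author's account.
fn is_note_to_self(account: &Account, status: &Status) -> bool {
    matches!(status.visibility, StatusVisibility::Direct)
        && status.mentions.is_empty()
        && status
            .in_reply_to_account_id
            .as_ref()
            .map(|id| id == &account.id)
            .unwrap_or(true)
}

fetch_dm_page applies the same checks inline while filtering each fetched page.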
@@ -3,9 +3,9 @@ use chrono::{DateTime, Local, Utc};
use clap::{arg, command, Parser};
use log::{debug, trace};
use megalodon::{
    entities::{Account, Status},
    entities::{Account, Status, StatusVisibility},
    generator,
    megalodon::GetLocalTimelineInputOptions,
    megalodon::{GetAccountStatusesInputOptions, GetLocalTimelineInputOptions},
    response::Response,
    Megalodon,
};
@@ -80,38 +80,46 @@ async fn main() -> Result<()> {
        trace!("Page bounds {:?}", page);

        // this age comparison only applies after the first page is fetched; the rest of the loop
        // body handles if the requested date is newer than any statuses on the first page
        if last_id_on_page.is_some() && page_start_older_than(&page, &day) {
        let (last_id, next_iter, mut formatted) =
            process_page(&client, &account, &statuses, &last_id_on_page, &day, 1).await?;
        reversed.append(&mut formatted);
        if let Some(NextIter::Stop) = next_iter {
            break;
        }

        // fetching returns 20 at a time, in reverse chronological order so may require skipping
        // pages after the requested date
        if let Some(oldest_id) = page_newer_than(&page, &day) {
            last_id_on_page.replace(oldest_id);
        if let Some(last_id) = last_id {
            last_id_on_page.replace(last_id);
        }
        if let Some(NextIter::Skip) = next_iter {
            continue;
        }

        // mapping the vector runs into thorny ownership issues and only produces futures, not
        // resolved values; a for in loop works with await but also runs into thorny ownership
        // issues; a stream resolves both because the stream takes ownership of the statuses and
        // can be iterated in a simple way that allows the use of await in the body
        let mut stream = iter(filter_statuses(&account, &day, &statuses));
        while let Some(status) = stream.next().await {
            reversed.push(format_status(&client, &account, status).await?);
        }

        if page_end_older_than(&page, &day) {
            debug!("No more posts in range.");
        }
    last_id_on_page = None;
    loop {
        let statuses = fetch_dm_page(&client, &account, &last_id_on_page).await?;
        if statuses.is_empty() {
            debug!("No more DMs in range.");
            break;
        }
        let page = bounds_from(&statuses);

        if let Some(id) = page.oldest_id {
            last_id_on_page.replace(id.clone());
        trace!("Page bounds {:?}", page);

        let (last_id, next_iter, mut formatted) =
            process_page(&client, &account, &statuses, &last_id_on_page, &day, 0).await?;
        reversed.append(&mut formatted);
        if let Some(NextIter::Stop) = next_iter {
            break;
        }
        if let Some(last_id) = last_id {
            last_id_on_page.replace(last_id);
        }
        if let Some(NextIter::Skip) = next_iter {
            continue;
        }
    }

    reversed.reverse();

    if let Some(output_dir) = output_dir {
        let output = format!("{}{}.md", output_dir, date);
        let mut f = File::options().append(true).open(&output)?;
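Both loops above walk the timeline the same way: request a page, remember the id of its oldest status, and pass that id as max_id on the next request, until a page comes back empty or falls before the requested day. A minimal sketch of that scheme, with a hypothetical fetch standing in for fetch_page / fetch_dm_page:

use megalodon::entities::Status;

// Hypothetical stand-in for fetch_page / fetch_dm_page: one page of statuses,
// newest first, starting below `max_id` when it is given.
async fn fetch(_max_id: Option<String>) -> Vec<Status> {
    Vec::new()
}

async fn walk_pages() {
    let mut last_id_on_page: Option<String> = None;
    loop {
        let statuses = fetch(last_id_on_page.clone()).await;
        if statuses.is_empty() {
            break; // ran out of history
        }
        // reverse chronological order: the last status on the page is the
        // oldest, and its id becomes max_id for the next request
        if let Some(oldest) = statuses.last() {
            last_id_on_page = Some(oldest.id.clone());
        }
        // ...skip, process, or stop depending on how the page overlaps the day
    }
}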
@@ -123,6 +131,57 @@ async fn main() -> Result<()> {
    Ok(())
}

enum NextIter {
    Skip,
    Stop,
}

async fn process_page(
    client: &Box<dyn Megalodon + Send + Sync + 'static>,
    account: &Account,
    statuses: &Vec<Status>,
    last_id_on_page: &Option<String>,
    day: &Range,
    depth: usize,
) -> Result<(Option<String>, Option<NextIter>, Vec<String>)> {
    let page = bounds_from(&statuses);

    trace!("Page bounds {:?}", page);

    // this age comparison only applies after the first page is fetched; the rest of the loop
    // body handles if the requested date is newer than any statuses on the first page
    if last_id_on_page.is_some() && page_start_older_than(&page, day) {
        return Ok((None, Some(NextIter::Stop), Vec::new()));
    }

    // fetching returns 20 at a time, in reverse chronological order so may require skipping
    // pages after the requested date
    if let Some(oldest_id) = page_newer_than(&page, &day) {
        return Ok((Some(oldest_id), Some(NextIter::Skip), Vec::new()));
    }

    // mapping the vector runs into thorny ownership issues and only produces futures, not
    // resolved values; a for in loop works with await but also runs into thorny ownership
    // issues; a stream resolves both because the stream takes ownership of the statuses and
    // can be iterated in a simple way that allows the use of await in the body
    let mut stream = iter(filter_statuses(account, &day, &statuses));
    let mut formatted = Vec::new();
    while let Some(status) = stream.next().await {
        formatted.push(format_status(client, depth, &account, status).await?);
    }

    if page_end_older_than(&page, &day) {
        debug!("No more posts in range.");
        return Ok((None, Some(NextIter::Stop), formatted));
    }

    if let Some(id) = page.oldest_id {
        return Ok((Some(id.clone()), None, formatted));
    } else {
        return Ok((None, None, formatted));
    }
}

// Only ones authored by the user, on the date requested, that aren't a reply to any other status
fn filter_statuses<'a>(account: &Account, day: &Range, json: &'a Vec<Status>) -> Vec<&'a Status> {
    json.iter()
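The ownership comment in process_page is the usual reason for reaching for futures::stream::iter: mapping a Vec to async calls only yields unresolved futures, while a stream owns the items and lets the loop body await each one. A self-contained sketch of that pattern, where render is a hypothetical stand-in for format_status:

use futures::stream::{iter, StreamExt};

// Hypothetical async formatter standing in for format_status.
async fn render(item: &str) -> String {
    format!("- {item}")
}

async fn collect_rendered(items: Vec<&str>) -> Vec<String> {
    let mut out = Vec::new();
    // iter() takes ownership of the filtered items, so each one can be
    // awaited in turn; a plain .map(render) over the Vec would only
    // produce unresolved futures, not strings.
    let mut stream = iter(items);
    while let Some(item) = stream.next().await {
        out.push(render(item).await);
    }
    out
}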
@@ -143,8 +202,9 @@ async fn fetch_page(
    client: &Box<dyn Megalodon + Send + Sync>,
    last_id_on_page: &Option<String>,
) -> Result<Vec<Status>> {
    trace!("Fetching page of local timeline");
    let Response { json, .. } = if let Some(max_id) = last_id_on_page.as_ref() {
        debug!("Fetching next page");
        trace!("Fetching next page");
        client
            .get_local_timeline(Some(&GetLocalTimelineInputOptions {
                max_id: Some(max_id.clone()),
@@ -152,12 +212,53 @@ async fn fetch_page(
            }))
            .await?
    } else {
        debug!("Fetching first page");
        trace!("Fetching first page");
        client.get_local_timeline(None).await?
    };
    Ok(json)
}

async fn fetch_dm_page(
    client: &Box<dyn Megalodon + Send + Sync>,
    account: &Account,
    last_id_on_page: &Option<String>,
) -> Result<Vec<Status>> {
    trace!("Fetching page of DMs");
    let Response { json, .. } = if let Some(max_id) = last_id_on_page.as_ref() {
        trace!("Fetching next page");
        client
            .get_account_statuses(
                account.id.clone(),
                Some(&GetAccountStatusesInputOptions {
                    max_id: Some(max_id.clone()),
                    ..GetAccountStatusesInputOptions::default()
                }),
            )
            .await?
    } else {
        trace!("Fetching first page");
        client
            .get_account_statuses(account.id.clone(), None)
            .await?
    };
    let json: Vec<Status> = json
        .into_iter()
        .filter(|s| {
            if let StatusVisibility::Direct = s.visibility {
                (s.in_reply_to_account_id.is_none()
                    || s.in_reply_to_account_id
                        .as_ref()
                        .map(|r| r == &account.id)
                        .unwrap_or_default())
                    && s.mentions.is_empty()
            } else {
                false
            }
        })
        .collect();
    Ok(json)
}

fn page_newer_than(page: &Page, range: &Range) -> Option<String> {
    page.oldest
        .filter(|oldest| *oldest > &range.end)
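On the comparison at the end of page_newer_than: a page counts as entirely newer than the requested range when even its oldest status postdates the range's end, in which case the caller keeps paging backwards. A tiny worked instance with made-up timestamps:

use chrono::{TimeZone, Utc};

// If the oldest status on the page is still later than the end of the
// requested day, nothing on the page can match and the page is skipped.
fn page_is_entirely_newer() -> bool {
    let range_end = Utc.with_ymd_and_hms(2023, 5, 1, 23, 59, 59).unwrap();
    let oldest_on_page = Utc.with_ymd_and_hms(2023, 5, 3, 8, 0, 0).unwrap();
    oldest_on_page > range_end // true: the whole page is after the day
}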