Diffstat (limited to 'server/src/apub')
 server/src/apub/comment.rs | 19 +++++++++++++++++++
 server/src/apub/fetcher.rs | 27 ++++++++++++++++++++++++++-
 2 files changed, 45 insertions(+), 1 deletion(-)
diff --git a/server/src/apub/comment.rs b/server/src/apub/comment.rs
index 17da45a6..4c2faa21 100644
--- a/server/src/apub/comment.rs
+++ b/server/src/apub/comment.rs
@@ -1,5 +1,24 @@
use super::*;
+#[derive(Deserialize)]
+pub struct CommentQuery {
+ comment_id: String,
+}
+
+/// Return the comment json over HTTP.
+pub async fn get_apub_comment(
+ info: Path<CommentQuery>,
+ db: DbPoolParam,
+) -> Result<HttpResponse<Body>, Error> {
+ let id = info.comment_id.parse::<i32>()?;
+ let comment = Comment::read(&db.get()?, id)?;
+ if !comment.deleted {
+ Ok(create_apub_response(&comment.to_apub(&db.get()?)?))
+ } else {
+ Ok(create_apub_tombstone_response(&comment.to_tombstone()?))
+ }
+}
+
impl ToApub for Comment {
type Response = Note;
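
The handler above covers only the endpoint itself; the route registration is not part of this diff. A minimal sketch of how it might be mounted, assuming actix-web's routing API and a /comment/{comment_id} path to match the example URLs documented in fetcher.rs below (the config function and module path are illustrative, not taken from this commit):

    use actix_web::web;

    use crate::apub::comment::get_apub_comment;

    // Hypothetical route registration (not in this commit): serves
    // GET /comment/{comment_id} with the handler added above, so remote
    // instances can fetch a comment as ActivityPub JSON.
    pub fn config(cfg: &mut web::ServiceConfig) {
      cfg.route("/comment/{comment_id}", web::get().to(get_apub_comment));
    }
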
diff --git a/server/src/apub/fetcher.rs b/server/src/apub/fetcher.rs
index 115ef6ff..994e75f2 100644
--- a/server/src/apub/fetcher.rs
+++ b/server/src/apub/fetcher.rs
@@ -40,6 +40,7 @@ pub enum SearchAcceptedObjects {
Person(Box<PersonExt>),
Group(Box<GroupExt>),
Page(Box<PageExt>),
+ Comment(Box<Note>),
}
/// Attempt to parse the query as URL, and fetch an ActivityPub object from it.
@@ -47,7 +48,8 @@ pub enum SearchAcceptedObjects {
/// Some working examples for use with the docker/federation/ setup:
/// http://lemmy_alpha:8540/c/main, or !main@lemmy_alpha:8540
/// http://lemmy_alpha:8540/u/lemmy_alpha, or @lemmy_alpha@lemmy_alpha:8540
-/// http://lemmy_alpha:8540/p/3
+/// http://lemmy_alpha:8540/post/3
+/// http://lemmy_alpha:8540/comment/2
pub fn search_by_apub_id(query: &str, conn: &PgConnection) -> Result<SearchResponse, Error> {
// Parse the shorthand query url
let query_url = if query.contains('@') {
@@ -99,6 +101,20 @@ pub fn search_by_apub_id(query: &str, conn: &PgConnection) -> Result<SearchRespo
let p = upsert_post(&PostForm::from_apub(&p, conn)?, conn)?;
response.posts = vec![PostView::read(conn, p.id, None)?];
}
+ SearchAcceptedObjects::Comment(c) => {
+ let post_url = c
+ .object_props
+ .get_many_in_reply_to_xsd_any_uris()
+ .unwrap()
+ .next()
+ .unwrap()
+ .to_string();
+ // TODO: also fetch parent comments if any
+ let post = fetch_remote_object(&Url::parse(&post_url)?)?;
+ upsert_post(&PostForm::from_apub(&post, conn)?, conn)?;
+ let c = upsert_comment(&CommentForm::from_apub(&c, conn)?, conn)?;
+ response.comments = vec![CommentView::read(conn, c.id, None)?];
+ }
}
Ok(response)
}
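
For reference, a sketch of how the new Comment arm is exercised end to end, using the comment URL from the doc examples above; the wrapper function and assertion are illustrative, while search_by_apub_id and the URL come from this diff:

    // Hypothetical usage (not in this commit): resolve a remote comment by its
    // ActivityPub id. The Comment arm above upserts the parent post, then the
    // comment, and returns the comment as the only search result.
    fn resolve_remote_comment_example(conn: &PgConnection) -> Result<(), Error> {
      let response = search_by_apub_id("http://lemmy_alpha:8540/comment/2", conn)?;
      assert_eq!(response.comments.len(), 1);
      Ok(())
    }
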
@@ -198,6 +214,15 @@ fn upsert_post(post_form: &PostForm, conn: &PgConnection) -> Result<Post, Error>
}
}
+fn upsert_comment(comment_form: &CommentForm, conn: &PgConnection) -> Result<Comment, Error> {
+ let existing = Comment::read_from_apub_id(conn, &comment_form.ap_id);
+ match existing {
+ Err(NotFound {}) => Ok(Comment::create(conn, &comment_form)?),
+ Ok(p) => Ok(Comment::update(conn, p.id, &comment_form)?),
+ Err(e) => Err(Error::from(e)),
+ }
+}
+
// TODO It should not be fetching data from a community outbox.
// All posts, comments, comment likes, etc should be posts to our community_inbox
// The only data we should be periodically fetching (if it hasn't been fetched in the last day
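
On the error-handling side, the Comment arm calls unwrap() twice while pulling the inReplyTo URL out of the Note. A sketch of a fallible alternative, reusing the same object_props accessor shown above; the helper name is illustrative and assumes the Note type already imported in this module:

    // Hypothetical helper (not in this commit): returns None instead of
    // panicking when the Note carries no inReplyTo, so a malformed remote
    // comment cannot crash the search request.
    fn in_reply_to_url(note: &Note) -> Option<String> {
      note
        .object_props
        .get_many_in_reply_to_xsd_any_uris()?
        .next()
        .map(|uri| uri.to_string())
    }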