From e5da37b4f14e410a43398c4ee2c503e8d38a3fc4 Mon Sep 17 00:00:00 2001
From: Kieran
Date: Tue, 26 Dec 2023 22:23:36 +0000
Subject: [PATCH] feat: nip-50 support (postgres)

---
 src/info.rs                    |  4 +++
 src/repo/postgres.rs           | 58 ++++++++++++++++++++--------------
 src/repo/postgres_migration.rs | 28 +++++++++++++++-
 src/subscription.rs            | 14 ++++++--
 4 files changed, 76 insertions(+), 28 deletions(-)

diff --git a/src/info.rs b/src/info.rs
index b8ac975..477a13d 100644
--- a/src/info.rs
+++ b/src/info.rs
@@ -68,6 +68,10 @@ impl From<Settings> for RelayInfo {
     fn from(c: Settings) -> Self {
         let mut supported_nips = vec![1, 2, 9, 11, 12, 15, 16, 20, 22, 33, 40];
 
+        if c.database.engine == "postgres" {
+            supported_nips.push(50);
+        }
+
         if c.authorization.nip42_auth {
             supported_nips.push(42);
             supported_nips.sort();
diff --git a/src/repo/postgres.rs b/src/repo/postgres.rs
index dd120ab..5cb47b8 100644
--- a/src/repo/postgres.rs
+++ b/src/repo/postgres.rs
@@ -59,7 +59,7 @@ async fn cleanup_expired(conn: PostgresPool, frequency: Duration) -> Result<()>
                         }
                     }
                 }
-            };
+            }
         }
     });
     Ok(())
@@ -149,19 +149,19 @@ impl NostrRepo for PostgresRepo {
 VALUES($1, $2, $3, $4, $5, $6, $7)
 ON CONFLICT (id) DO NOTHING"#,
         )
-            .bind(&id_blob)
-            .bind(&pubkey_blob)
-            .bind(Utc.timestamp_opt(e.created_at as i64, 0).unwrap())
-            .bind(
-                e.expiration()
-                    .and_then(|x| Utc.timestamp_opt(x as i64, 0).latest()),
-            )
-            .bind(e.kind as i64)
-            .bind(event_str.into_bytes())
-            .bind(delegator_blob)
-            .execute(&mut tx)
-            .await?
-            .rows_affected();
+        .bind(&id_blob)
+        .bind(&pubkey_blob)
+        .bind(Utc.timestamp_opt(e.created_at as i64, 0).unwrap())
+        .bind(
+            e.expiration()
+                .and_then(|x| Utc.timestamp_opt(x as i64, 0).latest()),
+        )
+        .bind(e.kind as i64)
+        .bind(event_str)
+        .bind(delegator_blob)
+        .execute(&mut tx)
+        .await?
+        .rows_affected();
 
         if ins_count == 0 {
             // if the event was a duplicate, no need to insert event or
@@ -281,10 +281,10 @@ ON CONFLICT (id) DO NOTHING"#,
                 LEFT JOIN tag t ON e.id = t.event_id \
                 WHERE e.pub_key = $1 AND t.\"name\" = 'e' AND e.kind = 5 AND t.value = $2 LIMIT 1",
             )
-                .bind(&pubkey_blob)
-                .bind(&id_blob)
-                .fetch_optional(&mut tx)
-                .await?;
+            .bind(&pubkey_blob)
+            .bind(&id_blob)
+            .fetch_optional(&mut tx)
+            .await?;
 
             // check if a the query returned a result, meaning we should
             // hid the current event
@@ -393,7 +393,7 @@ ON CONFLICT (id) DO NOTHING"#,
                 }
 
                 row_count += 1;
-                let event_json: Vec<u8> = row.unwrap().get(0);
+                let event_json: String = row.unwrap().get(0);
                 loop {
                     if query_tx.capacity() != 0 {
                         // we have capacity to add another item
@@ -421,7 +421,7 @@ ON CONFLICT (id) DO NOTHING"#,
                         query_tx
                             .send(QueryResult {
                                 sub_id: sub.get_id(),
-                                event: String::from_utf8(event_json).unwrap(),
+                                event: event_json,
                             })
                             .await
                             .ok();
@@ -575,10 +575,10 @@ ON CONFLICT (id) DO NOTHING"#,
         sqlx::query(
             "UPDATE account SET is_admitted = TRUE, balance = balance - $1 WHERE pubkey = $2",
         )
-            .bind(admission_cost as i64)
-            .bind(pub_key)
-            .execute(&self.conn_write)
-            .await?;
+        .bind(admission_cost as i64)
+        .bind(pub_key)
+        .execute(&self.conn_write)
+        .await?;
         Ok(())
     }
 
@@ -863,6 +863,16 @@ fn query_from_filter(f: &ReqFilter) -> Option<QueryBuilder<Postgres>> {
             .push_bind(Utc.timestamp_opt(f.until.unwrap() as i64, 0).unwrap());
     }
 
+    if let Some(search) = &f.search {
+        if push_and {
+            query.push(" AND ");
+        }
+        push_and = true;
+        query.push("e.ts_content @@ websearch_to_tsquery(")
+            .push_bind(search.clone())
+            .push(")");
+    }
+
     // never display hidden events
     if push_and {
         query.push(" AND e.hidden != 1::bit(1)");
diff --git a/src/repo/postgres_migration.rs b/src/repo/postgres_migration.rs
index 27eb602..2b1fc19 100644
--- a/src/repo/postgres_migration.rs
+++ b/src/repo/postgres_migration.rs
@@ -37,6 +37,7 @@ pub async fn run_migrations(db: &PostgresPool) -> crate::error::Result<usize> {
     run_migration(m003::migration(), db).await;
     run_migration(m004::migration(), db).await;
     run_migration(m005::migration(), db).await;
+    run_migration(m006::migration(), db).await;
     Ok(current_version(db).await as usize)
 }
 
@@ -286,7 +287,7 @@ mod m005 {
 
     pub fn migration() -> impl Migration {
         SimpleSqlMigration {
-            serial_number: VERSION,
+            serial_number: crate::repo::postgres_migration::m005::VERSION,
             sql: vec![
                 r#"
 -- Create account table
@@ -318,3 +319,28 @@ CREATE TABLE "invoice" (
         }
     }
 }
+
+mod m006 {
+    use crate::repo::postgres_migration::{Migration, SimpleSqlMigration};
+
+    pub const VERSION: i64 = 6;
+
+    pub fn migration() -> impl Migration {
+        SimpleSqlMigration {
+            serial_number: VERSION,
+            sql: vec![
+                r#"
+--- Use text column for content
+ALTER TABLE "event" ALTER COLUMN "content" TYPE text USING convert_from("content", 'utf-8');
+
+--- Create search col for event content
+ALTER TABLE event
+ADD COLUMN ts_content tsvector
+GENERATED ALWAYS AS (to_tsvector('english', "content"::json->>'content')) stored;
+
+CREATE INDEX ts_content_idx ON event USING GIN (ts_content);
+"#,
+            ],
+        }
+    }
+}
\ No newline at end of file
diff --git a/src/subscription.rs b/src/subscription.rs
index 4ceca64..67a9f19 100644
--- a/src/subscription.rs
+++ b/src/subscription.rs
@@ -37,10 +37,12 @@ pub struct ReqFilter {
     /// Set of tags
     pub tags: Option<HashMap<char, HashSet<String>>>,
     /// Force no matches due to malformed data
-    // we can't represent it in the req filter, so we don't want to
-    // erroneously match. This basically indicates the req tried to
-    // do something invalid.
+    /// we can't represent it in the req filter, so we don't want to
+    /// erroneously match. This basically indicates the req tried to
+    /// do something invalid.
     pub force_no_match: bool,
+    /// NIP-50 search query
+    pub search: Option<String>,
 }
 
 impl Serialize for ReqFilter {
@@ -67,6 +69,9 @@ impl Serialize for ReqFilter {
         if let Some(authors) = &self.authors {
             map.serialize_entry("authors", &authors)?;
         }
+        if let Some(search) = &self.search {
+            map.serialize_entry("search", &search)?;
+        }
         // serialize tags
         if let Some(tags) = &self.tags {
             for (k, v) in tags {
@@ -98,6 +103,7 @@ impl<'de> Deserialize<'de> for ReqFilter {
             authors: None,
             limit: None,
             tags: None,
+            search: None,
             force_no_match: false,
         };
         let empty_string = "".into();
@@ -124,6 +130,8 @@ impl<'de> Deserialize<'de> for ReqFilter {
                 rf.until = Deserialize::deserialize(val).ok();
             } else if key == "limit" {
                 rf.limit = Deserialize::deserialize(val).ok();
+            } else if key == "search" {
+                rf.search = Deserialize::deserialize(val).ok();
             } else if key == "authors" {
                 let raw_authors: Option<Vec<String>> = Deserialize::deserialize(val).ok();
                 if let Some(a) = raw_authors.as_ref() {
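
For illustration, the new branch in query_from_filter maps a NIP-50 filter such as {"kinds":[1],"search":"purple ostrich"} onto the ts_content column that migration m006 creates. A minimal sketch of the resulting SQL, assuming the schema from this patch (the SELECT list is abbreviated, the search string is only an example, and the real code binds it as a parameter via push_bind rather than inlining it):

    -- roughly what the generated WHERE clause becomes for {"search": "purple ostrich"}
    SELECT e."content"
    FROM "event" e
    WHERE e.ts_content @@ websearch_to_tsquery('purple ostrich')
      AND e.hidden != 1::bit(1);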