feat: nip-50 support (postgres)

Kieran 2023-12-26 22:23:36 +00:00
parent c5fb16cd98
commit e5da37b4f1
No known key found for this signature in database
GPG Key ID: DE71CEB3925BE941
4 changed files with 76 additions and 28 deletions


@@ -68,6 +68,10 @@ impl From<Settings> for RelayInfo {
     fn from(c: Settings) -> Self {
         let mut supported_nips = vec![1, 2, 9, 11, 12, 15, 16, 20, 22, 33, 40];
+        if c.database.engine == "postgres" {
+            supported_nips.push(50);
+        }
         if c.authorization.nip42_auth {
             supported_nips.push(42);
             supported_nips.sort();

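Reviewer note (not part of this commit): with this change the relay advertises NIP-50 only when the Postgres engine is configured, so a client would check the NIP-11 supported_nips list and then issue a REQ whose filter carries a "search" string. A minimal sketch of such a request built with serde_json; the subscription id and query text are placeholders.

use serde_json::json;

fn main() {
    // ["REQ", <subscription id>, <filter>] per NIP-01; the "search" key is
    // the NIP-50 addition that the new ReqFilter field accepts.
    let req = json!(["REQ", "sub1", { "kinds": [1], "search": "nostr relay", "limit": 20 }]);
    println!("{req}");
}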

@@ -59,7 +59,7 @@ async fn cleanup_expired(conn: PostgresPool, frequency: Duration) -> Result<()>
                         }
                     }
                 }
-            };
+            }
         }
     });
     Ok(())
@@ -149,19 +149,19 @@ impl NostrRepo for PostgresRepo {
 VALUES($1, $2, $3, $4, $5, $6, $7)
 ON CONFLICT (id) DO NOTHING"#,
         )
         .bind(&id_blob)
         .bind(&pubkey_blob)
         .bind(Utc.timestamp_opt(e.created_at as i64, 0).unwrap())
         .bind(
             e.expiration()
                 .and_then(|x| Utc.timestamp_opt(x as i64, 0).latest()),
         )
         .bind(e.kind as i64)
-        .bind(event_str.into_bytes())
+        .bind(event_str)
         .bind(delegator_blob)
         .execute(&mut tx)
         .await?
         .rows_affected();

         if ins_count == 0 {
             // if the event was a duplicate, no need to insert event or
@@ -281,10 +281,10 @@ ON CONFLICT (id) DO NOTHING"#,
                 LEFT JOIN tag t ON e.id = t.event_id \
                 WHERE e.pub_key = $1 AND t.\"name\" = 'e' AND e.kind = 5 AND t.value = $2 LIMIT 1",
             )
             .bind(&pubkey_blob)
             .bind(&id_blob)
             .fetch_optional(&mut tx)
             .await?;

             // check if a the query returned a result, meaning we should
             // hid the current event
@@ -393,7 +393,7 @@ ON CONFLICT (id) DO NOTHING"#,
                 }
                 row_count += 1;
-                let event_json: Vec<u8> = row.unwrap().get(0);
+                let event_json: String = row.unwrap().get(0);
                 loop {
                     if query_tx.capacity() != 0 {
                         // we have capacity to add another item
@@ -421,7 +421,7 @@ ON CONFLICT (id) DO NOTHING"#,
                 query_tx
                     .send(QueryResult {
                         sub_id: sub.get_id(),
-                        event: String::from_utf8(event_json).unwrap(),
+                        event: event_json,
                     })
                     .await
                     .ok();
@@ -575,10 +575,10 @@ ON CONFLICT (id) DO NOTHING"#,
         sqlx::query(
             "UPDATE account SET is_admitted = TRUE, balance = balance - $1 WHERE pubkey = $2",
         )
         .bind(admission_cost as i64)
         .bind(pub_key)
         .execute(&self.conn_write)
         .await?;

         Ok(())
     }
@@ -863,6 +863,16 @@ fn query_from_filter(f: &ReqFilter) -> Option<QueryBuilder<Postgres>> {
             .push_bind(Utc.timestamp_opt(f.until.unwrap() as i64, 0).unwrap());
     }
+    if let Some(search) = &f.search {
+        if push_and {
+            query.push(" AND ");
+        }
+        push_and = true;
+        query
+            .push("e.ts_content @@ websearch_to_tsquery(")
+            .push_bind(search.clone())
+            .push(")");
+    }

     // never display hidden events
     if push_and {
         query.push(" AND e.hidden != 1::bit(1)");

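Reviewer note (not part of this commit): the new branch in query_from_filter composes the search clause with sqlx's QueryBuilder, so the user-supplied text travels as a bind parameter rather than being spliced into the SQL string. A standalone sketch of the same shape; the base SELECT is illustrative, not the relay's real query, and it assumes sqlx with the postgres feature as used elsewhere in the diff.

use sqlx::{Postgres, QueryBuilder};

fn main() {
    let search = "nostr relay".to_string();
    let mut query = QueryBuilder::<Postgres>::new("SELECT e.\"content\" FROM event e WHERE ");

    // push() appends raw SQL, push_bind() appends a $1 placeholder and keeps
    // the value as a parameter, mirroring the new filter branch above.
    query
        .push("e.ts_content @@ websearch_to_tsquery(")
        .push_bind(search)
        .push(")");

    // build() finalizes the statement; executing it would need a PgPool.
    let _q = query.build();
}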

@@ -37,6 +37,7 @@ pub async fn run_migrations(db: &PostgresPool) -> crate::error::Result<usize> {
     run_migration(m003::migration(), db).await;
     run_migration(m004::migration(), db).await;
     run_migration(m005::migration(), db).await;
+    run_migration(m006::migration(), db).await;
     Ok(current_version(db).await as usize)
 }
@@ -286,7 +287,7 @@ mod m005 {
     pub fn migration() -> impl Migration {
         SimpleSqlMigration {
-            serial_number: VERSION,
+            serial_number: crate::repo::postgres_migration::m005::VERSION,
             sql: vec![
                 r#"
 -- Create account table
@@ -318,3 +319,28 @@ CREATE TABLE "invoice" (
         }
     }
 }
+
+mod m006 {
+    use crate::repo::postgres_migration::{Migration, SimpleSqlMigration};
+
+    pub const VERSION: i64 = 6;
+
+    pub fn migration() -> impl Migration {
+        SimpleSqlMigration {
+            serial_number: VERSION,
+            sql: vec![
+                r#"
+--- Use text column for content
+ALTER TABLE "event" ALTER COLUMN "content" TYPE text USING convert_from("content", 'utf-8');
+--- Create search col for event content
+ALTER TABLE event
+ADD COLUMN ts_content tsvector
+GENERATED ALWAYS AS (to_tsvector('english', "content"::json->>'content')) stored;
+CREATE INDEX ts_content_idx ON event USING GIN (ts_content);
+        "#,
+            ],
+        }
+    }
+}

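Reviewer note (not part of this commit): after m006 runs, "content" is plain text holding the event JSON, ts_content is a generated tsvector over its inner content field, and the GIN index serves @@ matches. A hedged sketch of querying the column directly with sqlx; the LIMIT and the explicit 'english' config are assumptions, not taken from the relay's own query path.

use sqlx::postgres::PgPool;

// Returns the stored event JSON for events whose content matches the query.
async fn search_events(pool: &PgPool, terms: &str) -> Result<Vec<String>, sqlx::Error> {
    sqlx::query_scalar(
        r#"SELECT "content" FROM event
           WHERE ts_content @@ websearch_to_tsquery('english', $1)
           LIMIT 100"#,
    )
    .bind(terms)
    .fetch_all(pool)
    .await
}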

@@ -37,10 +37,12 @@ pub struct ReqFilter {
     /// Set of tags
     pub tags: Option<HashMap<char, HashSet<String>>>,
     /// Force no matches due to malformed data
-    // we can't represent it in the req filter, so we don't want to
-    // erroneously match. This basically indicates the req tried to
-    // do something invalid.
+    /// we can't represent it in the req filter, so we don't want to
+    /// erroneously match. This basically indicates the req tried to
+    /// do something invalid.
     pub force_no_match: bool,
+    /// NIP-50 search query
+    pub search: Option<String>,
 }

 impl Serialize for ReqFilter {
@@ -67,6 +69,9 @@ impl Serialize for ReqFilter {
         if let Some(authors) = &self.authors {
             map.serialize_entry("authors", &authors)?;
         }
+        if let Some(search) = &self.search {
+            map.serialize_entry("search", &search)?;
+        }
         // serialize tags
         if let Some(tags) = &self.tags {
             for (k, v) in tags {
@@ -98,6 +103,7 @@ impl<'de> Deserialize<'de> for ReqFilter {
             authors: None,
             limit: None,
             tags: None,
+            search: None,
             force_no_match: false,
         };
         let empty_string = "".into();
@@ -124,6 +130,8 @@ impl<'de> Deserialize<'de> for ReqFilter {
                 rf.until = Deserialize::deserialize(val).ok();
             } else if key == "limit" {
                 rf.limit = Deserialize::deserialize(val).ok();
+            } else if key == "search" {
+                rf.search = Deserialize::deserialize(val).ok();
             } else if key == "authors" {
                 let raw_authors: Option<Vec<String>> = Deserialize::deserialize(val).ok();
                 if let Some(a) = raw_authors.as_ref() {
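
Reviewer note (not part of this commit): a sketch of a unit test for the new field. It assumes it sits in the module that defines ReqFilter, so the type and its custom Serialize/Deserialize impls are in scope, and that serde_json is available as elsewhere in the crate.

#[cfg(test)]
mod nip50_filter_tests {
    use super::ReqFilter;

    #[test]
    fn search_field_round_trips() {
        // The new deserializer branch picks up the NIP-50 "search" key...
        let f: ReqFilter = serde_json::from_str(r#"{"kinds":[1],"search":"coffee"}"#).unwrap();
        assert_eq!(f.search.as_deref(), Some("coffee"));

        // ...and the new Serialize arm writes it back out.
        let out = serde_json::to_string(&f).unwrap();
        assert!(out.contains(r#""search":"coffee""#));
    }
}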