mirror of https://github.com/scsibug/nostr-rs-relay.git
synced 2025-09-01 03:40:46 -04:00

Compare commits

5 Commits:
- 19637d612e
- afc9a0096a
- 3d56262386
- 6673fcfd11
- b5da3fa2b0

Cargo.lock (generated, 2 lines changed)

@@ -649,7 +649,7 @@ checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21"
 
 [[package]]
 name = "nostr-rs-relay"
-version = "0.2.3"
+version = "0.3.1"
 dependencies = [
  "bitcoin_hashes 0.9.7",
  "config",

Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "nostr-rs-relay"
-version = "0.2.3"
+version = "0.3.1"
 edition = "2021"
 
 [dependencies]

README.md

@@ -39,6 +39,9 @@ Text Note [81cf...2652] from 296a...9b92 5 seconds ago
 hello world
 ```
 
+A pre-built container is also available on DockerHub:
+https://hub.docker.com/repository/docker/scsibug/nostr-rs-relay
+
 ## Configuration
 
 The sample [`config.toml`](config.toml) file demonstrates the

src/db.rs (76 lines changed)

@@ -13,6 +13,7 @@ use rusqlite::OpenFlags;
 use crate::config::SETTINGS;
 use std::path::Path;
 use std::thread;
+use std::time::Instant;
 use tokio::task;
 
 /// Database file

@@ -157,12 +158,17 @@ pub async fn db_writer(
         }
         let mut event_write = false;
         let event = next_event.unwrap();
+        let start = Instant::now();
         match write_event(&mut conn, &event) {
             Ok(updated) => {
                 if updated == 0 {
                     debug!("ignoring duplicate event");
                 } else {
-                    info!("persisted event: {}", event.get_event_id_prefix());
+                    info!(
+                        "persisted event: {} in {:?}",
+                        event.get_event_id_prefix(),
+                        start.elapsed()
+                    );
                     event_write = true;
                     // send this out to all clients
                     bcast_tx.send(event.clone()).ok();
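The write path now wraps `write_event` with the standard `std::time::Instant` timing pattern and folds the elapsed duration into the log line. A minimal standalone sketch of that pattern, using a hypothetical `do_write` stand-in rather than the relay's `write_event`:

```rust
use std::time::Instant;

// Hypothetical stand-in for write_event(): pretend one row was persisted.
fn do_write() -> u64 {
    1
}

fn main() {
    // Capture a monotonic start time, do the work, then log the elapsed Duration.
    let start = Instant::now();
    let updated = do_write();
    if updated > 0 {
        println!("persisted event in {:?}", start.elapsed());
    }
}
```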

@@ -302,35 +308,52 @@ fn query_from_sub(sub: &Subscription) -> String {
         filter_components.push(authors_clause);
     }
     // Query for Kind
-    if f.kind.is_some() {
+    if let Some(ks) = &f.kinds {
         // kind is number, no escaping needed
-        let kind_clause = format!("kind = {}", f.kind.unwrap());
+        let str_kinds: Vec<String> = ks.iter().map(|x| x.to_string()).collect();
+        let kind_clause = format!("kind IN ({})", str_kinds.join(", "));
         filter_components.push(kind_clause);
     }
     // Query for event
-    if f.id.is_some() {
-        let id_str = f.id.as_ref().unwrap();
-        if is_hex(id_str) {
-            let id_clause = format!("event_hash = x'{}'", id_str);
-            filter_components.push(id_clause);
-        }
+    if f.ids.is_some() {
+        let ids_escaped: Vec<String> = f
+            .ids
+            .as_ref()
+            .unwrap()
+            .iter()
+            .filter(|&x| is_hex(x))
+            .map(|x| format!("x'{}'", x))
+            .collect();
+        let id_clause = format!("event_hash IN ({})", ids_escaped.join(", "));
+        filter_components.push(id_clause);
     }
     // Query for referenced event
-    if f.event.is_some() {
-        let ev_str = f.event.as_ref().unwrap();
-        if is_hex(ev_str) {
-            let ev_clause = format!("referenced_event = x'{}'", ev_str);
-            filter_components.push(ev_clause);
-        }
+    if f.events.is_some() {
+        let events_escaped: Vec<String> = f
+            .events
+            .as_ref()
+            .unwrap()
+            .iter()
+            .filter(|&x| is_hex(x))
+            .map(|x| format!("x'{}'", x))
+            .collect();
+        let events_clause = format!("referenced_event IN ({})", events_escaped.join(", "));
+        filter_components.push(events_clause);
     }
-    // Query for referenced pet name pubkey
-    if f.pubkey.is_some() {
-        let pet_str = f.pubkey.as_ref().unwrap();
-        if is_hex(pet_str) {
-            let pet_clause = format!("referenced_pubkey = x'{}'", pet_str);
-            filter_components.push(pet_clause);
-        }
+    // Query for referenced pubkey
+    if f.pubkeys.is_some() {
+        let pubkeys_escaped: Vec<String> = f
+            .pubkeys
+            .as_ref()
+            .unwrap()
+            .iter()
+            .filter(|&x| is_hex(x))
+            .map(|x| format!("x'{}'", x))
+            .collect();
+        let pubkeys_clause = format!("referenced_pubkey IN ({})", pubkeys_escaped.join(", "));
+        filter_components.push(pubkeys_clause);
     }
 
     // Query for timestamp
     if f.since.is_some() {
         let created_clause = format!("created_at > {}", f.since.unwrap());
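The rewritten clauses turn each plural filter field into a SQL `IN (...)` list, keeping only hex-looking values and emitting them as SQLite blob literals (`x'...'`). A simplified standalone sketch of that construction, using a hypothetical `in_clause` helper rather than the relay's `query_from_sub`; note that, as in the hunk above, an empty value list would still produce an invalid `IN ()`:

```rust
// Rough stand-in for the relay's is_hex check: non-empty, all hex digits.
fn is_hex(s: &str) -> bool {
    !s.is_empty() && s.chars().all(|c| c.is_ascii_hexdigit())
}

// Build "column IN (x'..', x'..')" from a list of candidate hex ids,
// silently dropping anything that is not hex (hypothetical helper).
fn in_clause(column: &str, values: &[String]) -> String {
    let escaped: Vec<String> = values
        .iter()
        .filter(|x| is_hex(x.as_str()))
        .map(|x| format!("x'{}'", x))
        .collect();
    format!("{} IN ({})", column, escaped.join(", "))
}

fn main() {
    let ids = vec!["0ab1".to_string(), "not-hex".to_string(), "ff02".to_string()];
    let clause = in_clause("event_hash", &ids);
    assert_eq!(clause, "event_hash IN (x'0ab1', x'ff02')");
    println!("{}", clause);
}
```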

@@ -385,6 +408,8 @@ pub async fn db_query(
             Connection::open_with_flags(&full_path, OpenFlags::SQLITE_OPEN_READ_ONLY).unwrap();
         debug!("opened database for reading");
         debug!("going to query for: {:?}", sub);
+        let mut row_count: usize = 0;
+        let start = Instant::now();
         // generate SQL query
         let q = query_from_sub(&sub);
         // execute the query

@@ -396,6 +421,7 @@ pub async fn db_query(
                 debug!("query aborted");
                 return;
             }
+            row_count += 1;
             // TODO: check before unwrapping
             let event_json = row.get(0).unwrap();
             query_tx

@@ -405,6 +431,10 @@ pub async fn db_query(
                 })
                 .ok();
         }
-        debug!("query completed");
+        debug!(
+            "query completed ({} rows) in {:?}",
+            row_count,
+            start.elapsed()
+        );
     });
 }
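`db_query` now counts rows as they stream out and logs the total together with the elapsed time once the loop ends. A minimal standalone sketch of the same count-and-time loop with rusqlite, against a throwaway in-memory table rather than the relay's schema, and assuming a rusqlite version that accepts `[]` as the empty parameter set:

```rust
use rusqlite::Connection;
use std::time::Instant;

fn main() -> rusqlite::Result<()> {
    // Throwaway in-memory table, not the relay's schema.
    let conn = Connection::open_in_memory()?;
    conn.execute_batch(
        "CREATE TABLE event (content TEXT);
         INSERT INTO event VALUES ('hello'), ('world');",
    )?;

    let start = Instant::now();
    let mut row_count: usize = 0;
    let mut stmt = conn.prepare("SELECT content FROM event")?;
    let mut rows = stmt.query([])?;
    while let Some(row) = rows.next()? {
        row_count += 1;
        let content: String = row.get(0)?;
        println!("row: {}", content);
    }
    println!("query completed ({} rows) in {:?}", row_count, start.elapsed());
    Ok(())
}
```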

src/protostream.rs

@@ -71,6 +71,7 @@ impl Stream for NostrStream {
             }
             Err(e) => {
                 debug!("proto parse error: {:?}", e);
+                debug!("parse error on message: {}", msg.trim());
                 Err(Error::ProtoParseError)
             }
         }

src/subscription.rs

@@ -2,6 +2,7 @@
 use crate::error::Result;
 use crate::event::Event;
 use serde::{Deserialize, Deserializer, Serialize};
+use std::collections::HashSet;
 
 /// Subscription identifier and set of request filters
 #[derive(Serialize, PartialEq, Debug, Clone)]

@@ -17,16 +18,16 @@ pub struct Subscription {
 /// absent ([`None`]) if it should be ignored.
 #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
 pub struct ReqFilter {
-    /// Event hash
-    pub id: Option<String>,
-    /// Event kind
-    pub kind: Option<u64>,
+    /// Event hashes
+    pub ids: Option<Vec<String>>,
+    /// Event kinds
+    pub kinds: Option<Vec<u64>>,
     /// Referenced event hash
     #[serde(rename = "#e")]
-    pub event: Option<String>,
+    pub events: Option<Vec<String>>,
     /// Referenced public key for a petname
     #[serde(rename = "#p")]
-    pub pubkey: Option<String>,
+    pub pubkeys: Option<Vec<String>>,
     /// Events published after this time
     pub since: Option<u64>,
     /// Events published before this time
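With the fields pluralized, a single REQ filter can list several ids, kinds, `#e`, or `#p` values. A trimmed-down sketch of how such a filter deserializes with serde/serde_json; the struct below carries only the fields visible in this hunk, not the full `ReqFilter` (which also has authors and other fields):

```rust
use serde::Deserialize;

// Trimmed-down illustration of the plural filter fields shown in the hunk above.
#[derive(Deserialize, Debug)]
struct Filter {
    ids: Option<Vec<String>>,
    kinds: Option<Vec<u64>>,
    #[serde(rename = "#e")]
    events: Option<Vec<String>>,
    #[serde(rename = "#p")]
    pubkeys: Option<Vec<String>>,
    since: Option<u64>,
}

fn main() {
    let json = r#"{"ids": ["0ab1"], "kinds": [1, 2], "#e": ["ff02"], "since": 1640000000}"#;
    let f: Filter = serde_json::from_str(json).unwrap();
    assert_eq!(f.kinds, Some(vec![1, 2]));
    assert!(f.pubkeys.is_none()); // absent fields deserialize to None
    println!("{:?}", f);
}
```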

@@ -105,8 +106,13 @@ impl Subscription {
 
 impl ReqFilter {
     /// Check for a match within the authors list.
     // TODO: Ambiguity; what if the array is empty? Should we
     // consider that the same as null?
+    fn ids_match(&self, event: &Event) -> bool {
+        self.ids
+            .as_ref()
+            .map(|vs| vs.contains(&event.id.to_owned()))
+            .unwrap_or(true)
+    }
+
     fn authors_match(&self, event: &Event) -> bool {
         self.authors
             .as_ref()
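The new `ids_match` treats an absent `ids` list as a match for every event. A tiny standalone illustration of that `Option` idiom, written as a hypothetical free function rather than the relay's method:

```rust
// "Absent filter field matches everything": None -> true,
// Some(list) -> true only if the event id is in the list.
fn ids_match(ids: &Option<Vec<String>>, event_id: &str) -> bool {
    ids.as_ref()
        .map(|vs| vs.iter().any(|v| v == event_id))
        .unwrap_or(true)
}

fn main() {
    assert!(ids_match(&None, "abc"));                      // no ids filter: matches
    assert!(ids_match(&Some(vec!["abc".into()]), "abc"));  // listed id: matches
    assert!(!ids_match(&Some(vec!["def".into()]), "abc")); // not listed: no match
}
```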

@@ -115,29 +121,47 @@ impl ReqFilter {
     }
     /// Check if this filter either matches, or does not care about the event tags.
     fn event_match(&self, event: &Event) -> bool {
-        self.event
-            .as_ref()
-            .map(|t| event.event_tag_match(t))
-            .unwrap_or(true)
+        // This needs to be analyzed for performance; building these
+        // hash sets for each active subscription isn't great.
+        if let Some(es) = &self.events {
+            let event_refs =
+                HashSet::<_>::from_iter(event.get_event_tags().iter().map(|x| x.to_owned()));
+            let filter_refs = HashSet::<_>::from_iter(es.iter().map(|x| &x[..]));
+            let cardinality = event_refs.intersection(&filter_refs).count();
+            cardinality > 0
+        } else {
+            true
+        }
     }
 
     /// Check if this filter either matches, or does not care about
     /// the pubkey/petname tags.
     fn pubkey_match(&self, event: &Event) -> bool {
-        self.pubkey
-            .as_ref()
-            .map(|t| event.pubkey_tag_match(t))
-            .unwrap_or(true)
+        // This needs to be analyzed for performance; building these
+        // hash sets for each active subscription isn't great.
+        if let Some(ps) = &self.pubkeys {
+            let pubkey_refs =
+                HashSet::<_>::from_iter(event.get_pubkey_tags().iter().map(|x| x.to_owned()));
+            let filter_refs = HashSet::<_>::from_iter(ps.iter().map(|x| &x[..]));
+            let cardinality = pubkey_refs.intersection(&filter_refs).count();
+            cardinality > 0
+        } else {
+            true
+        }
     }
 
     /// Check if this filter either matches, or does not care about the kind.
     fn kind_match(&self, kind: u64) -> bool {
-        self.kind.map(|v| v == kind).unwrap_or(true)
+        self.kinds
+            .as_ref()
+            .map(|ks| ks.contains(&kind))
+            .unwrap_or(true)
     }
 
     /// Determine if all populated fields in this filter match the provided event.
     pub fn interested_in_event(&self, event: &Event) -> bool {
-        self.id.as_ref().map(|v| v == &event.id).unwrap_or(true)
+        // self.id.as_ref().map(|v| v == &event.id).unwrap_or(true)
+        self.ids_match(event)
             && self.since.map(|t| event.created_at > t).unwrap_or(true)
             && self.kind_match(event.kind)
             && self.authors_match(event)
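The rewritten `event_match` and `pubkey_match` build a `HashSet` from the event's tag values and intersect it with the filter's list, matching when the intersection is non-empty. A small standalone sketch of that check over plain string slices, not the relay's `Event` type:

```rust
use std::collections::HashSet;

// The filter matches if at least one of its values appears among the
// event's tag values.
fn tags_intersect(event_tags: &[&str], filter_values: &[&str]) -> bool {
    let event_refs: HashSet<&str> = event_tags.iter().copied().collect();
    let filter_refs: HashSet<&str> = filter_values.iter().copied().collect();
    event_refs.intersection(&filter_refs).count() > 0
}

fn main() {
    let event_e_tags = ["aa11", "bb22"];
    assert!(tags_intersect(&event_e_tags, &["bb22", "cc33"])); // shares "bb22"
    assert!(!tags_intersect(&event_e_tags, &["dd44"]));        // no overlap
}
```

As the in-diff comment notes, building these sets per subscription on every event has a cost; the sketch keeps the same semantics without addressing that.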