Mirror of https://github.com/scsibug/nostr-rs-relay.git (synced 2024-11-09 21:29:06 -05:00)

docs: add rustdoc comments

This commit is contained in:
parent 04850506a8
commit ca0f01c94b
src/close.rs (12 lines changed)

@@ -1,14 +1,20 @@
+//! Subscription close request parsing
 use crate::error::{Error, Result};
 use serde::{Deserialize, Serialize};
 
+/// Close command in network format
 #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
 pub struct CloseCmd {
+    /// Protocol command, expected to always be "CLOSE".
     cmd: String,
+    /// The subscription identifier being closed.
     id: String,
 }
 
+/// Close command parsed
 #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
 pub struct Close {
+    /// The subscription identifier being closed.
     pub id: String,
 }
 
@@ -22,9 +28,3 @@ impl From<CloseCmd> for Result<Close> {
         }
     }
 }
-
-impl Close {
-    pub fn get_id(&self) -> String {
-        self.id.clone()
-    }
-}
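Dropping Close::get_id pairs with keeping the id field public; the conn.rs hunk below switches callers from c.get_id() to c.id. For orientation, a minimal sketch of how a CLOSE request could reach these types, assuming the derived serde impls (serde allows a plain struct to deserialize from a JSON array, so ["CLOSE","sub-1"] would map onto CloseCmd's cmd and id fields); the parse_close helper is hypothetical, not part of the crate:

    use crate::close::{Close, CloseCmd};
    use crate::error::Result;

    // Hypothetical helper: parse the wire form and run the CloseCmd -> Close
    // conversion from the hunk header above (its body is elided in this diff).
    fn parse_close(raw: &str) -> Result<Close> {
        let cmd: CloseCmd = serde_json::from_str(raw)?; // e.g. r#"["CLOSE","sub-1"]"#
        cmd.into()
    }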
src/conn.rs (27 lines changed)

@@ -1,3 +1,4 @@
+//! Client connection state
 use crate::close::Close;
 use crate::error::Result;
 use crate::event::Event;
@@ -6,34 +7,38 @@ use log::*;
 use std::collections::HashMap;
 use uuid::Uuid;
 
-// subscription identifiers must be reasonably sized.
+/// A subscription identifier has a maximum length
 const MAX_SUBSCRIPTION_ID_LEN: usize = 256;
 
-// state for a client connection
+/// State for a client connection
 pub struct ClientConn {
+    /// Unique client identifier generated at connection time
     client_id: Uuid,
-    // current set of subscriptions
+    /// The current set of active client subscriptions
     subscriptions: HashMap<String, Subscription>,
-    // websocket
-    //stream: WebSocketStream<TcpStream>,
+    /// Per-connection maximum concurrent subscriptions
    max_subs: usize,
 }
 
 impl ClientConn {
+    /// Create a new, empty connection state.
     pub fn new() -> Self {
         let client_id = Uuid::new_v4();
         ClientConn {
-            client_id: client_id,
+            client_id,
             subscriptions: HashMap::new(),
             max_subs: 128,
         }
     }
 
+    /// Get a short prefix of the client's unique identifier, suitable
+    /// for logging.
     pub fn get_client_prefix(&self) -> String {
         self.client_id.to_string().chars().take(8).collect()
     }
 
-    // return the first subscription that matches the event.
+    /// Find the first subscription identifier that matches the event,
+    /// if any do.
     pub fn get_matching_subscription(&self, e: &Event) -> Option<&str> {
         for (id, sub) in self.subscriptions.iter() {
             if sub.interested_in_event(e) {
@@ -43,9 +48,12 @@ impl ClientConn {
         None
     }
 
+    /// Add a new subscription for this connection.
     pub fn subscribe(&mut self, s: Subscription) -> Result<()> {
         let k = s.get_id();
         let sub_id_len = k.len();
+        // prevent arbitrarily long subscription identifiers from
+        // being used.
         if sub_id_len > MAX_SUBSCRIPTION_ID_LEN {
             info!("Dropping subscription with huge ({}) length", sub_id_len);
             return Ok(());
@@ -54,7 +62,7 @@ impl ClientConn {
         if self.subscriptions.contains_key(&k) {
             self.subscriptions.remove(&k);
             self.subscriptions.insert(k, s);
-            info!("Replaced existing subscription");
+            debug!("Replaced existing subscription");
             return Ok(());
         }
 
@@ -72,9 +80,10 @@ impl ClientConn {
         return Ok(());
     }
 
+    /// Remove the subscription for this connection.
     pub fn unsubscribe(&mut self, c: Close) {
         // TODO: return notice if subscription did not exist.
-        self.subscriptions.remove(&c.get_id());
+        self.subscriptions.remove(&c.id);
         info!(
             "Removed subscription, currently have {} active subs",
             self.subscriptions.len()
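ClientConn::subscribe treats a reused subscription identifier as a replacement: the existing entry is removed and the new one inserted (now logged at debug level). A standalone sketch of that remove-then-insert behavior with a plain HashMap, independent of the relay types:

    use std::collections::HashMap;

    fn main() {
        let mut subs: HashMap<String, &str> = HashMap::new();
        subs.insert("sub-1".to_owned(), "filter-a");
        // re-subscribing under the same identifier replaces the previous entry,
        // mirroring the remove/insert pair in ClientConn::subscribe
        subs.insert("sub-1".to_owned(), "filter-b");
        assert_eq!(subs.len(), 1);
        assert_eq!(subs["sub-1"], "filter-b");
    }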
src/db.rs (79 lines changed)

@@ -1,3 +1,4 @@
+//! Event persistence and querying
 use crate::error::Result;
 use crate::event::Event;
 use crate::subscription::Subscription;
@@ -9,10 +10,12 @@ use rusqlite::OpenFlags;
 use std::path::Path;
 use tokio::task;
 
+/// Database file
 const DB_FILE: &str = "nostr.db";
 
-// schema
+/// Schema definition
 const INIT_SQL: &str = r##"
+-- Database settings
 PRAGMA encoding = "UTF-8";
 PRAGMA journal_mode=WAL;
 PRAGMA main.synchronous=NORMAL;
@@ -20,6 +23,8 @@ PRAGMA foreign_keys = ON;
 PRAGMA application_id = 1654008667;
 PRAGMA user_version = 1;
 pragma mmap_size = 536870912; -- 512MB of mmap
 
+-- Event Table
 CREATE TABLE IF NOT EXISTS event (
 id INTEGER PRIMARY KEY,
 event_hash BLOB NOT NULL, -- 4-byte hash
@@ -29,23 +34,33 @@ author BLOB NOT NULL, -- author pubkey
 kind INTEGER NOT NULL, -- event kind
 content TEXT NOT NULL -- serialized json of event object
 );
 
+-- Event Indexes
 CREATE UNIQUE INDEX IF NOT EXISTS event_hash_index ON event(event_hash);
 CREATE INDEX IF NOT EXISTS created_at_index ON event(created_at);
 CREATE INDEX IF NOT EXISTS author_index ON event(author);
 CREATE INDEX IF NOT EXISTS kind_index ON event(kind);
 
+-- Event References Table
 CREATE TABLE IF NOT EXISTS event_ref (
 id INTEGER PRIMARY KEY,
 event_id INTEGER NOT NULL, -- an event ID that contains an #e tag.
 referenced_event BLOB NOT NULL, -- the event that is referenced.
 FOREIGN KEY(event_id) REFERENCES event(id) ON UPDATE CASCADE ON DELETE CASCADE
 );
 
+-- Event References Index
 CREATE INDEX IF NOT EXISTS event_ref_index ON event_ref(referenced_event);
 
+-- Pubkey References Table
 CREATE TABLE IF NOT EXISTS pubkey_ref (
 id INTEGER PRIMARY KEY,
 event_id INTEGER NOT NULL, -- an event ID that contains an #p tag.
 referenced_pubkey BLOB NOT NULL, -- the pubkey that is referenced.
 FOREIGN KEY(event_id) REFERENCES event(id) ON UPDATE RESTRICT ON DELETE CASCADE
 );
 
+-- Pubkey References Index
 CREATE INDEX IF NOT EXISTS pubkey_ref_index ON pubkey_ref(referenced_pubkey);
 "##;
 
@@ -70,7 +85,6 @@ pub async fn db_writer(
         let next_event = event_rx.blocking_recv();
         // if the channel has closed, we will never get work
         if next_event.is_none() {
-            info!("No more event senders for DB, shutting down.");
             break;
         }
         let event = next_event.unwrap();
@@ -84,7 +98,7 @@ pub async fn db_writer(
                 }
             }
            Err(err) => {
-                info!("event insert failed: {}", err);
+                warn!("event insert failed: {}", err);
            }
        }
    }
@@ -94,6 +108,7 @@ pub async fn db_writer(
    })
 }
 
+/// Persist an event to the database.
 pub fn write_event(conn: &mut Connection, e: &Event) -> Result<usize> {
     // start transaction
     let tx = conn.transaction()?;
@@ -101,15 +116,21 @@ pub fn write_event(conn: &mut Connection, e: &Event) -> Result<usize> {
     let id_blob = hex::decode(&e.id).ok();
     let pubkey_blob = hex::decode(&e.pubkey).ok();
     let event_str = serde_json::to_string(&e).ok();
-    // ignore if the event hash is a duplicate.x
+    // ignore if the event hash is a duplicate.
     let ins_count = tx.execute(
         "INSERT OR IGNORE INTO event (event_hash, created_at, kind, author, content, first_seen) VALUES (?1, ?2, ?3, ?4, ?5, strftime('%s','now'));",
         params![id_blob, e.created_at, e.kind, pubkey_blob, event_str]
     )?;
+    if ins_count == 0 {
+        // if the event was a duplicate, no need to insert event or
+        // pubkey references.
+        return Ok(ins_count);
+    }
+    // remember primary key of the event most recently inserted.
     let ev_id = tx.last_insert_rowid();
+    // add all event tags into the event_ref table
     let etags = e.get_event_tags();
     if etags.len() > 0 {
-        // this will need to
         for etag in etags.iter() {
             tx.execute(
                 "INSERT OR IGNORE INTO event_ref (event_id, referenced_event) VALUES (?1, ?2)",
@@ -117,6 +138,7 @@ pub fn write_event(conn: &mut Connection, e: &Event) -> Result<usize> {
             )?;
         }
     }
+    // add all event tags into the pubkey_ref table
     let ptags = e.get_pubkey_tags();
     if ptags.len() > 0 {
         for ptag in ptags.iter() {
@@ -130,18 +152,21 @@ pub fn write_event(conn: &mut Connection, e: &Event) -> Result<usize> {
     Ok(ins_count)
 }
 
-// Queries return a subscription identifier and the serialized event.
+/// Event resulting from a specific subscription request
 #[derive(PartialEq, Debug, Clone)]
 pub struct QueryResult {
+    /// Subscription identifier
     pub sub_id: String,
+    /// Serialized event
     pub event: String,
 }
 
-// TODO: make this hex
-fn is_alphanum(s: &str) -> bool {
+/// Check if a string contains only hex characters.
+fn is_hex(s: &str) -> bool {
     s.chars().all(|x| char::is_ascii_hexdigit(&x))
 }
 
+/// Create a dynamic SQL query string from a subscription.
 fn query_from_sub(sub: &Subscription) -> String {
     // build a dynamic SQL query. all user-input is either an integer
     // (sqli-safe), or a string that is filtered to only contain
@@ -150,7 +175,6 @@ fn query_from_sub(sub: &Subscription) -> String {
         "SELECT DISTINCT(e.content) FROM event e LEFT JOIN event_ref er ON e.id=er.event_id LEFT JOIN pubkey_ref pr ON e.id=pr.event_id "
         .to_owned();
     // for every filter in the subscription, generate a where clause
-    // all individual filter clause strings for this subscription
     let mut filter_clauses: Vec<String> = Vec::new();
     for f in sub.filters.iter() {
         // individual filter components
@@ -160,7 +184,7 @@ fn query_from_sub(sub: &Subscription) -> String {
         // I believe the author & authors fields are redundant.
         if f.author.is_some() {
             let author_str = f.author.as_ref().unwrap();
-            if is_alphanum(author_str) {
+            if is_hex(author_str) {
                 let author_clause = format!("author = x'{}'", author_str);
                 filter_components.push(author_clause);
             }
@@ -172,7 +196,7 @@ fn query_from_sub(sub: &Subscription) -> String {
                 .as_ref()
                 .unwrap()
                 .iter()
-                .filter(|&x| is_alphanum(x))
+                .filter(|&x| is_hex(x))
                 .map(|x| format!("x'{}'", x))
                 .collect();
             let authors_clause = format!("author IN ({})", authors_escaped.join(", "));
@@ -186,34 +210,30 @@ fn query_from_sub(sub: &Subscription) -> String {
         }
         // Query for event
         if f.id.is_some() {
-            // whitelist characters
             let id_str = f.id.as_ref().unwrap();
-            if is_alphanum(id_str) {
+            if is_hex(id_str) {
                 let id_clause = format!("event_hash = x'{}'", id_str);
                 filter_components.push(id_clause);
             }
         }
         // Query for referenced event
         if f.event.is_some() {
-            // whitelist characters
             let ev_str = f.event.as_ref().unwrap();
-            if is_alphanum(ev_str) {
+            if is_hex(ev_str) {
                 let ev_clause = format!("referenced_event = x'{}'", ev_str);
                 filter_components.push(ev_clause);
             }
         }
         // Query for referenced pet name pubkey
         if f.pubkey.is_some() {
-            // whitelist characters
             let pet_str = f.pubkey.as_ref().unwrap();
-            if is_alphanum(pet_str) {
+            if is_hex(pet_str) {
                 let pet_clause = format!("referenced_pubkey = x'{}'", pet_str);
                 filter_components.push(pet_clause);
             }
         }
         // Query for timestamp
         if f.since.is_some() {
-            // timestamp is number, no escaping needed
             let created_clause = format!("created_at > {}", f.since.unwrap());
             filter_components.push(created_clause);
         }
@@ -231,10 +251,16 @@ fn query_from_sub(sub: &Subscription) -> String {
         query.push_str(" WHERE ");
         query.push_str(&filter_clauses.join(" OR "));
     }
-    info!("Query: {}", query);
-    return query;
+    debug!("Query: {}", query);
+    query
 }
 
+/// Perform a database query using a subscription.
+///
+/// The [`Subscription`] is converted into a SQL query. Each result
+/// is published on the `query_tx` channel as it is returned. If a
+/// message becomes available on the `abandon_query_rx` channel, the
+/// query is immediately aborted.
 pub async fn db_query(
     sub: Subscription,
     query_tx: tokio::sync::mpsc::Sender<QueryResult>,
@@ -246,22 +272,19 @@ pub async fn db_query(
            .unwrap();
        info!("Opened database for reading");
        info!("Going to query for: {:?}", sub);
-        // generate query
+        // generate SQL query
        let q = query_from_sub(&sub);
+        // execute the query
        let mut stmt = conn.prepare(&q).unwrap();
        let mut event_rows = stmt.query([]).unwrap();
-        let mut i: usize = 0;
        while let Some(row) = event_rows.next().unwrap() {
            // check if this is still active (we could do this every N rows)
            if abandon_query_rx.try_recv().is_ok() {
-                info!("Abandoning query...");
-                // we have received a request to abandon the query
+                debug!("query aborted");
                return;
            }
+            // TODO: check before unwrapping
            let event_json = row.get(0).unwrap();
-            i += 1;
-            info!("Sending event #{}", i);
            query_tx
                .blocking_send(QueryResult {
                    sub_id: sub.get_id(),
@@ -269,6 +292,6 @@ pub async fn db_query(
                })
                .ok();
        }
-        info!("Finished reading");
+        debug!("query completed");
    });
 }
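The is_alphanum to is_hex rename matters because this check is what gates filter values before they are spliced into the query as hex literals (x'...') rather than bound parameters. A standalone copy of the check from the diff, with a few illustrative inputs:

    /// Check if a string contains only hex characters (as in the diff above).
    fn is_hex(s: &str) -> bool {
        s.chars().all(|x| char::is_ascii_hexdigit(&x))
    }

    fn main() {
        assert!(is_hex("5e7a91"));        // safe to embed as x'5e7a91'
        assert!(!is_hex("5e7a91-zz"));    // rejected: non-hex characters
        assert!(!is_hex("x' OR 1=1 --")); // rejected: quoting never passes the filter
    }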
src/error.rs

@@ -1,11 +1,12 @@
-//! Error handling.
+//! Error handling
 
 use std::result;
 use thiserror::Error;
 use tungstenite::error::Error as WsError;
 
+/// Simple `Result` type for errors in this module
 pub type Result<T, E = Error> = result::Result<T, E>;
 
+/// Custom error type for Nostr
 #[derive(Error, Debug)]
 pub enum Error {
     #[error("Protocol parse error")]
@@ -32,18 +33,21 @@ pub enum Error {
 }
 
 impl From<rusqlite::Error> for Error {
+    /// Wrap SQL error
     fn from(r: rusqlite::Error) -> Self {
         Error::SqlError(r)
     }
 }
 
 impl From<serde_json::Error> for Error {
+    /// Wrap JSON error
     fn from(r: serde_json::Error) -> Self {
         Error::JsonParseFailed(r)
     }
 }
 
 impl From<WsError> for Error {
+    /// Wrap Websocket error
     fn from(r: WsError) -> Self {
         Error::WebsocketError(r)
     }
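These From impls are what let the rest of the crate use `?` on rusqlite, serde_json, and tungstenite results and get the crate's Error back through the Result alias. A small sketch under that assumption; count_events is a hypothetical helper, not part of the repo:

    use crate::error::Result;

    // Hypothetical: the `?` below relies on From<rusqlite::Error> for Error.
    fn count_events(conn: &rusqlite::Connection) -> Result<i64> {
        let n = conn.query_row("SELECT count(*) FROM event", [], |row| row.get(0))?;
        Ok(n)
    }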
src/event.rs (23 lines changed)

@@ -1,3 +1,4 @@
+//! Event parsing and validation
 use crate::error::Error::*;
 use crate::error::Result;
 use bitcoin_hashes::{sha256, Hash};
@@ -8,12 +9,14 @@ use serde_json::value::Value;
 use serde_json::Number;
 use std::str::FromStr;
 
+/// Event command in network format
 #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
 pub struct EventCmd {
     cmd: String, // expecting static "EVENT"
     event: Event,
 }
 
+/// Event parsed
 #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
 pub struct Event {
     pub id: String,
@@ -21,15 +24,16 @@ pub struct Event {
     pub(crate) created_at: u64,
     pub(crate) kind: u64,
     #[serde(deserialize_with = "tag_from_string")]
-    // TODO: array-of-arrays may need to be more general than a string container
+    // NOTE: array-of-arrays may need to be more general than a string container
     pub(crate) tags: Vec<Vec<String>>,
     pub(crate) content: String,
     pub(crate) sig: String,
 }
 
+/// Simple tag type for array of array of strings.
 type Tag = Vec<Vec<String>>;
 
-// handle a default value (empty vec) for null tags
+/// Deserializer that ensures we always have a [`Tag`].
 fn tag_from_string<'de, D>(deserializer: D) -> Result<Tag, D::Error>
 where
     D: Deserializer<'de>,
@@ -38,6 +42,7 @@ where
     Ok(opt.unwrap_or_else(|| vec![]))
 }
 
+/// Convert network event to parsed/validated event.
 impl From<EventCmd> for Result<Event> {
     fn from(ec: EventCmd) -> Result<Event> {
         // ensure command is correct
@@ -52,12 +57,12 @@ impl From<EventCmd> for Result<Event> {
 }
 
 impl Event {
-    // get short event identifer
+    /// Create a short event identifier, suitable for logging.
     pub fn get_event_id_prefix(&self) -> String {
         self.id.chars().take(8).collect()
     }
 
-    // check if this event is valid (should be propagated, stored) based on signature.
+    /// Check if this event has a valid signature.
     fn is_valid(&self) -> bool {
         // validation is performed by:
         // * parsing JSON string into event fields
@@ -89,7 +94,7 @@ impl Event {
         }
     }
 
-    // convert event to canonical representation for signing
+    /// Convert event to canonical representation for signing.
     fn to_canonical(&self) -> Option<String> {
         // create a JsonValue for each event element
         let mut c: Vec<Value> = vec![];
@@ -110,6 +115,8 @@ impl Event {
         c.push(Value::String(self.content.to_owned()));
         serde_json::to_string(&Value::Array(c)).ok()
     }
 
+    /// Convert tags to a canonical form for signing.
     fn tags_to_canonical(&self) -> Value {
         let mut tags = Vec::<Value>::new();
         // iterate over self tags,
@@ -124,7 +131,7 @@ impl Event {
         serde_json::Value::Array(tags)
     }
 
-    // get set of event tags
+    /// Get a list of event tags.
     pub fn get_event_tags(&self) -> Vec<&str> {
         let mut etags = vec![];
         for t in self.tags.iter() {
@@ -137,7 +144,7 @@ impl Event {
         etags
     }
 
-    // get set of pubkey tags
+    /// Get a list of pubkey/petname tags.
     pub fn get_pubkey_tags(&self) -> Vec<&str> {
         let mut ptags = vec![];
         for t in self.tags.iter() {
@@ -150,7 +157,7 @@ impl Event {
         ptags
     }
 
-    // check if given event is referenced in a tag
+    /// Check if a given event is referenced in an event tag.
     pub fn event_tag_match(&self, eventid: &str) -> bool {
         self.get_event_tags().contains(&eventid)
     }
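is_valid and to_canonical follow the NIP-01 scheme: the event id must equal the SHA-256 of a canonical JSON array built from the event fields, and the signature is checked against that digest. The diff only shows the tail of to_canonical, so the sketch below assumes the standard NIP-01 layout [0, pubkey, created_at, kind, tags, content] rather than quoting the elided body:

    use bitcoin_hashes::{sha256, Hash};
    use serde_json::json;

    /// Sketch: digest of a NIP-01 style canonical array (layout assumed,
    /// not taken from this diff).
    fn canonical_digest(
        pubkey: &str,
        created_at: u64,
        kind: u64,
        tags: &Vec<Vec<String>>,
        content: &str,
    ) -> sha256::Hash {
        let canonical = json!([0, pubkey, created_at, kind, tags, content]);
        sha256::Hash::hash(canonical.to_string().as_bytes())
    }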
src/main.rs (36 lines changed)

@@ -1,3 +1,4 @@
+//! Server process
 use futures::SinkExt;
 use futures::StreamExt;
 use log::*;
@@ -20,7 +21,7 @@ use tokio::sync::oneshot;
 
 /// Start running a Nostr relay server.
 fn main() -> Result<(), Error> {
-    // setup logger
+    // setup logger and environment
     let _ = env_logger::try_init();
     let addr = env::args()
         .nth(1)
@@ -35,33 +36,28 @@ fn main() -> Result<(), Error> {
     rt.block_on(async {
         let listener = TcpListener::bind(&addr).await.expect("Failed to bind");
         info!("Listening on: {}", addr);
-        // Establish global broadcast channel. This is where all
-        // accepted events will be distributed for other connected clients.
-        // this needs to be large enough to accomodate any slow
-        // readers - otherwise messages will be dropped before they
-        // can be processed. Since this is global to all connections,
-        // we can tolerate this being rather large (for 4096, the
-        // buffer itself is about 1MB).
+        // all client-submitted valid events are broadcast to every
+        // other client on this channel. This should be large enough
+        // to accomodate slower readers (messages are dropped if
+        // clients can not keep up).
         let (bcast_tx, _) = broadcast::channel::<Event>(4096);
-        // Establish database writer channel. This needs to be
-        // accessible from sync code, which is why the broadcast
-        // cannot be reused.
+        // validated events that need to be persisted are sent to the
+        // database on via this channel.
         let (event_tx, event_rx) = mpsc::channel::<Event>(16);
-        // start the database writer.
+        // start the database writer thread.
         db::db_writer(event_rx).await;
-        // setup a broadcast channel for invoking a process shutdown
+        // establish a channel for letting all threads now about a
+        // requested server shutdown.
         let (invoke_shutdown, _) = broadcast::channel::<()>(1);
-        let shutdown_handler = invoke_shutdown.clone();
+        let ctrl_c_shutdown = invoke_shutdown.clone();
         // listen for ctrl-c interruupts
         tokio::spawn(async move {
             tokio::signal::ctrl_c().await.unwrap();
-            // Your handler here
-            info!("got ctrl-c");
-            shutdown_handler.send(()).ok();
+            info!("Shutting down due to SIGINT");
+            ctrl_c_shutdown.send(()).ok();
         });
         let mut stop_listening = invoke_shutdown.subscribe();
-        // shutdown on Ctrl-C, or accept a new connection
+        // handle new client connection requests, or SIGINT signals.
         loop {
             tokio::select! {
                 _ = stop_listening.recv() => {
@@ -81,6 +77,8 @@ fn main() -> Result<(), Error> {
     Ok(())
 }
 
+/// Handle new client connections. This runs through an event loop
+/// for all client communication.
 async fn nostr_server(
     stream: TcpStream,
     broadcast: Sender<Event>,
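The rewritten comments describe two channels: a broadcast channel that fans accepted events out to every client task, and a bounded mpsc channel that feeds the single database writer. A self-contained sketch of that topology, with Event simplified to a String so the example stands alone:

    use tokio::sync::{broadcast, mpsc};

    #[tokio::main]
    async fn main() {
        let (bcast_tx, _) = broadcast::channel::<String>(4096); // fan-out to clients
        let (event_tx, mut event_rx) = mpsc::channel::<String>(16); // feeds the DB writer

        // a single writer task drains the mpsc side (db::db_writer in the relay)
        tokio::spawn(async move {
            while let Some(ev) = event_rx.recv().await {
                println!("persist: {}", ev);
            }
        });

        // each client task would hold a broadcast receiver and a clone of the mpsc sender
        let mut bcast_rx = bcast_tx.subscribe();
        event_tx.send("event-json".to_owned()).await.ok();
        bcast_tx.send("event-json".to_owned()).ok();
        println!("got broadcast: {}", bcast_rx.recv().await.unwrap());
    }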
src/protostream.rs

@@ -1,3 +1,4 @@
+//! Nostr protocol layered over WebSocket
 use crate::close::CloseCmd;
 use crate::error::{Error, Result};
 use crate::event::EventCmd;
@@ -14,38 +15,43 @@ use tokio_tungstenite::WebSocketStream;
 use tungstenite::error::Error as WsError;
 use tungstenite::protocol::Message;
 
-// A Nostr message is either event, subscription, or close.
+/// Nostr protocol messages from a client
 #[derive(Deserialize, Serialize, Clone, PartialEq, Debug)]
 #[serde(untagged)]
 pub enum NostrMessage {
+    /// An `EVENT` message
     EventMsg(EventCmd),
+    /// A `REQ` message
     SubMsg(Subscription),
+    /// A `CLOSE` message
     CloseMsg(CloseCmd),
 }
 
-// Either an event w/ subscription, or a notice
+/// Nostr protocol messages from a relay/server
 #[derive(Deserialize, Serialize, Clone, PartialEq, Debug)]
 pub enum NostrResponse {
+    /// A `NOTICE` response
     NoticeRes(String),
-    // A subscription identifier and serialized response
+    /// An `EVENT` response, composed of the subscription identifier,
+    /// and serialized event JSON
     EventRes(String, String),
 }
 
-// A Nostr protocol stream is layered on top of a Websocket stream.
+/// A Nostr protocol stream is layered on top of a Websocket stream.
 pub struct NostrStream {
     ws_stream: WebSocketStream<TcpStream>,
 }
 
-// given a websocket, return a protocol stream
-//impl Stream<Item = Result<BasicMessage, BasicError>> + Sink<BasicResponse>
+/// Given a websocket, return a protocol stream wrapper.
 pub fn wrap_ws_in_nostr(ws: WebSocketStream<TcpStream>) -> NostrStream {
     return NostrStream { ws_stream: ws };
 }
 
+/// Implement the [`Stream`] interface to produce Nostr messages.
 impl Stream for NostrStream {
     type Item = Result<NostrMessage>;
     fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
-        // convert Message to NostrMessage
+        /// Convert Message to NostrMessage
         fn convert(msg: String) -> Result<NostrMessage> {
             let parsed_res: Result<NostrMessage> = serde_json::from_str(&msg).map_err(|e| e.into());
             match parsed_res {
@@ -56,22 +62,22 @@ impl Stream for NostrStream {
                }
            }
        }
 
        match Pin::new(&mut self.ws_stream).poll_next(cx) {
-            Poll::Pending => Poll::Pending, // not ready
-            Poll::Ready(None) => Poll::Ready(None), // done
+            Poll::Pending => Poll::Pending,
+            Poll::Ready(None) => Poll::Ready(None),
            Poll::Ready(Some(v)) => match v {
-                Ok(Message::Text(vs)) => Poll::Ready(Some(convert(vs))), // convert message->basicmessage
+                Ok(Message::Text(vs)) => Poll::Ready(Some(convert(vs))),
                Ok(Message::Binary(_)) => Poll::Ready(Some(Err(Error::ProtoParseError))),
                Ok(Message::Pong(_)) | Ok(Message::Ping(_)) => Poll::Pending,
                Ok(Message::Close(_)) => Poll::Ready(None),
-                Err(WsError::AlreadyClosed) | Err(WsError::ConnectionClosed) => Poll::Ready(None), // done
+                Err(WsError::AlreadyClosed) | Err(WsError::ConnectionClosed) => Poll::Ready(None),
                Err(_) => Poll::Ready(Some(Err(Error::ConnError))),
            },
        }
    }
 }
 
+/// Implement the [`Sink`] interface to produce Nostr responses.
 impl Sink<NostrResponse> for NostrStream {
     type Error = Error;
 
@@ -85,9 +91,8 @@ impl Sink<NostrResponse> for NostrStream {
     }
 
     fn start_send(mut self: Pin<&mut Self>, item: NostrResponse) -> Result<(), Self::Error> {
-        //let res_message = serde_json::to_string(&item).expect("Could convert message to string");
-        // create the string to send.
-        // TODO: do real escaping for both of these. Currently output isn't correctly escaped.
+        // TODO: do real escaping for these - at least on NOTICE,
+        // which surely has some problems if arbitrary text is sent.
         let send_str = match item {
             NostrResponse::NoticeRes(msg) => {
                 let s = msg.replace("\"", "");
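Because NostrMessage is #[serde(untagged)], the convert helper's serde_json::from_str tries each variant in declaration order until one fits the incoming JSON. A simplified, self-contained illustration of that mechanism using stand-in types (the relay's real EventCmd, Subscription, and CloseCmd are richer):

    use serde::Deserialize;

    // Stand-ins shaped like the wire arrays, purely for illustration.
    #[derive(Deserialize, Debug)]
    struct DemoReq(String, String, serde_json::Value); // ["REQ", "sub-1", {...}]
    #[derive(Deserialize, Debug)]
    struct DemoClose(String, String); // ["CLOSE", "sub-1"]

    #[derive(Deserialize, Debug)]
    #[serde(untagged)]
    enum DemoMessage {
        Req(DemoReq),
        Close(DemoClose),
    }

    fn main() {
        let m: DemoMessage = serde_json::from_str(r#"["CLOSE","sub-1"]"#).unwrap();
        println!("{:?}", m); // a two-element array only fits Close
        let m: DemoMessage = serde_json::from_str(r#"["REQ","sub-1",{"kind":1}]"#).unwrap();
        println!("{:?}", m); // a three-element array fits Req
    }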
src/subscription.rs

@@ -1,30 +1,44 @@
-use crate::error::{Error, Result};
+//! Subscription and filter parsing
+use crate::error::Result;
 use crate::event::Event;
 use serde::{Deserialize, Deserializer, Serialize};
-//use serde_json::json;
-//use serde_json::Result;
 
+/// Subscription identifier and set of request filters
 #[derive(Serialize, PartialEq, Debug, Clone)]
 pub struct Subscription {
     pub id: String,
     pub filters: Vec<ReqFilter>,
 }
 
+/// Filter for requests
+///
+/// Corresponds to client-provided subscription request elements. Any
+/// element can be present if it should be used in filtering, or
+/// absent ([`None`]) if it should be ignored.
 #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
 #[serde(deny_unknown_fields)]
 pub struct ReqFilter {
+    /// Event hash
     pub id: Option<String>,
+    /// Author public key
     pub author: Option<String>,
+    /// Event kind
    pub kind: Option<u64>,
+    /// Referenced event hash
    #[serde(rename = "#e")]
    pub event: Option<String>,
+    /// Referenced public key for a petname
    #[serde(rename = "#p")]
    pub pubkey: Option<String>,
+    /// Events published after this time
    pub since: Option<u64>,
+    /// List of author public keys
    pub authors: Option<Vec<String>>,
 }
 
 impl<'de> Deserialize<'de> for Subscription {
+    /// Custom deserializer for subscriptions, which have a more
+    /// complex structure than the other message types.
     fn deserialize<D>(deserializer: D) -> Result<Subscription, D::Error>
     where
         D: Deserializer<'de>,
@@ -74,17 +88,13 @@ impl<'de> Deserialize<'de> for Subscription {
 }
 
 impl Subscription {
-    pub fn parse(json: &str) -> Result<Subscription> {
-        serde_json::from_str(json).map_err(|e| Error::JsonParseFailed(e))
-    }
+    /// Get a copy of the subscription identifier.
     pub fn get_id(&self) -> String {
         self.id.clone()
     }
-    pub fn get_filter_count(&self) -> usize {
-        self.filters.len()
-    }
+    /// Determine if this subscription matches a given [`Event`]. Any
+    /// individual filter match is sufficient.
     pub fn interested_in_event(&self, event: &Event) -> bool {
-        // loop through every filter, and return true if any match this event.
         for f in self.filters.iter() {
             if f.interested_in_event(event) {
                 return true;
@@ -95,7 +105,7 @@ impl Subscription {
 }
 
 impl ReqFilter {
-    // attempt to match against author/authors fields
+    /// Check if this filter either matches, or does not care about an author.
     fn author_match(&self, event: &Event) -> bool {
         self.authors
             .as_ref()
@@ -107,6 +117,7 @@ impl ReqFilter {
             .map(|v| v == &event.pubkey)
             .unwrap_or(true)
     }
+    /// Check if this filter either matches, or does not care about the event tags.
     fn event_match(&self, event: &Event) -> bool {
         self.event
             .as_ref()
@@ -114,13 +125,13 @@ impl ReqFilter {
             .unwrap_or(true)
     }
 
+    /// Check if this filter either matches, or does not care about the kind.
     fn kind_match(&self, kind: u64) -> bool {
         self.kind.map(|v| v == kind).unwrap_or(true)
     }
 
+    /// Determine if all populated fields in this filter match the provided event.
     pub fn interested_in_event(&self, event: &Event) -> bool {
-        // determine if all populated fields in this filter match the provided event.
-        // a filter matches an event if all the populated fields match.
         self.id.as_ref().map(|v| v == &event.id).unwrap_or(true)
             && self.since.map(|t| event.created_at > t).unwrap_or(true)
             && self.kind_match(event.kind)
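The matching semantics documented here: within a single ReqFilter every populated field must match and absent fields match anything, while a Subscription matches if any one of its filters matches. A standalone sketch of the Option-based per-field check used throughout ReqFilter:

    // A missing field (None) matches everything, a present field must compare equal,
    // mirroring ReqFilter::kind_match in the diff above.
    fn kind_match(filter_kind: Option<u64>, event_kind: u64) -> bool {
        filter_kind.map(|v| v == event_kind).unwrap_or(true)
    }

    fn main() {
        assert!(kind_match(None, 1));      // no kind constraint
        assert!(kind_match(Some(1), 1));   // constraint satisfied
        assert!(!kind_match(Some(1), 42)); // constraint violated
    }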