feat: limit_scrapers

Signed-off-by: Greg Heartsfield <scsibug@imap.cc>
Kieran 2023-11-22 22:45:30 +00:00 committed by Greg Heartsfield
parent 1ce029860c
commit 388eadf880
4 changed files with 53 additions and 9 deletions

View File

@@ -155,6 +155,11 @@ reject_future_seconds = 1800
# 0, 1, 2, 3, 7, 40, 41, 42, 43, 44, 30023,
#]
# Reject imprecise requests (kind only, author only, etc.)
# This is a temporary measure to improve adoption of the outbox model
# It is recommended to have this enabled
limit_scrapers = true
[authorization]
# Pubkey addresses in this array are whitelisted for event publishing.
# Only valid events by these authors will be accepted, if the variable
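To make the new option concrete, here is a rough illustration of the request shapes it distinguishes (subscription ids, kinds, and author values are made up; the exact scoring lives in the is_scraper() change further down). A REQ that gives only kinds, or only authors, counts as imprecise and is cut short, while one that pins authors together with kinds, the shape the outbox model encourages, is served as usual:

// Illustrative REQ frames only, not part of this commit.
let scraper_req = r#"["REQ","sub1",{"kinds":[1],"limit":500}]"#;        // kind only: rejected when limit_scrapers = true
let outbox_req  = r#"["REQ","sub2",{"kinds":[1],"authors":["aaaa"]}]"#; // kind + author: served normally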

View File

@@ -74,6 +74,7 @@ pub struct Limits {
pub event_persist_buffer: usize, // events to buffer for database commits (block senders if database writes are too slow)
pub event_kind_blacklist: Option<Vec<u64>>,
pub event_kind_allowlist: Option<Vec<u64>>,
pub limit_scrapers: bool,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -308,6 +309,7 @@ impl Default for Settings {
event_persist_buffer: 4096,
event_kind_blacklist: None,
event_kind_allowlist: None,
limit_scrapers: false
},
authorization: Authorization {
pubkey_whitelist: None, // Allow any address to publish

View File

@@ -1261,7 +1261,6 @@ async fn nostr_server(
// handle each type of message
let evid = ec.event_id().to_owned();
let parsed : Result<EventWrapper> = Result::<EventWrapper>::from(ec);
metrics.cmd_event.inc();
match parsed {
Ok(WrappedEvent(e)) => {
metrics.cmd_event.inc();
@@ -1342,10 +1341,15 @@ async fn nostr_server(
if conn.has_subscription(&s) {
info!("client sent duplicate subscription, ignoring (cid: {}, sub: {:?})", cid, s.id);
} else {
metrics.cmd_req.inc();
if let Some(ref lim) = sub_lim_opt {
lim.until_ready_with_jitter(jitter).await;
}
if settings.limits.limit_scrapers && s.is_scraper() {
info!("subscription was scraper, ignoring (cid: {}, sub: {:?})", cid, s.id);
ws_stream.send(Message::Text(format!("[\"EOSE\",\"{}\"]", s.id))).await.ok();
continue
}
let (abandon_query_tx, abandon_query_rx) = oneshot::channel::<()>();
match conn.subscribe(s.clone()) {
Ok(()) => {
@@ -1369,7 +1373,7 @@ async fn nostr_server(
// closing a request simply removes the subscription.
let parsed : Result<Close> = Result::<Close>::from(cc);
if let Ok(c) = parsed {
metrics.cmd_close.inc();
// check if a query is currently
// running, and remove it if so.
let stop_tx = running_queries.remove(&c.id);
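A note on the behaviour added above: a rejected scraper REQ is not reported as an error. The subscription is never registered and no query runs; the relay's only reply is an EOSE for that subscription id, so a compliant client sees an empty, already-complete result and the connection stays open. A minimal sketch of the exchange, assuming limit_scrapers = true (the subscription id is made up):

// Hypothetical wire exchange, for illustration only.
let client_sends  = r#"["REQ","scrape-all",{"kinds":[1]}]"#; // kind-only filter scores too low
let relay_replies = r#"["EOSE","scrape-all"]"#;              // sole reply: no events, no NOTICE, no disconnect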

View File

@@ -45,8 +45,8 @@ pub struct ReqFilter {
impl Serialize for ReqFilter {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut map = serializer.serialize_map(None)?;
if let Some(ids) = &self.ids {
@@ -80,8 +80,8 @@ impl Serialize for ReqFilter {
impl<'de> Deserialize<'de> for ReqFilter {
fn deserialize<D>(deserializer: D) -> Result<ReqFilter, D::Error>
where
D: Deserializer<'de>,
{
let received: Value = Deserialize::deserialize(deserializer)?;
let filter = received.as_object().ok_or_else(|| {
@@ -184,8 +184,8 @@ impl<'de> Deserialize<'de> for Subscription {
/// Custom deserializer for subscriptions, which have a more
/// complex structure than the other message types.
fn deserialize<D>(deserializer: D) -> Result<Subscription, D::Error>
where
D: Deserializer<'de>,
{
let mut v: Value = Deserialize::deserialize(deserializer)?;
// this should be a 3-or-more element array.
@@ -258,6 +258,29 @@ impl Subscription {
}
false
}
/// Is this subscription defined as a scraper query
pub fn is_scraper(&self) -> bool {
for f in &self.filters {
let mut precision = 0;
if f.ids.is_some() {
precision += 2;
}
if f.authors.is_some() {
precision += 1;
}
if f.kinds.is_some() {
precision += 1;
}
if f.tags.is_some() {
precision += 1;
}
if precision < 2 {
return true;
}
}
false
}
}
fn prefix_match(prefixes: &[String], target: &str) -> bool {
@@ -647,4 +670,14 @@ mod tests {
}
Ok(())
}
#[test]
fn is_scraper() -> Result<()> {
assert_eq!(true, serde_json::from_str::<Subscription>(r#"["REQ","some-id",{"kinds": [1984],"since": 123,"limit":1}]"#)?.is_scraper());
assert_eq!(true, serde_json::from_str::<Subscription>(r#"["REQ","some-id",{"kinds": [1984]},{"kinds": [1984],"authors":["aaaa"]}]"#)?.is_scraper());
assert_eq!(false, serde_json::from_str::<Subscription>(r#"["REQ","some-id",{"kinds": [1984],"authors":["aaaa"]}]"#)?.is_scraper());
assert_eq!(false, serde_json::from_str::<Subscription>(r#"["REQ","some-id",{"ids": ["aaaa"]}]"#)?.is_scraper());
assert_eq!(false, serde_json::from_str::<Subscription>(r##"["REQ","some-id",{"#p": ["aaaa"],"kinds":[1,4]}]"##)?.is_scraper());
Ok(())
}
}
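The weighting in is_scraper() is a heuristic introduced by this commit rather than anything specified by the protocol: an ids filter counts as fully precise on its own, authors, kinds, and tag filters each add one point, and generic fields such as since or limit add nothing. The check is also per filter, so a single imprecise filter marks the whole REQ as a scraper even when other filters in the same subscription are precise. A quick sketch of that edge case (subscription id is made up; it mirrors the style of the tests above):

// Hypothetical check using the Subscription type and is_scraper() added in this commit.
let sub: Subscription = serde_json::from_str(r#"["REQ","mixed",{"ids":["aaaa"]},{"kinds":[1]}]"#)?;
assert!(sub.is_scraper()); // the kinds-only filter alone scores 1, below the threshold of 2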