Mirror of https://github.com/scsibug/nostr-rs-relay.git (synced 2024-11-22 00:59:07 -05:00)
feat: limit_scrapers
Signed-off-by: Greg Heartsfield <scsibug@imap.cc>
parent 1ce029860c
commit 388eadf880
@@ -155,6 +155,11 @@ reject_future_seconds = 1800
 # 0, 1, 2, 3, 7, 40, 41, 42, 43, 44, 30023,
 #]

+# Rejects imprecise requests (kind-only, author-only, etc.)
+# This is a temporary measure to improve the adoption of the outbox model.
+# It's recommended to have this enabled.
+limit_scrapers = true
+
 [authorization]
 # Pubkey addresses in this array are whitelisted for event publishing.
 # Only valid events by these authors will be accepted, if the variable
@@ -74,6 +74,7 @@ pub struct Limits {
     pub event_persist_buffer: usize, // events to buffer for database commits (block senders if database writes are too slow)
     pub event_kind_blacklist: Option<Vec<u64>>,
     pub event_kind_allowlist: Option<Vec<u64>>,
+    pub limit_scrapers: bool,
 }

 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -308,6 +309,7 @@ impl Default for Settings {
                 event_persist_buffer: 4096,
                 event_kind_blacklist: None,
                 event_kind_allowlist: None,
+                limit_scrapers: false
             },
             authorization: Authorization {
                 pubkey_whitelist: None, // Allow any address to publish
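Taken together, the hunks above wire the new option from the sample configuration (presumably config.toml) into the settings `Limits` struct, with the Rust default set to `false` so scraper limiting is opt-in. Below is a minimal, self-contained sketch of how such a flag can map onto a boolean field; the `LimitsSketch` struct, the `#[serde(default)]` fallback, and the use of the `serde` and `toml` crates are illustrative assumptions, not the relay's actual settings loader.

```rust
use serde::Deserialize;

/// Illustrative stand-in for a limits section; not the relay's real `Limits` struct.
#[derive(Deserialize)]
struct LimitsSketch {
    /// An absent key falls back to `false`, mirroring the opt-in default above.
    #[serde(default)]
    limit_scrapers: bool,
}

fn main() {
    // Key present and enabled, as in the sample config added by this commit.
    let enabled: LimitsSketch = toml::from_str("limit_scrapers = true").unwrap();
    assert!(enabled.limit_scrapers);

    // Key omitted entirely: scraper limiting stays off.
    let omitted: LimitsSketch = toml::from_str("").unwrap();
    assert!(!omitted.limit_scrapers);

    println!("enabled: {}, omitted: {}", enabled.limit_scrapers, omitted.limit_scrapers);
}
```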
@@ -1261,7 +1261,6 @@ async fn nostr_server(
             // handle each type of message
             let evid = ec.event_id().to_owned();
             let parsed : Result<EventWrapper> = Result::<EventWrapper>::from(ec);
-            metrics.cmd_event.inc();
             match parsed {
                 Ok(WrappedEvent(e)) => {
                     metrics.cmd_event.inc();
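The hunk above also drops a `metrics.cmd_event.inc()` call that ran before the event was parsed; the same counter is incremented again inside the `Ok(WrappedEvent(e))` arm, so each accepted event was counted twice. A small sketch of that double count, assuming a Prometheus-style `IntCounter` (the relay's actual metrics type is not shown in this hunk):

```rust
use prometheus::IntCounter;

fn main() {
    // Stand-in for the relay's cmd_event counter.
    let cmd_event = IntCounter::new("cmd_event", "EVENT commands received").unwrap();

    // Old flow: increment once before parsing, then again on successful parse.
    cmd_event.inc(); // pre-parse increment (the line removed above)
    cmd_event.inc(); // increment inside the Ok(WrappedEvent(e)) arm
    assert_eq!(cmd_event.get(), 2); // a single EVENT message counted twice
}
```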
@@ -1346,6 +1345,11 @@ async fn nostr_server(
             if let Some(ref lim) = sub_lim_opt {
                 lim.until_ready_with_jitter(jitter).await;
             }
+            if settings.limits.limit_scrapers && s.is_scraper() {
+                info!("subscription was scraper, ignoring (cid: {}, sub: {:?})", cid, s.id);
+                ws_stream.send(Message::Text(format!("[\"EOSE\",\"{}\"]", s.id))).await.ok();
+                continue
+            }
             let (abandon_query_tx, abandon_query_rx) = oneshot::channel::<()>();
             match conn.subscribe(s.clone()) {
                 Ok(()) => {
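When the option is on and a REQ is classified as a scraper, the relay does not return an error: it logs, immediately sends an EOSE frame for the subscription id, and skips running the query, so the client simply sees an empty result set. A sketch of that reply shape, assuming only the usual `["EOSE","<subscription id>"]` JSON form used by the `format!` call above:

```rust
/// Build the relay-to-client EOSE frame for a subscription id,
/// matching the format! call in the hunk above.
fn eose_frame(sub_id: &str) -> String {
    format!("[\"EOSE\",\"{}\"]", sub_id)
}

fn main() {
    // A scraper-flagged subscription gets this frame and no stored events.
    let frame = eose_frame("some-id");
    assert_eq!(frame, r#"["EOSE","some-id"]"#);
    println!("{frame}");
}
```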
@@ -258,6 +258,29 @@ impl Subscription {
         }
         false
     }
+
+    /// Is this subscription defined as a scraper query
+    pub fn is_scraper(&self) -> bool {
+        for f in &self.filters {
+            let mut precision = 0;
+            if f.ids.is_some() {
+                precision += 2;
+            }
+            if f.authors.is_some() {
+                precision += 1;
+            }
+            if f.kinds.is_some() {
+                precision += 1;
+            }
+            if f.tags.is_some() {
+                precision += 1;
+            }
+            if precision < 2 {
+                return true;
+            }
+        }
+        false
+    }
 }

 fn prefix_match(prefixes: &[String], target: &str) -> bool {
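The heuristic scores each filter in the subscription: an `ids` list counts for 2, while `authors`, `kinds`, and tag filters count for 1 each; if any filter scores below 2, the whole subscription is treated as a scraper. A standalone sketch of the same scoring over a simplified filter type (the relay's real filter has more fields and prefix-matching semantics):

```rust
/// Simplified stand-in for a REQ filter; only presence or absence of fields matters here.
#[derive(Default)]
struct FilterSketch {
    ids: Option<Vec<String>>,
    authors: Option<Vec<String>>,
    kinds: Option<Vec<u64>>,
    tags: Option<Vec<(char, Vec<String>)>>,
}

/// Same precision scoring as the diff above: any filter scoring < 2
/// marks the whole subscription as a scraper.
fn is_scraper(filters: &[FilterSketch]) -> bool {
    for f in filters {
        let mut precision = 0;
        if f.ids.is_some() {
            precision += 2; // an explicit id list alone is precise enough
        }
        if f.authors.is_some() {
            precision += 1;
        }
        if f.kinds.is_some() {
            precision += 1;
        }
        if f.tags.is_some() {
            precision += 1;
        }
        if precision < 2 {
            return true;
        }
    }
    false
}

fn main() {
    // kind-only filter: precision 1, rejected as a scraper.
    let kind_only = FilterSketch { kinds: Some(vec![1984]), ..Default::default() };
    assert!(is_scraper(&[kind_only]));

    // kind plus author: precision 2, allowed.
    let kind_and_author = FilterSketch {
        kinds: Some(vec![1984]),
        authors: Some(vec!["aaaa".into()]),
        ..Default::default()
    };
    assert!(!is_scraper(&[kind_and_author]));

    // ids alone: precision 2, allowed.
    let ids_only = FilterSketch { ids: Some(vec!["aaaa".into()]), ..Default::default() };
    assert!(!is_scraper(&[ids_only]));
}
```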
@@ -647,4 +670,14 @@ mod tests {
         }
         Ok(())
     }
+
+    #[test]
+    fn is_scraper() -> Result<()> {
+        assert_eq!(true, serde_json::from_str::<Subscription>(r#"["REQ","some-id",{"kinds": [1984],"since": 123,"limit":1}]"#)?.is_scraper());
+        assert_eq!(true, serde_json::from_str::<Subscription>(r#"["REQ","some-id",{"kinds": [1984]},{"kinds": [1984],"authors":["aaaa"]}]"#)?.is_scraper());
+        assert_eq!(false, serde_json::from_str::<Subscription>(r#"["REQ","some-id",{"kinds": [1984],"authors":["aaaa"]}]"#)?.is_scraper());
+        assert_eq!(false, serde_json::from_str::<Subscription>(r#"["REQ","some-id",{"ids": ["aaaa"]}]"#)?.is_scraper());
+        assert_eq!(false, serde_json::from_str::<Subscription>(r##"["REQ","some-id",{"#p": ["aaaa"],"kinds":[1,4]}]"##)?.is_scraper());
+        Ok(())
+    }
 }