improvement: dedupe filters in a REQ

Greg Heartsfield 2023-01-31 18:09:43 -06:00
parent c8e8b71b91
commit 9c77b06f79
2 changed files with 75 additions and 11 deletions
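
The change relies on Vec::dedup, which drops consecutive equal elements (which is why ReqFilter keeps its PartialEq derive). A minimal sketch of that behavior, using plain strings in place of parsed filters (illustrative only, not part of this commit):

fn main() {
    // Vec::dedup removes only *consecutive* duplicates, so identical
    // adjacent REQ filters collapse to a single entry.
    let mut filters = vec![
        r#"{"kinds":[1984]}"#,
        r#"{"kinds":[1984]}"#,
        r#"{"kinds":[1]}"#,
    ];
    filters.dedup();
    assert_eq!(filters.len(), 2);
}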


@@ -349,6 +349,7 @@ impl NostrRepo for SqliteRepo {
// cutoff for displaying slow queries
let slow_cutoff = Duration::from_millis(250);
let mut filter_count = 0;
// remove duplicates from the filter list.
for filter in sub.filters.iter() {
let filter_start = Instant::now();
filter_count += 1;
@@ -447,7 +448,7 @@ impl NostrRepo for SqliteRepo {
} else {
warn!("Could not get a database connection for querying");
}
// if the filter took more than 1 second of db_time, print out the JSON.
// if the filter took too much db_time, print out the JSON.
if filter_start.elapsed() > slow_cutoff && client_id.starts_with('0') {
debug!(
"query filter req (slow): {} (cid: {}, sub: {:?}, filter: {})",

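The hunk above times each filter with Instant and only logs when the query exceeds slow_cutoff and the client id starts with '0' (a cheap way to sample roughly one in sixteen clients, assuming the ids are hex-encoded). A self-contained sketch of that timing pattern, with hypothetical names:

use std::time::{Duration, Instant};

fn main() {
    let slow_cutoff = Duration::from_millis(250);
    let client_id = "0a1b2c3d"; // hypothetical client id
    let filter_start = Instant::now();
    // ... run the filter query here ...
    if filter_start.elapsed() > slow_cutoff && client_id.starts_with('0') {
        println!("query filter req (slow): cid={}", client_id);
    }
}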

@@ -2,7 +2,8 @@
use crate::error::Result;
use crate::event::Event;
use serde::de::Unexpected;
use serde::{Deserialize, Deserializer, Serialize};
use serde::ser::SerializeMap;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde_json::Value;
use std::collections::HashMap;
use std::collections::HashSet;
@@ -19,37 +20,63 @@ pub struct Subscription {
/// Corresponds to client-provided subscription request elements. Any
/// element can be present if it should be used in filtering, or
/// absent ([`None`]) if it should be ignored.
#[derive(Serialize, PartialEq, Eq, Debug, Clone)]
#[derive(PartialEq, Eq, Debug, Clone)]
pub struct ReqFilter {
/// Event hashes
#[serde(skip_serializing_if="Option::is_none")]
pub ids: Option<Vec<String>>,
/// Event kinds
#[serde(skip_serializing_if="Option::is_none")]
pub kinds: Option<Vec<u64>>,
/// Events published after this time
#[serde(skip_serializing_if="Option::is_none")]
pub since: Option<u64>,
/// Events published before this time
#[serde(skip_serializing_if="Option::is_none")]
pub until: Option<u64>,
/// List of author public keys
#[serde(skip_serializing_if="Option::is_none")]
pub authors: Option<Vec<String>>,
/// Limit number of results
#[serde(skip_serializing_if="Option::is_none")]
pub limit: Option<u64>,
/// Set of tags
#[serde(skip)]
pub tags: Option<HashMap<char, HashSet<String>>>,
/// Force no matches due to malformed data
// we can't represent it in the req filter, so we don't want to
// erroneously match. This basically indicates the req tried to
// do something invalid.
#[serde(skip)]
pub force_no_match: bool,
}
impl Serialize for ReqFilter {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where S:Serializer,
{
let mut map = serializer.serialize_map(None)?;
if let Some(ids) = &self.ids {
map.serialize_entry("ids", &ids)?;
}
if let Some(kinds) = &self.kinds {
map.serialize_entry("kinds", &kinds)?;
}
if let Some(until) = &self.until {
map.serialize_entry("until", until)?;
}
if let Some(since) = &self.since {
map.serialize_entry("since", since)?;
}
if let Some(limit) = &self.limit {
map.serialize_entry("limit", limit)?;
}
if let Some(authors) = &self.authors {
map.serialize_entry("authors", &authors)?;
}
// serialize tags
if let Some(tags) = &self.tags {
for (k,v) in tags {
let vals:Vec<&String> = v.iter().collect();
map.serialize_entry(&format!("#{}",k), &vals)?;
}
}
map.end()
}
}
impl<'de> Deserialize<'de> for ReqFilter {
fn deserialize<D>(deserializer: D) -> Result<ReqFilter, D::Error>
where
@@ -195,6 +222,7 @@ impl<'de> Deserialize<'de> for Subscription {
// create indexes
filters.push(f);
}
filters.dedup();
Ok(Subscription {
id: sub_id.to_owned(),
filters,
@@ -346,6 +374,23 @@ mod tests {
assert!(serde_json::from_str::<Subscription>(raw_json).is_ok());
}
#[test]
fn dupe_filter() -> Result<()> {
let raw_json = r#"["REQ","some-id",{"kinds": [1984]}, {"kinds": [1984]}]"#;
let s: Subscription = serde_json::from_str(raw_json)?;
assert_eq!(s.filters.len(), 1);
Ok(())
}
#[test]
fn dupe_filter_many() -> Result<()> {
// duplicate filters in different order
let raw_json = r#"["REQ","some-id",{"kinds":[1984]},{"kinds":[1984]},{"kinds":[1984]},{"kinds":[1984]}]"#;
let s: Subscription = serde_json::from_str(raw_json)?;
assert_eq!(s.filters.len(), 1);
Ok(())
}
#[test]
fn author_filter() -> Result<()> {
let raw_json = r#"["REQ","some-id",{"authors": ["test-author-id"]}]"#;
@@ -577,4 +622,22 @@ mod tests {
assert!(!s.interested_in_event(&e));
Ok(())
}
#[test]
fn serialize_filter() -> Result<()> {
let s: Subscription = serde_json::from_str(r##"["REQ","xyz",{"authors":["abc", "bcd"], "since": 10, "until": 20, "limit":100, "#e": ["foo", "bar"], "#d": ["test"]}]"##)?;
let f = s.filters.get(0);
let serialized = serde_json::to_string(&f)?;
let serialized_wrapped = format!(r##"["REQ", "xyz",{}]"##, serialized);
let parsed: Subscription = serde_json::from_str(&serialized_wrapped)?;
let parsed_filter = parsed.filters.get(0);
if let Some(pf) = parsed_filter {
assert_eq!(pf.since, Some(10));
assert_eq!(pf.until, Some(20));
assert_eq!(pf.limit, Some(100));
} else {
assert!(false, "filter could not be parsed");
}
Ok(())
}
}
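
The hand-written Serialize above uses serde's SerializeMap so that absent (None) fields are omitted and tag filters come back out as "#e"-style keys, which is what the serialize_filter round-trip test exercises. A stripped-down sketch of the same pattern on a toy struct (field names are illustrative, not from the repo):

use serde::ser::{Serialize, SerializeMap, Serializer};

struct ToyFilter {
    kinds: Option<Vec<u64>>,
    limit: Option<u64>,
}

impl Serialize for ToyFilter {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Emit only the fields that are present, mirroring skip_serializing_if.
        let mut map = serializer.serialize_map(None)?;
        if let Some(kinds) = &self.kinds {
            map.serialize_entry("kinds", kinds)?;
        }
        if let Some(limit) = &self.limit {
            map.serialize_entry("limit", limit)?;
        }
        map.end()
    }
}

fn main() {
    let f = ToyFilter { kinds: Some(vec![1984]), limit: None };
    // Prints {"kinds":[1984]}; the None field is omitted entirely.
    println!("{}", serde_json::to_string(&f).unwrap());
}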