fix term date truncation
PSeitz committed Jul 30, 2024
1 parent ba33a2a commit b943add
Showing 9 changed files with 246 additions and 62 deletions.
16 changes: 12 additions & 4 deletions src/core/json_utils.rs
@@ -4,7 +4,7 @@ use rustc_hash::FxHashMap;

use crate::postings::{IndexingContext, IndexingPosition, PostingsWriter};
use crate::schema::document::{ReferenceValue, ReferenceValueLeaf, Value};
-use crate::schema::Type;
+use crate::schema::{Type, DATE_TIME_PRECISION_INDEXED};
use crate::time::format_description::well_known::Rfc3339;
use crate::time::{OffsetDateTime, UtcOffset};
use crate::tokenizer::TextAnalyzer;
@@ -189,6 +189,7 @@ pub(crate) fn index_json_value<'a, V: Value<'a>>(
ctx.path_to_unordered_id
.get_or_allocate_unordered_id(json_path_writer.as_str()),
);
+let val = val.truncate(DATE_TIME_PRECISION_INDEXED);
term_buffer.append_type_and_fast_value(val);
postings_writer.subscribe(doc, 0u32, term_buffer, ctx);
}
@@ -239,7 +240,11 @@ pub(crate) fn index_json_value<'a, V: Value<'a>>(
/// Tries to infer a JSON type from a string and append it to the term.
///
/// The term must be json + JSON path.
-pub fn convert_to_fast_value_and_append_to_json_term(mut term: Term, phrase: &str) -> Option<Term> {
+pub fn convert_to_fast_value_and_append_to_json_term(
+    mut term: Term,
+    phrase: &str,
+    truncate_date_for_search: bool,
+) -> Option<Term> {
assert_eq!(
term.value()
.as_json_value_bytes()
@@ -250,8 +255,11 @@ pub fn convert_to_fast_value_and_append_to_json_term(mut term: Term, phrase: &str) -> Option<Term> {
"JSON value bytes should be empty"
);
if let Ok(dt) = OffsetDateTime::parse(phrase, &Rfc3339) {
-let dt_utc = dt.to_offset(UtcOffset::UTC);
-term.append_type_and_fast_value(DateTime::from_utc(dt_utc));
+let mut dt = DateTime::from_utc(dt.to_offset(UtcOffset::UTC));
+if truncate_date_for_search {
+    dt = dt.truncate(DATE_TIME_PRECISION_INDEXED);
+}
+term.append_type_and_fast_value(dt);
return Some(term);
}
if let Ok(i64_val) = str::parse::<i64>(phrase) {
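
The new `truncate_date_for_search` flag decides whether a date parsed from an RFC 3339 phrase is truncated to DATE_TIME_PRECISION_INDEXED before being appended to the JSON term. A minimal crate-internal sketch of a call site, not part of this commit; the import path mirrors the file location above, and the field name, JSON path, and date string are illustrative:

    use crate::core::json_utils::convert_to_fast_value_and_append_to_json_term;
    use crate::schema::{Schema, Term, STORED, TEXT};

    fn build_json_date_term() -> Term {
        // Hypothetical JSON field; any JSON field holding a date-like member behaves the same.
        let mut schema_builder = Schema::builder();
        let attributes = schema_builder.add_json_field("attributes", TEXT | STORED);
        let _schema = schema_builder.build();

        // Term targeting the `created_at` path inside the JSON field.
        let term = Term::from_field_json_path(attributes, "created_at", false);

        // Passing `true` truncates the parsed date to DATE_TIME_PRECISION_INDEXED,
        // so the search term lines up with what the indexing side now writes.
        convert_to_fast_value_and_append_to_json_term(term, "1985-04-12T23:20:50.52Z", true)
            .expect("valid RFC 3339 phrase")
    }
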
2 changes: 1 addition & 1 deletion src/indexer/merger.rs
@@ -673,7 +673,7 @@ mod tests {
]
);
assert_eq!(
-get_doc_ids(vec![Term::from_field_date(
+get_doc_ids(vec![Term::from_field_date_for_search(
date_field,
DateTime::from_utc(curr_time)
)])?,
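
`Term::from_field_date_for_search` acts here as the query-side counterpart of `Term::from_field_date`, truncating the value to the precision used for indexed date terms. A short sketch of building such a term, not part of this commit and written with crate-internal paths; the field name and options are illustrative:

    use crate::schema::{Schema, FAST, INDEXED, STORED};
    use crate::time::OffsetDateTime;
    use crate::{DateTime, Term};

    fn query_side_date_term() -> Term {
        // Hypothetical schema with one indexed date field.
        let mut schema_builder = Schema::builder();
        let occurred_at = schema_builder.add_date_field("occurred_at", INDEXED | STORED | FAST);
        let _schema = schema_builder.build();

        // The `_for_search` constructor truncates the value to the indexed precision,
        // so the term can match what was actually written for the same instant.
        Term::from_field_date_for_search(occurred_at, DateTime::from_utc(OffsetDateTime::now_utc()))
    }
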
11 changes: 6 additions & 5 deletions src/indexer/segment_writer.rs
@@ -64,9 +64,9 @@ impl SegmentWriter {
///
/// The arguments are defined as follows
///
-/// - memory_budget: most of the segment writer data (terms, and postings lists recorders)
-///   is stored in a memory arena. This makes it possible for the user to define
-///   the flushing behavior as a memory limit.
+/// - memory_budget: most of the segment writer data (terms, and postings lists recorders) is
+///   stored in a memory arena. This makes it possible for the user to define the flushing
+///   behavior as a memory limit.
/// - segment: The segment being written
/// - schema
pub fn for_segment(memory_budget_in_bytes: usize, segment: Segment) -> crate::Result<Self> {
@@ -431,7 +431,7 @@ mod tests {
use crate::query::{PhraseQuery, QueryParser};
use crate::schema::{
Document, IndexRecordOption, OwnedValue, Schema, TextFieldIndexing, TextOptions, Value,
-STORED, STRING, TEXT,
+DATE_TIME_PRECISION_INDEXED, STORED, STRING, TEXT,
};
use crate::store::{Compressor, StoreReader, StoreWriter};
use crate::time::format_description::well_known::Rfc3339;
@@ -651,7 +651,8 @@ mod tests {
set_fast_val(
DateTime::from_utc(
OffsetDateTime::parse("1985-04-12T23:20:50.52Z", &Rfc3339).unwrap(),
-),
+)
+.truncate(DATE_TIME_PRECISION_INDEXED),
term
)
.serialized_value_bytes()
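
The updated test expectation reflects that date values are truncated to DATE_TIME_PRECISION_INDEXED before the term value is serialized. A small sketch of that truncation step in isolation, not part of this commit, using the same timestamp as the test above; it assumes the constant is coarser than sub-second resolution (seconds at the time of writing):

    use crate::schema::DATE_TIME_PRECISION_INDEXED;
    use crate::time::format_description::well_known::Rfc3339;
    use crate::time::OffsetDateTime;
    use crate::DateTime;

    fn truncation_sketch() {
        // Parse the instant used in the test, then drop everything below the indexed precision.
        let parsed = OffsetDateTime::parse("1985-04-12T23:20:50.52Z", &Rfc3339).unwrap();
        let indexed_value = DateTime::from_utc(parsed).truncate(DATE_TIME_PRECISION_INDEXED);

        // Any instant that agrees with it down to the indexed precision truncates to the
        // same DateTime, which is what exact term lookups rely on.
        let same_second = OffsetDateTime::parse("1985-04-12T23:20:50Z", &Rfc3339).unwrap();
        assert_eq!(
            indexed_value,
            DateTime::from_utc(same_second).truncate(DATE_TIME_PRECISION_INDEXED)
        );
    }
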
2 changes: 1 addition & 1 deletion src/query/more_like_this/more_like_this.rs
@@ -241,7 +241,7 @@ impl MoreLikeThis {
let timestamp = value.as_datetime().ok_or_else(|| {
TantivyError::InvalidArgument("invalid value".to_string())
})?;
-let term = Term::from_field_date(field, timestamp);
+let term = Term::from_field_date_for_search(field, timestamp);
*term_frequencies.entry(term).or_insert(0) += 1;
}
}
18 changes: 9 additions & 9 deletions src/query/query_parser/query_parser.rs
@@ -137,7 +137,7 @@ fn trim_ast(logical_ast: LogicalAst) -> Option<LogicalAst> {
/// so-called default fields (as set up in the constructor).
///
/// Assuming that the default fields are `body` and `title`, and the query parser is set with
-/// conjunction as a default, our query will be interpreted as.
+/// conjunction as a default, our query will be interpreted as.
/// `(body:Barack OR title:Barack) AND (title:Obama OR body:Obama)`.
/// By default, all tokenized and indexed fields are default fields.
///
@@ -148,8 +148,7 @@ fn trim_ast(logical_ast: LogicalAst) -> Option<LogicalAst> {
/// `body:Barack OR (body:Barack OR text:Obama)` .
///
/// * boolean operators `AND`, `OR`. `AND` takes precedence over `OR`, so that `a AND b OR c` is
-///   interpreted
-///   as `(a AND b) OR c`.
+///   interpreted as `(a AND b) OR c`.
///
/// * In addition to the boolean operators, the `-`, `+` can help define. These operators are
/// sufficient to express all queries using boolean operators. For instance `x AND y OR z` can be
@@ -272,8 +271,7 @@ impl QueryParser {

/// Creates a `QueryParser`, given
/// * an index
-/// * a set of default fields used to search if no field is specifically defined
-///   in the query.
+/// * a set of default fields used to search if no field is specifically defined in the query.
pub fn for_index(index: &Index, default_fields: Vec<Field>) -> QueryParser {
QueryParser::new(index.schema(), default_fields, index.tokenizers().clone())
}
@@ -500,6 +498,7 @@ impl QueryParser {
convert_to_fast_value_and_append_to_json_term(
get_term_with_path(),
phrase,
+false,
)
{
Ok(term)
@@ -569,7 +568,7 @@ impl QueryParser {
}
FieldType::Date(_) => {
let dt = OffsetDateTime::parse(phrase, &Rfc3339)?;
-let dt_term = Term::from_field_date(field, DateTime::from_utc(dt));
+let dt_term = Term::from_field_date_for_search(field, DateTime::from_utc(dt));
Ok(vec![LogicalLiteral::Term(dt_term)])
}
FieldType::Str(ref str_options) => {
@@ -701,8 +700,8 @@ impl QueryParser {
///
/// The terms are identified by a triplet:
/// - tantivy field
-/// - field_path: tantivy has JSON fields. It is possible to target a member of a JSON
-///   object by naturally extending the json field name with a "." separated field_path
+/// - field_path: tantivy has JSON fields. It is possible to target a member of a JSON object by
+///   naturally extending the json field name with a "." separated field_path
/// - field_phrase: the phrase that is being searched.
///
/// The literal identifies the targeted field by a so-called *full field path*,
@@ -965,7 +964,8 @@ fn generate_literals_for_json_object(
|| Term::from_field_json_path(field, json_path, json_options.is_expand_dots_enabled());

// Try to convert the phrase to a fast value
-if let Some(term) = convert_to_fast_value_and_append_to_json_term(get_term_with_path(), phrase)
+if let Some(term) =
+    convert_to_fast_value_and_append_to_json_term(get_term_with_path(), phrase, true)
{
logical_literals.push(LogicalLiteral::Term(term));
}
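
Combined with the indexing-side change, the parser now truncates date terms the same way the writer does, so an exact date query with sub-second precision can match again. A round-trip sketch under those assumptions, not part of this commit and written as it might look in a crate-internal test; the field name, memory budget, and query string are illustrative:

    use crate::collector::Count;
    use crate::query::QueryParser;
    use crate::schema::{Schema, FAST, INDEXED, STORED};
    use crate::time::format_description::well_known::Rfc3339;
    use crate::time::OffsetDateTime;
    use crate::{doc, DateTime, Index};

    fn date_query_roundtrip() -> crate::Result<()> {
        let mut schema_builder = Schema::builder();
        let occurred_at = schema_builder.add_date_field("occurred_at", INDEXED | STORED | FAST);
        let index = Index::create_in_ram(schema_builder.build());

        // Index one document with sub-second precision; the indexed term is truncated
        // to the indexed precision on the writer side.
        let mut writer = index.writer(50_000_000)?;
        let instant = DateTime::from_utc(
            OffsetDateTime::parse("1985-04-12T23:20:50.52Z", &Rfc3339).unwrap(),
        );
        writer.add_document(doc!(occurred_at => instant))?;
        writer.commit()?;

        // The parser builds the date term with `from_field_date_for_search`, truncating the
        // query value to the same precision, so the exact-match query finds the document.
        let parser = QueryParser::for_index(&index, vec![occurred_at]);
        let query = parser.parse_query("occurred_at:\"1985-04-12T23:20:50.52Z\"")?;
        let count = index.reader()?.searcher().search(&query, &Count)?;
        assert_eq!(count, 1);
        Ok(())
    }
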
5 changes: 1 addition & 4 deletions src/query/range_query/range_query.rs
@@ -116,10 +116,7 @@ impl Query for RangeQuery {
let field_type = schema.get_field_entry(self.field()).field_type();

if field_type.is_fast() && is_type_valid_for_fastfield_range_query(self.value_type()) {
-Ok(Box::new(FastFieldRangeWeight::new(
-    self.field(),
-    self.bounds.clone(),
-)))
+Ok(Box::new(FastFieldRangeWeight::new(self.bounds.clone())))
} else {
if field_type.is_json() {
return Err(crate::TantivyError::InvalidArgument(