docs(embeddings): finalize embeddings overhaul feature #72

Merged
36 changes: 36 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

5 changes: 3 additions & 2 deletions rig-core/Cargo.toml
@@ -29,12 +29,13 @@ rig-derive = { path = "./rig-core-derive", optional = true }
anyhow = "1.0.75"
tokio = { version = "1.34.0", features = ["full"] }
tracing-subscriber = "0.3.18"
tokio-test = "0.4.4"

[features]
derive = ["dep:rig-derive"]

[[test]]
name = "embeddable_macro"
name = "extract_embedding_fields_macro"
required-features = ["derive"]

[[example]]
@@ -47,4 +48,4 @@ required-features = ["derive"]

[[example]]
name = "vector_search_cohere"
required-features = ["derive"]
required-features = ["derive"]
7 changes: 1 addition & 6 deletions rig-core/examples/calculator_chatbot.rs
@@ -252,12 +252,7 @@ async fn main() -> Result<(), anyhow::Error> {
.await?;

let index = InMemoryVectorStore::default()
.add_documents(
embeddings
.into_iter()
.map(|(tool, embedding)| (tool.name.clone(), tool, embedding))
.collect(),
)?
.add_documents_with_id(embeddings, |tool| tool.name.clone())?
.index(embedding_model);

// Create RAG agent with a single context prompt and a dynamic tool source
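
All three examples switch to the same indexing shape: rather than collecting hand-built `(id, document, embeddings)` tuples for `add_documents`, callers now hand the `(document, embeddings)` pairs straight to `add_documents_with_id` together with a closure that derives each id. A condensed before/after, assembled only from fragments visible in this diff (the surrounding builder and `index` calls are unchanged):

// Before: ids were assembled by hand into (id, document, embeddings) tuples.
let index = InMemoryVectorStore::default()
    .add_documents(
        embeddings
            .into_iter()
            .map(|(tool, embedding)| (tool.name.clone(), tool, embedding))
            .collect(),
    )?
    .index(embedding_model);

// After: pass the pairs through and let the closure pick the id per document.
let index = InMemoryVectorStore::default()
    .add_documents_with_id(embeddings, |tool| tool.name.clone())?
    .index(embedding_model);
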
17 changes: 6 additions & 11 deletions rig-core/examples/rag.rs
@@ -5,13 +5,14 @@ use rig::{
embeddings::EmbeddingsBuilder,
providers::openai::{Client, TEXT_EMBEDDING_ADA_002},
vector_store::in_memory_store::InMemoryVectorStore,
Embeddable,
ExtractEmbeddingFields,
};
use serde::Serialize;

// Shape of data that needs to be RAG'ed.
// The definition field will be used to generate embeddings.
#[derive(Embeddable, Clone, Debug, Serialize, Eq, PartialEq, Default)]
// A vector search needs to be performed on the definitions, so we derive the `ExtractEmbeddingFields` trait for `FakeDefinition`
// and tag that field with `#[embed]`.
#[derive(ExtractEmbeddingFields, Serialize, Clone, Debug, Eq, PartialEq, Default)]
struct FakeDefinition {
id: String,
#[embed]
@@ -26,6 +27,7 @@ async fn main() -> Result<(), anyhow::Error> {

let embedding_model = openai_client.embedding_model(TEXT_EMBEDDING_ADA_002);

// Generate embeddings for the definitions of all the documents using the specified embedding model.
let embeddings = EmbeddingsBuilder::new(embedding_model.clone())
.documents(vec![
FakeDefinition {
@@ -54,14 +56,7 @@
.await?;

let index = InMemoryVectorStore::default()
.add_documents(
embeddings
.into_iter()
.map(|(fake_definition, embedding_vec)| {
(fake_definition.id.clone(), fake_definition, embedding_vec)
})
.collect(),
)?
.add_documents_with_id(embeddings, |definition| definition.id.clone())?
.index(embedding_model);

let rag_agent = openai_client.agent("gpt-4")
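
Pulling the rag.rs changes together, the updated indexing flow looks roughly like the sketch below. It deliberately reuses only pieces visible in this diff; the elided `definition` field, the sample document contents, and the `documents(...)?`/`build()` steps of `EmbeddingsBuilder` are assumptions filling in what the diff does not show.

use rig::{
    embeddings::EmbeddingsBuilder,
    providers::openai::{Client, TEXT_EMBEDDING_ADA_002},
    vector_store::in_memory_store::InMemoryVectorStore,
    ExtractEmbeddingFields,
};
use serde::Serialize;

// The #[embed]-tagged field is the one that gets embedded.
#[derive(ExtractEmbeddingFields, Serialize, Clone, Debug, Eq, PartialEq, Default)]
struct FakeDefinition {
    id: String,
    #[embed]
    definition: String, // assumed: the diff elides this field
}

#[tokio::main]
async fn main() -> Result<(), anyhow::Error> {
    // Assumes OPENAI_API_KEY is set in the environment.
    let openai_client = Client::from_env();
    let embedding_model = openai_client.embedding_model(TEXT_EMBEDDING_ADA_002);

    // Generate embeddings for the #[embed] field of every document.
    let embeddings = EmbeddingsBuilder::new(embedding_model.clone())
        .documents(vec![FakeDefinition {
            id: "doc0".to_string(),
            definition: "A made-up word used to test vector search.".to_string(),
        }])?
        .build()
        .await?;

    // Ids come from the closure; documents and embeddings pass through as-is.
    let index = InMemoryVectorStore::default()
        .add_documents_with_id(embeddings, |definition| definition.id.clone())?
        .index(embedding_model);

    let _ = index; // the rest of the example (agent setup, prompting) is unchanged
    Ok(())
}
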
7 changes: 1 addition & 6 deletions rig-core/examples/rag_dynamic_tools.rs
@@ -161,12 +161,7 @@ async fn main() -> Result<(), anyhow::Error> {
.await?;

let index = InMemoryVectorStore::default()
.add_documents(
embeddings
.into_iter()
.map(|(tool, embedding)| (tool.name.clone(), tool, embedding))
.collect(),
)?
.add_documents_with_id(embeddings, |tool| tool.name.clone())?
.index(embedding_model);

// Create RAG agent with a single context prompt and a dynamic tool source
13 changes: 3 additions & 10 deletions rig-core/examples/vector_search.rs
@@ -4,13 +4,13 @@ use rig::{
embeddings::EmbeddingsBuilder,
providers::openai::{Client, TEXT_EMBEDDING_ADA_002},
vector_store::{in_memory_store::InMemoryVectorStore, VectorStoreIndex},
Embeddable,
ExtractEmbeddingFields,
};
use serde::{Deserialize, Serialize};

// Shape of data that needs to be RAG'ed.
// The definition field will be used to generate embeddings.
#[derive(Embeddable, Clone, Deserialize, Debug, Serialize, Eq, PartialEq, Default)]
#[derive(ExtractEmbeddingFields, Clone, Deserialize, Debug, Serialize, Eq, PartialEq, Default)]
struct FakeDefinition {
id: String,
word: String,
@@ -57,14 +57,7 @@
.await?;

let index = InMemoryVectorStore::default()
.add_documents(
embeddings
.into_iter()
.map(|(fake_definition, embedding_vec)| {
(fake_definition.id.clone(), fake_definition, embedding_vec)
})
.collect(),
)?
.add_documents_with_id(embeddings, |definition| definition.id.clone())?
.index(model);

let results = index
13 changes: 3 additions & 10 deletions rig-core/examples/vector_search_cohere.rs
@@ -4,13 +4,13 @@ use rig::{
embeddings::EmbeddingsBuilder,
providers::cohere::{Client, EMBED_ENGLISH_V3},
vector_store::{in_memory_store::InMemoryVectorStore, VectorStoreIndex},
Embeddable,
ExtractEmbeddingFields,
};
use serde::{Deserialize, Serialize};

// Shape of data that needs to be RAG'ed.
// The definition field will be used to generate embeddings.
#[derive(Embeddable, Clone, Deserialize, Debug, Serialize, Eq, PartialEq, Default)]
#[derive(ExtractEmbeddingFields, Clone, Deserialize, Debug, Serialize, Eq, PartialEq, Default)]
struct FakeDefinition {
id: String,
word: String,
@@ -58,14 +58,7 @@
.await?;

let index = InMemoryVectorStore::default()
.add_documents(
embeddings
.into_iter()
.map(|(fake_definition, embedding_vec)| {
(fake_definition.id.clone(), fake_definition, embedding_vec)
})
.collect(),
)?
.add_documents_with_id(embeddings, |definition| definition.id.clone())?
.index(search_model);

let results = index
4 changes: 2 additions & 2 deletions rig-core/rig-core-derive/src/basic.rs
@@ -15,11 +15,11 @@ pub(crate) fn basic_embed_fields(data_struct: &DataStruct) -> impl Iterator<Item
})
}

/// Adds bounds to where clause that force all fields tagged with #[embed] to implement the Embeddable trait.
/// Adds bounds to where clause that force all fields tagged with #[embed] to implement the ExtractEmbeddingFields trait.
pub(crate) fn add_struct_bounds(generics: &mut syn::Generics, field_type: &syn::Type) {
let where_clause = generics.make_where_clause();

where_clause.predicates.push(parse_quote! {
#field_type: Embeddable
#field_type: ExtractEmbeddingFields
});
}
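
For context on what `add_struct_bounds` contributes: the generated impl needs each `#[embed]`-tagged field's type to implement the trait, which matters most when that type is generic. A hypothetical illustration (not from this repo) of the bound it injects:

// Hypothetical generic document type, for illustration only.
#[derive(ExtractEmbeddingFields)]
struct Document<T> {
    id: String,
    #[embed]
    content: T,
}

// The where-clause predicate pushed by add_struct_bounds makes the expansion
// read, roughly:
//
//     impl<T> ExtractEmbeddingFields for Document<T>
//     where
//         T: ExtractEmbeddingFields
//     { /* ... */ }
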
rig-core/rig-core-derive/src/{embeddable.rs → extract_embedding_fields.rs}
@@ -18,7 +18,7 @@ pub(crate) fn expand_derive_embedding(input: &mut syn::DeriveInput) -> syn::Resu
let (custom_targets, custom_target_size) = data_struct.custom()?;

// If there are no fields tagged with #[embed] or #[embed(embed_with = "...")], return an empty TokenStream.
// ie. do not implement Embeddable trait for the struct.
// ie. do not implement `ExtractEmbeddingFields` trait for the struct.
if basic_target_size + custom_target_size == 0 {
return Err(syn::Error::new_spanned(
name,
@@ -34,26 +34,26 @@ pub(crate) fn expand_derive_embedding(input: &mut syn::DeriveInput) -> syn::Resu
_ => {
return Err(syn::Error::new_spanned(
input,
"Embeddable derive macro should only be used on structs",
"ExtractEmbeddingFields derive macro should only be used on structs",
))
}
};

let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();

let gen = quote! {
// Note: Embeddable trait is imported with the macro.
// Note: `ExtractEmbeddingFields` trait is imported with the macro.

impl #impl_generics Embeddable for #name #ty_generics #where_clause {
type Error = rig::embeddings::embeddable::EmbeddableError;
impl #impl_generics ExtractEmbeddingFields for #name #ty_generics #where_clause {
type Error = rig::embeddings::extract_embedding_fields::ExtractEmbeddingFieldsError;

fn embeddable(&self) -> Result<rig::OneOrMany<String>, Self::Error> {
fn extract_embedding_fields(&self) -> Result<rig::OneOrMany<String>, Self::Error> {
#target_stream;

rig::OneOrMany::merge(
embed_targets.into_iter()
.collect::<Result<Vec<_>, _>>()?
).map_err(rig::embeddings::embeddable::EmbeddableError::new)
).map_err(rig::embeddings::extract_embedding_fields::ExtractEmbeddingFieldsError::new)
}
}
};
@@ -87,7 +87,7 @@ impl StructParser for DataStruct {
if !embed_targets.is_empty() {
(
quote! {
vec![#(#embed_targets.embeddable()),*]
vec![#(#embed_targets.extract_embedding_fields()),*]
},
embed_targets.len(),
)
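
To make the `quote!` expansion above concrete, here is approximately what the derive now emits for a struct with a single `String` field tagged `#[embed]`. The struct and field names are hypothetical, and the sketch assumes rig ships an `ExtractEmbeddingFields` impl for `String` with an error type compatible with the generated `?` conversions.

use rig::ExtractEmbeddingFields;

struct WordDefinition {
    id: String,
    definition: String, // imagine this field carries #[embed]
}

impl ExtractEmbeddingFields for WordDefinition {
    type Error = rig::embeddings::extract_embedding_fields::ExtractEmbeddingFieldsError;

    fn extract_embedding_fields(&self) -> Result<rig::OneOrMany<String>, Self::Error> {
        // #target_stream expands to one extraction per tagged field.
        let embed_targets = vec![self.definition.extract_embedding_fields()];

        // Collect the per-field results, then merge them into one OneOrMany<String>.
        rig::OneOrMany::merge(
            embed_targets
                .into_iter()
                .collect::<Result<Vec<_>, _>>()?,
        )
        .map_err(rig::embeddings::extract_embedding_fields::ExtractEmbeddingFieldsError::new)
    }
}
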
6 changes: 3 additions & 3 deletions rig-core/rig-core-derive/src/lib.rs
@@ -4,18 +4,18 @@ use syn::{parse_macro_input, DeriveInput};

mod basic;
mod custom;
mod embeddable;
mod extract_embedding_fields;

pub(crate) const EMBED: &str = "embed";

// https://doc.rust-lang.org/book/ch19-06-macros.html#how-to-write-a-custom-derive-macro
// https://doc.rust-lang.org/reference/procedural-macros.html

#[proc_macro_derive(Embeddable, attributes(embed))]
#[proc_macro_derive(ExtractEmbeddingFields, attributes(embed))]
pub fn derive_embedding_trait(item: TokenStream) -> TokenStream {
let mut input = parse_macro_input!(item as DeriveInput);

embeddable::expand_derive_embedding(&mut input)
extract_embedding_fields::expand_derive_embedding(&mut input)
.unwrap_or_else(syn::Error::into_compile_error)
.into()
}