# RuVector Rust SDK Examples

Core Rust SDK examples demonstrating RuVector's vector database capabilities.

## Examples

| File | Description |
|---|---|
| `basic_usage.rs` | Getting started with vector DB operations |
| `batch_operations.rs` | High-throughput batch ingestion |
| `rag_pipeline.rs` | Retrieval-Augmented Generation pipeline |
| `advanced_features.rs` | Hypergraphs, neural hashing, topology |
| `agenticdb_demo.rs` | AI agent memory with 5 tables |
| `gnn_example.rs` | Graph Neural Network layer usage |
## Running the Examples

```bash
# Run basic example
cargo run --example basic_usage

# Run with release optimizations
cargo run --release --example advanced_features
```

## Basic Usage

```rust
use ruvector_core::{VectorDB, VectorEntry, DbOptions, Result};

fn main() -> Result<()> {
    // Create database
    let mut options = DbOptions::default();
    options.dimensions = 128;
    let db = VectorDB::new(options)?;

    // Insert vector
    let entry = VectorEntry {
        id: Some("doc_001".to_string()),
        vector: vec![0.1; 128],
        metadata: None,
    };
    db.insert(entry)?;

    // Search
    let results = db.search(&vec![0.1; 128], 10)?;

    Ok(())
}
```

## Advanced Features

### Hypergraph Index

Multi-entity relationships with weighted edges.
```rust
use ruvector_core::advanced::*;

let mut index = HypergraphIndex::new(DistanceMetric::Cosine);
index.add_entity(1, vec![0.9, 0.1, 0.0]);
index.add_entity(2, vec![0.8, 0.2, 0.0]);

let edge = Hyperedge::new(
    vec![1, 2],
    "Co-cited papers".to_string(),
    vec![0.7, 0.2, 0.1],
    0.95,
);
index.add_hyperedge(edge)?;
```

### Temporal Hypergraph

Time-aware relationships for event tracking.
```rust
let mut temporal = TemporalHypergraph::new(DistanceMetric::Cosine);
temporal.add_entity_at_time(1, vec![0.5; 3], 1000);
temporal.add_entity_at_time(1, vec![0.6; 3], 2000); // Entity evolves
```

### Causal Memory

Cause-effect relationship chains.
```rust
let mut causal = CausalMemory::new(DistanceMetric::Cosine);
let id1 = causal.add_pattern(vec![0.9, 0.1], "initial event")?;
let id2 = causal.add_pattern_with_cause(
    vec![0.8, 0.2],
    "consequence",
    id1, // Caused by id1
    0.9  // High confidence
)?;
```

### Learned Index

ML-optimized index structure.
```rust
let mut learned = LearnedIndex::new(DistanceMetric::Cosine);
learned.set_model_type(ModelType::LinearRegression);
for (i, vec) in vectors.iter().enumerate() {
    learned.insert(i, vec.clone())?;
}
learned.train()?; // Train the model
```

### Neural Hashing

Locality-sensitive hashing.
```rust
let neural_hash = NeuralHash::new(128, 64, 8)?;
let hash = neural_hash.hash(&vector)?;
let candidates = neural_hash.query_approximate(&query, 10)?;
```

## AgenticDB: AI Agent Memory

| Table | Purpose |
|---|---|
| `reflexion_episodes` | Self-critique memories |
| `skill_library` | Consolidated patterns |
| `causal_memory` | Hypergraph relationships |
| `learning_sessions` | RL training data |
| `vector_db` | Core embeddings |
```rust
use ruvector_core::AgenticDB;

let db = AgenticDB::new(options)?;

// Store reflexion episode
db.store_episode(
    "Task description".to_string(),
    vec!["Action 1".to_string()],
    vec!["Error observed".to_string()],
    "What I learned".to_string(),
)?;

// Query similar past experiences
let episodes = db.query_similar_episodes(&embedding, 5)?;
```

## GNN Layer Usage

```rust
use ruvector_gnn::RuvectorLayer;

let gnn = RuvectorLayer::new(128, 256, 4, 0.1);
let node = vec![0.5; 128];
let neighbors = vec![vec![0.3; 128], vec![0.7; 128]];
let weights = vec![0.8, 0.6];
let updated = gnn.forward(&node, &neighbors, &weights);
```

## Performance Tips

- **Batch Operations**: Use `insert_batch` for bulk inserts
- **Dimension**: Match embedding dimensions exactly
- **Index Type**: Choose based on query patterns
- **Distance Metric**: Cosine for normalized, Euclidean for raw
## Dependencies

```toml
[dependencies]
ruvector-core = "0.1"
ruvector-gnn = "0.1"
```