42 changes: 8 additions & 34 deletions Cargo.lock

Some generated files are not rendered by default.

3 changes: 3 additions & 0 deletions Cargo.toml
@@ -31,6 +31,9 @@ serde = "1.0.219"
serde_json = "1.0"
sqlparser = "0.51"
sqlx = { version = "0.8", features = ["runtime-tokio-rustls", "postgres", "uuid", "time"] }
tracing = "0.1"
tracing-log = "0.1"
tracing-subscriber = "0.3.20"
thiserror = "2.0.12"
tiktoken-rs = "0.7.0"
tokio = { version = "1.0", features = ["full"] }
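
These three crates typically work together: tracing supplies the instrumentation macros, tracing-subscriber installs a collector, and tracing-log forwards records emitted through the older log facade. A minimal initialization sketch under that assumption (the helper name is illustrative, not part of this diff):

use tracing_log::LogTracer;
use tracing_subscriber::FmtSubscriber;

// Hypothetical helper; not part of this diff.
fn init_tracing() {
    // Forward records from the `log` facade into `tracing`.
    LogTracer::init().expect("failed to install log bridge");
    // Install a formatting subscriber as the global default collector.
    let subscriber = FmtSubscriber::builder().finish();
    tracing::subscriber::set_global_default(subscriber)
        .expect("failed to set global subscriber");
}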
3 changes: 2 additions & 1 deletion extension/Makefile
@@ -42,6 +42,7 @@ clean:
setup.dependencies: install-pg_cron install-pgvector install-pgmq install-vectorscale
setup.shared_preload_libraries:
echo "shared_preload_libraries = 'pg_cron, vectorize'" >> ~/.pgrx/data-${PG_VERSION}/postgresql.conf
echo "cron.database_name = 'postgres'" >> ~/.pgrx/data-${PG_VERSION}/postgresql.conf
setup.urls:
echo "vectorize.embedding_service_url = 'http://localhost:3000/v1'" >> ~/.pgrx/data-${PG_VERSION}/postgresql.conf
echo "vectorize.ollama_service_url = 'http://localhost:3001'" >> ~/.pgrx/data-${PG_VERSION}/postgresql.conf
@@ -93,7 +94,7 @@ test-integration:
cargo test ${TEST_NAME} -- --ignored --test-threads=1 --nocapture

test-unit:
cargo test ${TEST_NAME} -- --test-threads=1
cargo test ${TEST_NAME} -- --test-threads=1 --nocapture

test-version:
git fetch --tags
34 changes: 0 additions & 34 deletions extension/src/api.rs
@@ -176,40 +176,6 @@ fn encode(
Ok(transform(input, &model, api_key).remove(0))
}

#[allow(clippy::too_many_arguments)]
#[deprecated(since = "0.22.0", note = "Please use vectorize.table() instead")]
#[pg_extern]
fn init_rag(
agent_name: &str,
table_name: &str,
unique_record_id: &str,
// column that has the data we want to be able to chat with
column: &str,
schema: default!(&str, "'public'"),
index_dist_type: default!(types::IndexDist, "'pgv_hnsw_cosine'"),
// transformer model to use in vector-search
transformer: default!(&str, "'sentence-transformers/all-MiniLM-L6-v2'"),
table_method: default!(types::TableMethod, "'join'"),
schedule: default!(&str, "'* * * * *'"),
) -> Result<String> {
pgrx::warning!("DEPRECATED: vectorize.init_rag() will be removed in a future version. Please use vectorize.table() instead.");
// chat only supports single columns transform
let columns = vec![column.to_string()];
let transformer_model = Model::new(transformer)?;
init_table(
agent_name,
schema,
table_name,
columns,
unique_record_id,
None,
index_dist_type.into(),
&transformer_model,
table_method.into(),
schedule,
)
}

/// creates a table indexed with embeddings for chat completion workloads
#[pg_extern]
fn rag(
2 changes: 1 addition & 1 deletion extension/src/executor.rs
@@ -25,7 +25,7 @@ pub fn batch_texts(
return TableIterator::new(vec![record_ids].into_iter().map(|arr| (arr,)));
}

let num_batches = (total_records + batch_size - 1) / batch_size;
let num_batches = total_records.div_ceil(batch_size);

let mut batches = Vec::with_capacity(num_batches);

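
usize::div_ceil (stable since Rust 1.73) computes the same ceiling division as the replaced expression, without the overflow-prone intermediate sum. A minimal equivalence check:

// Both forms round 10 / 3 up to 4; div_ceil avoids the
// `total_records + batch_size - 1` intermediate, which can
// overflow near usize::MAX.
let total_records: usize = 10;
let batch_size: usize = 3;
assert_eq!((total_records + batch_size - 1) / batch_size, 4);
assert_eq!(total_records.div_ceil(batch_size), 4);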
1 change: 0 additions & 1 deletion extension/src/guc.rs
@@ -250,7 +250,6 @@ pub fn get_guc(guc: VectorizeGuc) -> Option<String> {
}
}

#[allow(dead_code)]
fn handle_cstr(cstr: &CStr) -> Result<String> {
if let Ok(s) = cstr.to_str() {
Ok(s.to_owned())
54 changes: 53 additions & 1 deletion extension/src/search.rs
@@ -429,7 +429,7 @@ pub fn cosine_similarity_search(
num_results,
where_clause,
),
TableMethod::join => query::join_table_cosine_similarity(
TableMethod::join => join_table_cosine_similarity(
project,
&job_params.schema,
&job_params.relation,
@@ -452,6 +452,52 @@
})
}

pub fn join_table_cosine_similarity(
project: &str,
schema: &str,
table: &str,
join_key: &str,
return_columns: &[String],
num_results: i32,
where_clause: Option<String>,
) -> String {
let cols = &return_columns
.iter()
.map(|s| format!("t0.{s}"))
.collect::<Vec<_>>()
.join(",");
let where_str = if let Some(w) = where_clause {
prepare_filter(&w, join_key)
} else {
"".to_string()
};
let inner_query = format!(
"
SELECT
{join_key},
1 - (embeddings <=> $1::vector) AS similarity_score
FROM vectorize._embeddings_{project}
ORDER BY similarity_score DESC
"
);
format!(
"
SELECT to_jsonb(t) as results
FROM (
SELECT {cols}, t1.similarity_score
FROM
(
{inner_query}
) t1
INNER JOIN {schema}.{table} t0 on t0.{join_key} = t1.{join_key}
{where_str}
) t
ORDER BY t.similarity_score DESC
LIMIT {num_results};
"
)
}

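An illustrative call showing the shape of the SQL this builder emits; the project, table, and column names here are examples, not values from this diff:

// Hypothetical inputs for illustration only.
let sql = join_table_cosine_similarity(
    "products_search",                    // project
    "public",                             // schema
    "products",                           // table
    "product_id",                         // join_key
    &["product_name".to_string()],        // return_columns
    5,                                    // num_results
    Some("product_id < 100".to_string()), // where_clause
);
// The generated query ranks rows from vectorize._embeddings_products_search
// by similarity_score, joins back to public.products on product_id, applies
// the rewritten filter "AND t0.product_id < 100", and returns the top 5 rows.
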
fn single_table_cosine_similarity(
project: &str,
schema: &str,
@@ -482,3 +528,9 @@ fn single_table_cosine_similarity(
cols = return_columns.join(", "),
)
}

// transform the user's where_sql into the format the search query expects
fn prepare_filter(filter: &str, pkey: &str) -> String {
let wc = filter.replace(pkey, &format!("t0.{pkey}"));
format!("AND {wc}")
}
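
prepare_filter qualifies the primary key with the t0 alias and prefixes AND so the result can be appended after the join condition. Note that str::replace rewrites every occurrence of the key string, including inside longer identifiers. A small check under that assumption:

// The key is qualified with the t0 alias and the clause gains a leading AND.
assert_eq!(
    prepare_filter("product_id = 42", "product_id"),
    "AND t0.product_id = 42"
);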
2 changes: 0 additions & 2 deletions extension/tests/util.rs
@@ -7,7 +7,6 @@ pub mod common {
use sqlx::{Pool, Postgres, Row};
use url::{ParseError, Url};

#[allow(dead_code)]
#[derive(FromRow, Debug, serde::Deserialize)]
pub struct SearchResult {
pub product_id: i32,
@@ -16,7 +15,6 @@ pub mod common {
pub similarity_score: f64,
}

#[allow(dead_code)]
#[derive(FromRow, Debug, Serialize)]
pub struct SearchJSON {
pub search_results: serde_json::Value,
3 changes: 3 additions & 0 deletions proxy/Cargo.toml
@@ -13,5 +13,8 @@ serde_json = { workspace = true }
sqlx = { workspace = true}
thiserror = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
tracing-subscriber = { workspace = true }
url = { workspace = true }

pgwire = { version = "0.30", features = ["server-api-aws-lc-rs"] }
63 changes: 61 additions & 2 deletions proxy/src/proxy.rs
@@ -1,8 +1,15 @@
use log::{error, info};
use std::collections::HashMap;
use std::net::SocketAddr;
use std::net::ToSocketAddrs;
use std::sync::Arc;
use std::time::Duration;
use tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt};
use tokio::net::TcpStream;
use tokio::net::{TcpListener, TcpStream};
use tokio::sync::RwLock;
use tokio::time::timeout;
use tracing::{error, info};
use url::Url;
use vectorize_core::types::VectorizeJob;

use super::message_parser::{log_message_processing, try_parse_complete_message};
use super::protocol::{BUFFER_SIZE, ProxyConfig, WireProxyError};
@@ -129,3 +136,55 @@ where
info!("Standard proxy stream closed: {total_bytes} bytes transferred");
Ok(())
}

pub async fn start_postgres_proxy(
proxy_port: u16,
database_url: String,
job_cache: Arc<RwLock<HashMap<String, VectorizeJob>>>,
db_pool: sqlx::PgPool,
) -> Result<(), Box<dyn std::error::Error>> {
let bind_address = "0.0.0.0";
let timeout = 30;

let listen_addr: SocketAddr = format!("{}:{}", bind_address, proxy_port).parse()?;

let url = Url::parse(&database_url)?;
let postgres_host = url.host_str().ok_or("database URL is missing a host")?;
// Fall back to the default PostgreSQL port when the URL omits one,
// rather than unwrapping an Option that is None in that case.
let postgres_port = url.port().unwrap_or(5432);

let postgres_addr: SocketAddr = format!("{postgres_host}:{postgres_port}")
.to_socket_addrs()?
.next()
.ok_or("Failed to resolve PostgreSQL host address")?;

let config = Arc::new(ProxyConfig {
postgres_addr,
timeout: Duration::from_secs(timeout),
jobmap: job_cache,
db_pool,
prepared_statements: Arc::new(RwLock::new(HashMap::new())),
});

info!("Proxy listening on: {listen_addr}");
info!("Forwarding to PostgreSQL at: {postgres_addr}");

let listener = TcpListener::bind(listen_addr).await?;

loop {
match listener.accept().await {
Ok((client_stream, client_addr)) => {
info!("New proxy connection from: {client_addr}");

let config = Arc::clone(&config);
tokio::spawn(async move {
if let Err(e) = handle_connection_with_timeout(client_stream, config).await {
error!("Proxy connection error from {client_addr}: {e}");
}
});
}
Err(e) => {
error!("Failed to accept proxy connection: {e}");
}
}
}
}
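
A hedged caller sketch for wiring this up inside an async context; the port, URL, and empty cache below are placeholders, not values taken from this diff:

// Hypothetical wiring; values are placeholders.
let database_url = "postgres://postgres:postgres@localhost:5432/postgres".to_string();
let job_cache = Arc::new(RwLock::new(HashMap::new()));
let pool = sqlx::PgPool::connect(&database_url).await?;

// Serve the proxy on port 5433 without blocking the caller.
tokio::spawn(start_postgres_proxy(5433, database_url, job_cache, pool));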
6 changes: 3 additions & 3 deletions server/Cargo.toml
@@ -14,6 +14,9 @@ vectorize_core = { package = "vectorize-core", path = "../core" }
vectorize_worker = { package = "vectorize-worker", path = "../worker" }
vectorize_proxy = { package = "vectorize-proxy", path = "../proxy" }
pgmq = { workspace = true }
tracing-subscriber = { workspace = true }
tracing = { workspace = true }
tracing-log = { workspace = true }

actix-cors = "0.7.1"
actix-http = "3.11.0"
@@ -26,8 +29,6 @@ bytes = "1.10.1"
chrono = {version = "0.4.41", features = ["serde"] }
clap = { version = "4.0", features = ["derive"] }
env = "1.0.1"
tracing = "0.1"
tracing-log = "0.1"
fallible-iterator = "0.3.0"
futures = "0.3.31"
lazy_static = "1.5.0"
@@ -46,7 +47,6 @@ thiserror = "2.0.12"
tiktoken-rs = "0.7.0"
tokio = { version = "1.0", features = ["full"] }
tokio-postgres = "0.7"
tracing-subscriber = "0.3"
url = "2.2"
utoipa = { version = "4", features = ["actix_extras", "chrono", "uuid"] }
utoipa-swagger-ui = { version = "7", features = ["actix-web"] }