feat: add Greenplum 7 compatibility and AI SQL generation
Greenplum 7 (PG12-based) compatibility:
- Auto-detect GP via version() string; store DbFlavor per connection
- connect returns ConnectResult with version + flavor
- Fix pg_total_relation_size to use c.oid (universal, safer on both PG/GP)
- Branch is_identity column query for GP (lacks the column)
- Branch list_sessions wait_event fields for GP
- Exclude gp_toolkit schema in schema listing, completion, lookup, AI context
- Smart StatusBar version display: GP shows "GP 7.0.0 (PG 12.4)"
- Fix connection list spinner showing on all cards during connect

AI SQL generation (Ollama):
- Add AI settings, model selection, and generate_sql command
- Frontend AI panel with prompt input and SQL output

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
299
src-tauri/src/commands/ai.rs
Normal file
299
src-tauri/src/commands/ai.rs
Normal file
@@ -0,0 +1,299 @@
|
||||
use crate::error::{TuskError, TuskResult};
|
||||
use crate::models::ai::{
|
||||
AiSettings, OllamaChatMessage, OllamaChatRequest, OllamaChatResponse, OllamaModel,
|
||||
OllamaTagsResponse,
|
||||
};
|
||||
use crate::state::AppState;
|
||||
use sqlx::Row;
|
||||
use std::collections::BTreeMap;
|
||||
use std::fs;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use tauri::{AppHandle, Manager, State};
|
||||
|
||||
/// Builds an HTTP client tuned for LLM calls: 5 s to establish a
/// connection, 300 s overall request timeout (generation can be slow).
///
/// NOTE(review): on builder failure this falls back to `Client::default()`,
/// which carries no timeouts — presumably acceptable because `build()`
/// only fails on TLS/resolver backend init; confirm.
fn http_client() -> reqwest::Client {
    reqwest::Client::builder()
        .connect_timeout(Duration::from_secs(5))
        .timeout(Duration::from_secs(300))
        .build()
        .unwrap_or_default()
}
|
||||
|
||||
fn get_ai_settings_path(app: &AppHandle) -> TuskResult<std::path::PathBuf> {
|
||||
let dir = app
|
||||
.path()
|
||||
.app_data_dir()
|
||||
.map_err(|e| TuskError::Custom(e.to_string()))?;
|
||||
fs::create_dir_all(&dir)?;
|
||||
Ok(dir.join("ai_settings.json"))
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn get_ai_settings(app: AppHandle) -> TuskResult<AiSettings> {
|
||||
let path = get_ai_settings_path(&app)?;
|
||||
if !path.exists() {
|
||||
return Ok(AiSettings::default());
|
||||
}
|
||||
let data = fs::read_to_string(&path)?;
|
||||
let settings: AiSettings = serde_json::from_str(&data)?;
|
||||
Ok(settings)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn save_ai_settings(app: AppHandle, settings: AiSettings) -> TuskResult<()> {
|
||||
let path = get_ai_settings_path(&app)?;
|
||||
let data = serde_json::to_string_pretty(&settings)?;
|
||||
fs::write(&path, data)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn list_ollama_models(ollama_url: String) -> TuskResult<Vec<OllamaModel>> {
|
||||
let url = format!("{}/api/tags", ollama_url.trim_end_matches('/'));
|
||||
let resp = http_client()
|
||||
.get(&url)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| TuskError::Ai(format!("Cannot connect to Ollama at {}: {}", ollama_url, e)))?;
|
||||
|
||||
if !resp.status().is_success() {
|
||||
let status = resp.status();
|
||||
let body = resp.text().await.unwrap_or_default();
|
||||
return Err(TuskError::Ai(format!(
|
||||
"Ollama error ({}): {}",
|
||||
status, body
|
||||
)));
|
||||
}
|
||||
|
||||
let tags: OllamaTagsResponse = resp
|
||||
.json()
|
||||
.await
|
||||
.map_err(|e| TuskError::Ai(format!("Failed to parse Ollama response: {}", e)))?;
|
||||
|
||||
Ok(tags.models)
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn generate_sql(
|
||||
app: AppHandle,
|
||||
state: State<'_, Arc<AppState>>,
|
||||
connection_id: String,
|
||||
prompt: String,
|
||||
) -> TuskResult<String> {
|
||||
// Load AI settings
|
||||
let settings = {
|
||||
let path = get_ai_settings_path(&app)?;
|
||||
if !path.exists() {
|
||||
return Err(TuskError::Ai(
|
||||
"No AI model selected. Open AI settings to choose a model.".to_string(),
|
||||
));
|
||||
}
|
||||
let data = fs::read_to_string(&path)?;
|
||||
serde_json::from_str::<AiSettings>(&data)?
|
||||
};
|
||||
|
||||
if settings.model.is_empty() {
|
||||
return Err(TuskError::Ai(
|
||||
"No AI model selected. Open AI settings to choose a model.".to_string(),
|
||||
));
|
||||
}
|
||||
|
||||
// Build schema context
|
||||
let schema_text = build_schema_context(&state, &connection_id).await?;
|
||||
|
||||
let system_prompt = format!(
|
||||
"You are a PostgreSQL SQL generator. Given the database schema below and a natural language request, \
|
||||
output ONLY a valid PostgreSQL SQL query. Do not include any explanation, markdown formatting, \
|
||||
or code fences. Output raw SQL only.\n\n\
|
||||
RULES:\n\
|
||||
- Use FK relationships for correct JOIN conditions.\n\
|
||||
- timestamp - timestamp = interval. To get a number use EXTRACT(EPOCH FROM (ts1 - ts2)).\n\
|
||||
- interval cannot be cast to numeric directly.\n\
|
||||
- When using UNION/UNION ALL, ensure matching column types; cast enums to text if they differ.\n\
|
||||
- Use COALESCE for nullable columns in aggregations when appropriate.\n\
|
||||
- Prefer LEFT JOIN when the related row may not exist.\n\n\
|
||||
DATABASE SCHEMA:\n{}",
|
||||
schema_text
|
||||
);
|
||||
|
||||
let request = OllamaChatRequest {
|
||||
model: settings.model,
|
||||
messages: vec![
|
||||
OllamaChatMessage {
|
||||
role: "system".to_string(),
|
||||
content: system_prompt,
|
||||
},
|
||||
OllamaChatMessage {
|
||||
role: "user".to_string(),
|
||||
content: prompt,
|
||||
},
|
||||
],
|
||||
stream: false,
|
||||
};
|
||||
|
||||
let url = format!(
|
||||
"{}/api/chat",
|
||||
settings.ollama_url.trim_end_matches('/')
|
||||
);
|
||||
|
||||
let resp = http_client()
|
||||
.post(&url)
|
||||
.json(&request)
|
||||
.send()
|
||||
.await
|
||||
.map_err(|e| {
|
||||
TuskError::Ai(format!(
|
||||
"Cannot connect to Ollama at {}: {}",
|
||||
settings.ollama_url, e
|
||||
))
|
||||
})?;
|
||||
|
||||
if !resp.status().is_success() {
|
||||
let status = resp.status();
|
||||
let body = resp.text().await.unwrap_or_default();
|
||||
return Err(TuskError::Ai(format!(
|
||||
"Ollama error ({}): {}",
|
||||
status, body
|
||||
)));
|
||||
}
|
||||
|
||||
let chat_resp: OllamaChatResponse = resp
|
||||
.json()
|
||||
.await
|
||||
.map_err(|e| TuskError::Ai(format!("Failed to parse Ollama response: {}", e)))?;
|
||||
|
||||
let sql = clean_sql_response(&chat_resp.message.content);
|
||||
Ok(sql)
|
||||
}
|
||||
|
||||
/// Builds a compact, LLM-friendly description of the connected database:
/// one line per table — `schema.table(col type [PK] [NOT NULL], ...)` —
/// followed by one `FK: ...` line per foreign-key constraint.
///
/// # Errors
/// `TuskError::NotConnected` if no pool exists for `connection_id`;
/// `TuskError::Database` on query failure.
async fn build_schema_context(
    state: &AppState,
    connection_id: &str,
) -> TuskResult<String> {
    let pools = state.pools.read().await;
    let pool = pools
        .get(connection_id)
        .ok_or_else(|| TuskError::NotConnected(connection_id.to_string()))?;

    // Single query: all columns with real type names (enum types show actual name, not USER-DEFINED)
    // gp_toolkit is excluded alongside the standard system schemas so the
    // context stays clean on Greenplum as well.
    let col_rows = sqlx::query(
        "SELECT \
             c.table_schema, c.table_name, c.column_name, \
             CASE WHEN c.data_type = 'USER-DEFINED' THEN c.udt_name ELSE c.data_type END AS data_type, \
             c.is_nullable = 'NO' AS not_null, \
             EXISTS( \
                 SELECT 1 FROM information_schema.table_constraints tc \
                 JOIN information_schema.key_column_usage kcu \
                 ON tc.constraint_name = kcu.constraint_name AND tc.table_schema = kcu.table_schema \
                 WHERE tc.constraint_type = 'PRIMARY KEY' \
                 AND tc.table_schema = c.table_schema \
                 AND tc.table_name = c.table_name \
                 AND kcu.column_name = c.column_name \
             ) AS is_pk \
         FROM information_schema.columns c \
         WHERE c.table_schema NOT IN ('pg_catalog', 'information_schema', 'pg_toast', 'gp_toolkit') \
         ORDER BY c.table_schema, c.table_name, c.ordinal_position",
    )
    .fetch_all(pool)
    .await
    .map_err(TuskError::Database)?;

    // Group columns by schema.table; BTreeMap keeps the output ordering
    // deterministic across runs.
    let mut tables: BTreeMap<String, Vec<String>> = BTreeMap::new();
    for row in &col_rows {
        // Column indices follow the SELECT list above.
        let schema: String = row.get(0);
        let table: String = row.get(1);
        let col_name: String = row.get(2);
        let data_type: String = row.get(3);
        let not_null: bool = row.get(4);
        let is_pk: bool = row.get(5);

        let mut parts = vec![col_name, data_type];
        if is_pk {
            parts.push("PK".to_string());
        }
        if not_null {
            parts.push("NOT NULL".to_string());
        }

        let key = format!("{}.{}", schema, table);
        tables.entry(key).or_default().push(parts.join(" "));
    }

    let mut lines: Vec<String> = tables
        .into_iter()
        .map(|(key, cols)| format!("{}({})", key, cols.join(", ")))
        .collect();

    // Fetch FK relationships (appended after the table lines).
    let fks = fetch_foreign_keys_from_pool(pool).await?;
    for fk in &fks {
        lines.push(fk.clone());
    }

    Ok(lines.join("\n"))
}
|
||||
|
||||
/// Fetches all foreign-key constraints (outside the system schemas and
/// gp_toolkit) and renders each as
/// `FK: schema.table(cols) -> ref_schema.ref_table(ref_cols)`.
///
/// NOTE(review): composite-key columns are aggregated with
/// `array_agg(DISTINCT ... ORDER BY attname)`, so the local and referenced
/// column lists are each sorted alphabetically — positional pairing
/// between the two lists is not guaranteed; confirm acceptable for the
/// LLM context.
async fn fetch_foreign_keys_from_pool(
    pool: &sqlx::PgPool,
) -> TuskResult<Vec<String>> {
    let rows = sqlx::query(
        "SELECT \
             cn.nspname AS schema_name, cl.relname AS table_name, \
             array_agg(DISTINCT a.attname ORDER BY a.attname) AS columns, \
             cnf.nspname AS ref_schema, clf.relname AS ref_table, \
             array_agg(DISTINCT af.attname ORDER BY af.attname) AS ref_columns \
         FROM pg_constraint con \
         JOIN pg_class cl ON con.conrelid = cl.oid \
         JOIN pg_namespace cn ON cl.relnamespace = cn.oid \
         JOIN pg_class clf ON con.confrelid = clf.oid \
         JOIN pg_namespace cnf ON clf.relnamespace = cnf.oid \
         JOIN pg_attribute a ON a.attrelid = con.conrelid AND a.attnum = ANY(con.conkey) \
         JOIN pg_attribute af ON af.attrelid = con.confrelid AND af.attnum = ANY(con.confkey) \
         WHERE con.contype = 'f' \
         AND cn.nspname NOT IN ('pg_catalog','information_schema','pg_toast','gp_toolkit') \
         GROUP BY cn.nspname, cl.relname, cnf.nspname, clf.relname, con.oid",
    )
    .fetch_all(pool)
    .await
    .map_err(TuskError::Database)?;

    let fks: Vec<String> = rows
        .iter()
        .map(|r| {
            // Column indices follow the SELECT list above.
            let schema: String = r.get(0);
            let table: String = r.get(1);
            let cols: Vec<String> = r.get(2);
            let ref_schema: String = r.get(3);
            let ref_table: String = r.get(4);
            let ref_cols: Vec<String> = r.get(5);
            format!(
                "FK: {}.{}({}) -> {}.{}({})",
                schema,
                table,
                cols.join(", "),
                ref_schema,
                ref_table,
                ref_cols.join(", ")
            )
        })
        .collect();

    Ok(fks)
}
|
||||
|
||||
/// Normalizes a model response into bare SQL: trims whitespace and strips
/// a surrounding markdown code fence, including any language tag on the
/// opening fence (```sql, ```SQL, ```postgresql, ...).
///
/// Fix over the previous version: only the literal tags `sql`/`SQL` were
/// stripped, so a fence like ```postgresql leaked "postgresql" into the
/// returned query. Now any alphanumeric tag on the fence line is dropped.
fn clean_sql_response(raw: &str) -> String {
    let trimmed = raw.trim();
    let without_fences = if let Some(rest) = trimmed.strip_prefix("```") {
        let body = match rest.split_once('\n') {
            // Multi-line fence: if the first line is just a language tag,
            // drop that entire line.
            Some((tag, remainder))
                if !tag.trim().is_empty()
                    && tag.trim().chars().all(|c| c.is_ascii_alphanumeric()) =>
            {
                remainder
            }
            // First line already contains SQL (or is empty): keep it.
            Some(_) => rest,
            // Single-line fence: preserve prior handling of a "sql"/"SQL"
            // tag glued directly to the backticks.
            None => rest
                .strip_prefix("sql")
                .or_else(|| rest.strip_prefix("SQL"))
                .unwrap_or(rest),
        };
        body.strip_suffix("```").unwrap_or(body)
    } else {
        trimmed
    };
    without_fences.trim().to_string()
}
|
||||
@@ -1,12 +1,19 @@
|
||||
use crate::error::{TuskError, TuskResult};
|
||||
use crate::models::connection::ConnectionConfig;
|
||||
use crate::state::AppState;
|
||||
use crate::state::{AppState, DbFlavor};
|
||||
use serde::Serialize;
|
||||
use sqlx::PgPool;
|
||||
use sqlx::Row;
|
||||
use std::fs;
|
||||
use std::sync::Arc;
|
||||
use tauri::{AppHandle, Manager, State};
|
||||
|
||||
/// Payload returned by the `connect` command: the server's `version()`
/// output plus the flavor detected from it.
#[derive(Debug, Clone, Serialize)]
pub struct ConnectResult {
    /// Full result of `SELECT version()` on the new connection.
    pub version: String,
    /// Greenplum when the version string contains "greenplum"
    /// (case-insensitive); PostgreSQL otherwise.
    pub flavor: DbFlavor,
}
|
||||
|
||||
fn get_connections_path(app: &AppHandle) -> TuskResult<std::path::PathBuf> {
|
||||
let dir = app
|
||||
.path()
|
||||
@@ -72,6 +79,9 @@ pub async fn delete_connection(
|
||||
let mut ro = state.read_only.write().await;
|
||||
ro.remove(&id);
|
||||
|
||||
let mut flavors = state.db_flavors.write().await;
|
||||
flavors.remove(&id);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -92,7 +102,10 @@ pub async fn test_connection(config: ConnectionConfig) -> TuskResult<String> {
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
pub async fn connect(state: State<'_, Arc<AppState>>, config: ConnectionConfig) -> TuskResult<()> {
|
||||
pub async fn connect(
|
||||
state: State<'_, Arc<AppState>>,
|
||||
config: ConnectionConfig,
|
||||
) -> TuskResult<ConnectResult> {
|
||||
let pool = PgPool::connect(&config.connection_url())
|
||||
.await
|
||||
.map_err(TuskError::Database)?;
|
||||
@@ -103,13 +116,29 @@ pub async fn connect(state: State<'_, Arc<AppState>>, config: ConnectionConfig)
|
||||
.await
|
||||
.map_err(TuskError::Database)?;
|
||||
|
||||
// Detect database flavor via version()
|
||||
let row = sqlx::query("SELECT version()")
|
||||
.fetch_one(&pool)
|
||||
.await
|
||||
.map_err(TuskError::Database)?;
|
||||
let version: String = row.get(0);
|
||||
|
||||
let flavor = if version.to_lowercase().contains("greenplum") {
|
||||
DbFlavor::Greenplum
|
||||
} else {
|
||||
DbFlavor::PostgreSQL
|
||||
};
|
||||
|
||||
let mut pools = state.pools.write().await;
|
||||
pools.insert(config.id.clone(), pool);
|
||||
|
||||
let mut ro = state.read_only.write().await;
|
||||
ro.insert(config.id.clone(), true);
|
||||
|
||||
Ok(())
|
||||
let mut flavors = state.db_flavors.write().await;
|
||||
flavors.insert(config.id.clone(), flavor);
|
||||
|
||||
Ok(ConnectResult { version, flavor })
|
||||
}
|
||||
|
||||
#[tauri::command]
|
||||
@@ -149,6 +178,9 @@ pub async fn disconnect(state: State<'_, Arc<AppState>>, id: String) -> TuskResu
|
||||
let mut ro = state.read_only.write().await;
|
||||
ro.remove(&id);
|
||||
|
||||
let mut flavors = state.db_flavors.write().await;
|
||||
flavors.remove(&id);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -170,3 +202,11 @@ pub async fn get_read_only(
|
||||
) -> TuskResult<bool> {
|
||||
Ok(state.is_read_only(&connection_id).await)
|
||||
}
|
||||
|
||||
/// Tauri command: report the detected database flavor for a connection.
/// Unknown/untracked connection ids default to PostgreSQL (see
/// `AppState::get_flavor`).
#[tauri::command]
pub async fn get_db_flavor(
    state: State<'_, Arc<AppState>>,
    connection_id: String,
) -> TuskResult<DbFlavor> {
    Ok(state.get_flavor(&connection_id).await)
}
|
||||
|
||||
@@ -82,7 +82,7 @@ async fn search_database_inner(
|
||||
"SELECT table_schema, table_name, data_type \
|
||||
FROM information_schema.columns \
|
||||
WHERE column_name = $1 \
|
||||
AND table_schema NOT IN ('pg_catalog', 'information_schema', 'pg_toast')",
|
||||
AND table_schema NOT IN ('pg_catalog', 'information_schema', 'pg_toast', 'gp_toolkit')",
|
||||
)
|
||||
.bind(column_name)
|
||||
.fetch_all(pool)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use crate::error::{TuskError, TuskResult};
|
||||
use crate::models::management::*;
|
||||
use crate::state::AppState;
|
||||
use crate::state::{AppState, DbFlavor};
|
||||
use crate::utils::escape_ident;
|
||||
use sqlx::Row;
|
||||
use std::sync::Arc;
|
||||
@@ -514,22 +514,32 @@ pub async fn list_sessions(
|
||||
state: State<'_, Arc<AppState>>,
|
||||
connection_id: String,
|
||||
) -> TuskResult<Vec<SessionInfo>> {
|
||||
let flavor = state.get_flavor(&connection_id).await;
|
||||
let pools = state.pools.read().await;
|
||||
let pool = pools
|
||||
.get(&connection_id)
|
||||
.ok_or(TuskError::NotConnected(connection_id))?;
|
||||
|
||||
let rows = sqlx::query(
|
||||
let sql = if flavor == DbFlavor::Greenplum {
|
||||
"SELECT pid, usename, datname, state, query, \
|
||||
query_start::text, NULL::text as wait_event_type, NULL::text as wait_event, \
|
||||
client_addr::text \
|
||||
FROM pg_stat_activity \
|
||||
WHERE datname IS NOT NULL \
|
||||
ORDER BY query_start DESC NULLS LAST"
|
||||
} else {
|
||||
"SELECT pid, usename, datname, state, query, \
|
||||
query_start::text, wait_event_type, wait_event, \
|
||||
client_addr::text \
|
||||
FROM pg_stat_activity \
|
||||
WHERE datname IS NOT NULL \
|
||||
ORDER BY query_start DESC NULLS LAST",
|
||||
)
|
||||
.fetch_all(pool)
|
||||
.await
|
||||
.map_err(TuskError::Database)?;
|
||||
ORDER BY query_start DESC NULLS LAST"
|
||||
};
|
||||
|
||||
let rows = sqlx::query(sql)
|
||||
.fetch_all(pool)
|
||||
.await
|
||||
.map_err(TuskError::Database)?;
|
||||
|
||||
let sessions = rows
|
||||
.iter()
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use crate::error::{TuskError, TuskResult};
|
||||
use crate::models::schema::{ColumnDetail, ColumnInfo, ConstraintInfo, IndexInfo, SchemaObject};
|
||||
use crate::state::AppState;
|
||||
use crate::state::{AppState, DbFlavor};
|
||||
use sqlx::Row;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
@@ -37,14 +37,21 @@ pub async fn list_schemas_core(
|
||||
.get(connection_id)
|
||||
.ok_or_else(|| TuskError::NotConnected(connection_id.to_string()))?;
|
||||
|
||||
let rows = sqlx::query(
|
||||
let flavor = state.get_flavor(connection_id).await;
|
||||
let sql = if flavor == DbFlavor::Greenplum {
|
||||
"SELECT schema_name FROM information_schema.schemata \
|
||||
WHERE schema_name NOT IN ('pg_catalog', 'information_schema', 'pg_toast', 'gp_toolkit') \
|
||||
ORDER BY schema_name"
|
||||
} else {
|
||||
"SELECT schema_name FROM information_schema.schemata \
|
||||
WHERE schema_name NOT IN ('pg_catalog', 'information_schema', 'pg_toast') \
|
||||
ORDER BY schema_name",
|
||||
)
|
||||
.fetch_all(pool)
|
||||
.await
|
||||
.map_err(TuskError::Database)?;
|
||||
ORDER BY schema_name"
|
||||
};
|
||||
|
||||
let rows = sqlx::query(sql)
|
||||
.fetch_all(pool)
|
||||
.await
|
||||
.map_err(TuskError::Database)?;
|
||||
|
||||
Ok(rows.iter().map(|r| r.get::<String, _>(0)).collect())
|
||||
}
|
||||
@@ -70,7 +77,7 @@ pub async fn list_tables_core(
|
||||
let rows = sqlx::query(
|
||||
"SELECT t.table_name, \
|
||||
c.reltuples::bigint as row_count, \
|
||||
pg_total_relation_size(quote_ident(t.table_schema) || '.' || quote_ident(t.table_name))::bigint as size_bytes \
|
||||
pg_total_relation_size(c.oid)::bigint as size_bytes \
|
||||
FROM information_schema.tables t \
|
||||
LEFT JOIN pg_class c ON c.relname = t.table_name \
|
||||
AND c.relnamespace = (SELECT oid FROM pg_namespace WHERE nspname = $1) \
|
||||
@@ -387,20 +394,28 @@ pub async fn get_completion_schema(
|
||||
state: State<'_, Arc<AppState>>,
|
||||
connection_id: String,
|
||||
) -> TuskResult<HashMap<String, HashMap<String, Vec<String>>>> {
|
||||
let flavor = state.get_flavor(&connection_id).await;
|
||||
let pools = state.pools.read().await;
|
||||
let pool = pools
|
||||
.get(&connection_id)
|
||||
.ok_or(TuskError::NotConnected(connection_id))?;
|
||||
|
||||
let rows = sqlx::query(
|
||||
let sql = if flavor == DbFlavor::Greenplum {
|
||||
"SELECT table_schema, table_name, column_name \
|
||||
FROM information_schema.columns \
|
||||
WHERE table_schema NOT IN ('pg_catalog', 'information_schema', 'pg_toast', 'gp_toolkit') \
|
||||
ORDER BY table_schema, table_name, ordinal_position"
|
||||
} else {
|
||||
"SELECT table_schema, table_name, column_name \
|
||||
FROM information_schema.columns \
|
||||
WHERE table_schema NOT IN ('pg_catalog', 'information_schema', 'pg_toast') \
|
||||
ORDER BY table_schema, table_name, ordinal_position",
|
||||
)
|
||||
.fetch_all(pool)
|
||||
.await
|
||||
.map_err(TuskError::Database)?;
|
||||
ORDER BY table_schema, table_name, ordinal_position"
|
||||
};
|
||||
|
||||
let rows = sqlx::query(sql)
|
||||
.fetch_all(pool)
|
||||
.await
|
||||
.map_err(TuskError::Database)?;
|
||||
|
||||
let mut result: HashMap<String, HashMap<String, Vec<String>>> = HashMap::new();
|
||||
for row in &rows {
|
||||
@@ -426,25 +441,36 @@ pub async fn get_column_details(
|
||||
schema: String,
|
||||
table: String,
|
||||
) -> TuskResult<Vec<ColumnDetail>> {
|
||||
let flavor = state.get_flavor(&connection_id).await;
|
||||
let pools = state.pools.read().await;
|
||||
let pool = pools
|
||||
.get(&connection_id)
|
||||
.ok_or(TuskError::NotConnected(connection_id))?;
|
||||
|
||||
let rows = sqlx::query(
|
||||
let sql = if flavor == DbFlavor::Greenplum {
|
||||
"SELECT c.column_name, c.data_type, \
|
||||
c.is_nullable = 'YES' as is_nullable, \
|
||||
c.column_default, \
|
||||
false as is_identity \
|
||||
FROM information_schema.columns c \
|
||||
WHERE c.table_schema = $1 AND c.table_name = $2 \
|
||||
ORDER BY c.ordinal_position"
|
||||
} else {
|
||||
"SELECT c.column_name, c.data_type, \
|
||||
c.is_nullable = 'YES' as is_nullable, \
|
||||
c.column_default, \
|
||||
c.is_identity = 'YES' as is_identity \
|
||||
FROM information_schema.columns c \
|
||||
WHERE c.table_schema = $1 AND c.table_name = $2 \
|
||||
ORDER BY c.ordinal_position",
|
||||
)
|
||||
.bind(&schema)
|
||||
.bind(&table)
|
||||
.fetch_all(pool)
|
||||
.await
|
||||
.map_err(TuskError::Database)?;
|
||||
ORDER BY c.ordinal_position"
|
||||
};
|
||||
|
||||
let rows = sqlx::query(sql)
|
||||
.bind(&schema)
|
||||
.bind(&table)
|
||||
.fetch_all(pool)
|
||||
.await
|
||||
.map_err(TuskError::Database)?;
|
||||
|
||||
Ok(rows
|
||||
.iter()
|
||||
|
||||
@@ -20,6 +20,9 @@ pub enum TuskError {
|
||||
#[error("Connection is in read-only mode")]
|
||||
ReadOnly,
|
||||
|
||||
#[error("AI error: {0}")]
|
||||
Ai(String),
|
||||
|
||||
#[error("{0}")]
|
||||
Custom(String),
|
||||
}
|
||||
|
||||
@@ -43,6 +43,7 @@ pub fn run() {
|
||||
commands::connections::disconnect,
|
||||
commands::connections::set_read_only,
|
||||
commands::connections::get_read_only,
|
||||
commands::connections::get_db_flavor,
|
||||
// queries
|
||||
commands::queries::execute_query,
|
||||
// schema
|
||||
|
||||
44
src-tauri/src/models/ai.rs
Normal file
44
src-tauri/src/models/ai.rs
Normal file
@@ -0,0 +1,44 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// User-configurable AI settings, persisted as JSON
/// (`ai_settings.json` in the app data directory).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AiSettings {
    /// Base URL of the Ollama server; trailing slashes are tolerated by
    /// callers (trimmed before building request URLs).
    pub ollama_url: String,
    /// Selected model name; an empty string means no model chosen yet.
    pub model: String,
}

impl Default for AiSettings {
    /// Default local Ollama endpoint with no model selected.
    fn default() -> Self {
        Self {
            ollama_url: "http://localhost:11434".to_string(),
            model: String::new(),
        }
    }
}
|
||||
|
||||
/// One message in an Ollama chat exchange.
#[derive(Debug, Serialize, Deserialize)]
pub struct OllamaChatMessage {
    /// Chat role, e.g. "system" or "user" as used by this app.
    pub role: String,
    pub content: String,
}

/// Request body for Ollama's `/api/chat` endpoint.
#[derive(Debug, Serialize)]
pub struct OllamaChatRequest {
    pub model: String,
    pub messages: Vec<OllamaChatMessage>,
    /// Set to false by callers here: one complete response, no streaming.
    pub stream: bool,
}

/// Minimal subset of the `/api/chat` response this app consumes.
#[derive(Debug, Deserialize)]
pub struct OllamaChatResponse {
    pub message: OllamaChatMessage,
}

/// Response of Ollama's `/api/tags` endpoint (installed models).
#[derive(Debug, Deserialize)]
pub struct OllamaTagsResponse {
    pub models: Vec<OllamaModel>,
}

/// A single installed Ollama model; only the name is used here.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OllamaModel {
    pub name: String,
}
|
||||
@@ -1,12 +1,21 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sqlx::PgPool;
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
/// Database engine flavor behind a connection. Greenplum lacks some
/// catalog columns (e.g. `is_identity`, `wait_event`), so several queries
/// branch on this value. Serialized lowercase ("postgresql"/"greenplum")
/// for the frontend.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum DbFlavor {
    PostgreSQL,
    Greenplum,
}
|
||||
|
||||
/// Global application state shared across Tauri commands.
pub struct AppState {
    /// Open connection pools, keyed by connection id.
    pub pools: RwLock<HashMap<String, PgPool>>,
    /// Path to a config file — NOTE(review): usage not visible in this
    /// view; presumably set at startup, confirm against loader code.
    pub config_path: RwLock<Option<PathBuf>>,
    /// Per-connection read-only flag (defaults to true when absent).
    pub read_only: RwLock<HashMap<String, bool>>,
    /// Per-connection detected flavor, inserted at connect time and
    /// removed on disconnect/delete.
    pub db_flavors: RwLock<HashMap<String, DbFlavor>>,
}
|
||||
|
||||
impl AppState {
|
||||
@@ -15,6 +24,7 @@ impl AppState {
|
||||
pools: RwLock::new(HashMap::new()),
|
||||
config_path: RwLock::new(None),
|
||||
read_only: RwLock::new(HashMap::new()),
|
||||
db_flavors: RwLock::new(HashMap::new()),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,4 +32,9 @@ impl AppState {
|
||||
let map = self.read_only.read().await;
|
||||
map.get(id).copied().unwrap_or(true)
|
||||
}
|
||||
|
||||
    /// Looks up the detected flavor for a connection id; ids that were
    /// never recorded default to PostgreSQL.
    pub async fn get_flavor(&self, id: &str) -> DbFlavor {
        let map = self.db_flavors.read().await;
        map.get(id).copied().unwrap_or(DbFlavor::PostgreSQL)
    }
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user