Compare commits

..

9 Commits

Author SHA1 Message Date
d431352816 Merge pull request 'main' (#2) from main into master
Reviewed-on: #2
2026-04-08 10:04:43 +00:00
931e2b9408 ci: skip AppImage bundle, build only deb and rpm
All checks were successful
CI / lint-and-build (push) Successful in 9m8s
2026-04-08 12:24:23 +03:00
02ea9db25d ci: set APPIMAGE_EXTRACT_AND_RUN for linuxdeploy in container
Some checks failed
CI / lint-and-build (push) Failing after 9m42s
2026-04-08 12:02:46 +03:00
318210bdd8 ci: add xdg-utils for AppImage bundling
Some checks failed
CI / lint-and-build (push) Failing after 9m42s
2026-04-08 11:47:51 +03:00
11e35fcb5c chore: bump MSRV to 1.80.0 for LazyLock support
Some checks failed
CI / lint-and-build (push) Failing after 9m7s
2026-04-08 11:35:56 +03:00
50214fec0f perf: optimize backend — HTTP client, DB queries, error handling, and config cleanup
Some checks failed
CI / lint-and-build (push) Failing after 2m55s
2026-04-08 10:50:40 +03:00
28aa4ef8cc style: apply rustfmt to docker and snapshot commands
Some checks failed
CI / lint-and-build (push) Failing after 2m51s
2026-04-08 10:38:07 +03:00
ba9b58ff3a ci: replace actions/checkout with manual git clone for act runner
Some checks failed
CI / lint-and-build (push) Failing after 1m12s
The act-based Gitea runner executes JS actions inside the specified
container, but ubuntu:22.04 has no Node.js. Use git clone directly
to avoid the dependency.
2026-04-08 10:23:58 +03:00
33b07a31da ci: add workflow_dispatch trigger to CI workflow
Some checks failed
CI / lint-and-build (push) Failing after 2s
2026-04-08 10:09:30 +03:00
18 changed files with 152 additions and 41 deletions

View File

@@ -5,6 +5,7 @@ on:
branches: [main]
pull_request:
branches: [main]
workflow_dispatch:
jobs:
lint-and-build:
@@ -13,16 +14,19 @@ jobs:
image: ubuntu:22.04
env:
DEBIAN_FRONTEND: noninteractive
APPIMAGE_EXTRACT_AND_RUN: "1"
steps:
- uses: actions/checkout@v4
- name: Install system dependencies
run: |
apt-get update
apt-get install -y \
build-essential curl wget pkg-config \
build-essential curl wget pkg-config git ca-certificates \
libgtk-3-dev libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev \
libssl-dev git ca-certificates
libssl-dev xdg-utils
- name: Checkout
run: |
git clone --depth=1 --branch="${GITHUB_REF_NAME}" "${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}.git" .
- name: Install Node.js 22
run: |
@@ -58,4 +62,4 @@ jobs:
- name: Build Tauri app
run: |
. "$HOME/.cargo/env"
npm run tauri build
npm run tauri build -- --bundles deb,rpm

1
src-tauri/Cargo.lock generated
View File

@@ -4704,7 +4704,6 @@ dependencies = [
"bytes",
"libc",
"mio",
"parking_lot",
"pin-project-lite",
"signal-hook-registry",
"socket2",

View File

@@ -6,7 +6,7 @@ authors = ["you"]
license = ""
repository = ""
edition = "2021"
rust-version = "1.77.2"
rust-version = "1.80.0"
[lib]
name = "tusk_lib"
@@ -21,7 +21,7 @@ tauri-plugin-shell = "2"
tauri-plugin-dialog = "2"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
tokio = { version = "1", features = ["full"] }
tokio = { version = "1", features = ["rt-multi-thread", "sync", "time", "net", "macros", "process", "io-util"] }
sqlx = { version = "0.8", features = ["runtime-tokio-rustls", "postgres", "json", "chrono", "uuid", "bigdecimal"] }
chrono = { version = "0.4", features = ["serde"] }
uuid = { version = "1", features = ["serde"] }

View File

@@ -36,7 +36,7 @@ fn get_ai_settings_path(app: &AppHandle) -> TuskResult<std::path::PathBuf> {
let dir = app
.path()
.app_data_dir()
.map_err(|e| TuskError::Custom(e.to_string()))?;
.map_err(|e| TuskError::Config(e.to_string()))?;
fs::create_dir_all(&dir)?;
Ok(dir.join("ai_settings.json"))
}
@@ -1037,7 +1037,7 @@ fn simplify_default(raw: &str) -> String {
fn validate_select_statement(sql: &str) -> TuskResult<()> {
let sql_upper = sql.trim().to_uppercase();
if !sql_upper.starts_with("SELECT") {
return Err(TuskError::Custom(
return Err(TuskError::Validation(
"Validation query must be a SELECT statement".to_string(),
));
}
@@ -1047,7 +1047,7 @@ fn validate_select_statement(sql: &str) -> TuskResult<()> {
fn validate_index_ddl(ddl: &str) -> TuskResult<()> {
let ddl_upper = ddl.trim().to_uppercase();
if !ddl_upper.starts_with("CREATE INDEX") && !ddl_upper.starts_with("DROP INDEX") {
return Err(TuskError::Custom(
return Err(TuskError::Validation(
"Only CREATE INDEX and DROP INDEX statements are allowed".to_string(),
));
}

View File

@@ -18,7 +18,7 @@ pub(crate) fn get_connections_path(app: &AppHandle) -> TuskResult<std::path::Pat
let dir = app
.path()
.app_data_dir()
.map_err(|e| TuskError::Custom(e.to_string()))?;
.map_err(|e| TuskError::Config(e.to_string()))?;
fs::create_dir_all(&dir)?;
Ok(dir.join("connections.json"))
}

View File

@@ -29,6 +29,7 @@ pub async fn get_table_data(
let mut where_clause = String::new();
if let Some(ref f) = filter {
if !f.trim().is_empty() {
validate_filter(f)?;
where_clause = format!(" WHERE {}", f);
}
}
@@ -285,6 +286,75 @@ pub async fn delete_rows(
Ok(total_affected)
}
/// Rejects filter strings that contain SQL statements capable of mutating data.
/// This blocks writable CTEs and other injection attempts that could bypass
/// SET TRANSACTION READ ONLY (which PostgreSQL does not enforce inside CTEs
/// in all versions).
///
/// Returns `Ok(())` when the filter looks like a plain boolean expression,
/// or `TuskError::Validation` describing the first offending construct.
fn validate_filter(filter: &str) -> TuskResult<()> {
    let upper = filter.to_ascii_uppercase();
    // Remove string literals to avoid false positives on keywords inside quoted values
    let sanitized = remove_string_literals(&upper);

    // Statement separators and comment markers let an attacker smuggle a
    // second statement or hide a keyword from substring scanning
    // (e.g. `1=1; DELETE/*x*/FROM t`), so reject them before keyword checks.
    if sanitized.contains(';') {
        return Err(TuskError::Validation(
            "Filter must not contain statement separators (';')".into(),
        ));
    }
    if sanitized.contains("--") || sanitized.contains("/*") {
        return Err(TuskError::Validation(
            "Filter must not contain SQL comments".into(),
        ));
    }

    const FORBIDDEN: &[&str] = &[
        "INSERT", "UPDATE", "DELETE", "DROP", "ALTER", "TRUNCATE", "CREATE", "GRANT", "REVOKE",
        "COPY", "EXECUTE", "CALL",
    ];
    // Scan whole words instead of `"KEYWORD "` substrings: the trailing-space
    // form missed keywords followed by a newline, tab, '(' or end-of-string.
    let mut has_into = false;
    let mut has_select = false;
    for word in sanitized.split(|c: char| !c.is_ascii_alphanumeric() && c != '_') {
        if FORBIDDEN.contains(&word) {
            return Err(TuskError::Validation(format!(
                "Filter contains forbidden SQL keyword: {}",
                word
            )));
        }
        has_into |= word == "INTO";
        has_select |= word == "SELECT";
    }
    if has_into && has_select {
        return Err(TuskError::Validation(
            "Filter contains forbidden SELECT INTO clause".into(),
        ));
    }
    Ok(())
}
/// Blanks out the interior of single-quoted SQL string literals (replacing
/// each character with a space) so that subsequent keyword detection cannot
/// trigger on quoted values such as `status = 'DELETE_PENDING'`.
///
/// The surrounding quote characters are kept; a doubled quote (`''`) inside a
/// literal is treated as an escaped quote and collapsed to a single blank.
fn remove_string_literals(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    let mut inside = false;
    let mut it = s.chars().peekable();
    while let Some(c) = it.next() {
        match (inside, c) {
            (true, '\'') => {
                if it.peek() == Some(&'\'') {
                    // Escaped quote ('') — still part of the literal; blank it.
                    it.next();
                    out.push(' ');
                } else {
                    // Closing quote terminates the literal.
                    inside = false;
                    out.push('\'');
                }
            }
            (false, '\'') => {
                // Opening quote starts a literal.
                inside = true;
                out.push('\'');
            }
            (true, _) => out.push(' '),
            (false, other) => out.push(other),
        }
    }
    out
}
pub(crate) fn bind_json_value<'q>(
query: sqlx::query::Query<'q, sqlx::Postgres, sqlx::postgres::PgArguments>,
value: &'q Value,

View File

@@ -739,7 +739,17 @@ async fn transfer_schema_only(
docker_host: &Option<String>,
) -> TuskResult<()> {
let has_local = try_local_pg_dump().await;
transfer_schema_only_with(app, clone_id, source_url, container_name, database, pg_version, docker_host, has_local).await
transfer_schema_only_with(
app,
clone_id,
source_url,
container_name,
database,
pg_version,
docker_host,
has_local,
)
.await
}
#[allow(clippy::too_many_arguments)]

View File

@@ -13,7 +13,7 @@ pub async fn export_csv(
let mut wtr = csv::Writer::from_writer(file);
wtr.write_record(&columns)
.map_err(|e| TuskError::Custom(e.to_string()))?;
.map_err(|e| TuskError::Export(e.to_string()))?;
for row in &rows {
let record: Vec<String> = row
@@ -27,10 +27,10 @@ pub async fn export_csv(
})
.collect();
wtr.write_record(&record)
.map_err(|e| TuskError::Custom(e.to_string()))?;
.map_err(|e| TuskError::Export(e.to_string()))?;
}
wtr.flush().map_err(|e| TuskError::Custom(e.to_string()))?;
wtr.flush().map_err(|e| TuskError::Export(e.to_string()))?;
Ok(())
}

View File

@@ -7,7 +7,7 @@ fn get_history_path(app: &AppHandle) -> TuskResult<std::path::PathBuf> {
let dir = app
.path()
.app_data_dir()
.map_err(|e| TuskError::Custom(e.to_string()))?;
.map_err(|e| TuskError::Config(e.to_string()))?;
fs::create_dir_all(&dir)?;
Ok(dir.join("query_history.json"))
}

View File

@@ -110,11 +110,8 @@ pub async fn drop_database(
.ok_or(TuskError::NotConnected(connection_id))?;
// Terminate active connections to the target database
let terminate_sql = format!(
"SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = '{}' AND pid <> pg_backend_pid()",
name.replace('\'', "''")
);
sqlx::query(&terminate_sql)
sqlx::query("SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = $1::name AND pid <> pg_backend_pid()")
.bind(&name)
.execute(pool)
.await
.map_err(TuskError::Database)?;

View File

@@ -7,7 +7,7 @@ fn get_saved_queries_path(app: &AppHandle) -> TuskResult<std::path::PathBuf> {
let dir = app
.path()
.app_data_dir()
.map_err(|e| TuskError::Custom(e.to_string()))?;
.map_err(|e| TuskError::Config(e.to_string()))?;
fs::create_dir_all(&dir)?;
Ok(dir.join("saved_queries.json"))
}

View File

@@ -10,7 +10,7 @@ fn get_settings_path(app: &AppHandle) -> TuskResult<std::path::PathBuf> {
let dir = app
.path()
.app_data_dir()
.map_err(|e| TuskError::Custom(e.to_string()))?;
.map_err(|e| TuskError::Config(e.to_string()))?;
fs::create_dir_all(&dir)?;
Ok(dir.join("app_settings.json"))
}
@@ -61,7 +61,7 @@ pub async fn save_app_settings(
let connections_path = app
.path()
.app_data_dir()
.map_err(|e| TuskError::Custom(e.to_string()))?
.map_err(|e| TuskError::Config(e.to_string()))?
.join("connections.json");
let mcp_state = state.inner().clone();

View File

@@ -46,7 +46,10 @@ pub async fn create_snapshot(
if params.include_dependencies {
for fk in &fk_rows {
if target_tables.iter().any(|(s, t)| s == &fk.schema && t == &fk.table) {
if target_tables
.iter()
.any(|(s, t)| s == &fk.schema && t == &fk.table)
{
let parent = (fk.ref_schema.clone(), fk.ref_table.clone());
if !target_tables.contains(&parent) {
target_tables.push(parent);
@@ -322,7 +325,7 @@ pub async fn list_snapshots(app: AppHandle) -> TuskResult<Vec<SnapshotMetadata>>
let dir = app
.path()
.app_data_dir()
.map_err(|e| TuskError::Custom(e.to_string()))?
.map_err(|e| TuskError::Config(e.to_string()))?
.join("snapshots");
if !dir.exists() {

View File

@@ -26,6 +26,15 @@ pub enum TuskError {
#[error("Docker error: {0}")]
Docker(String),
#[error("Configuration error: {0}")]
Config(String),
#[error("Validation error: {0}")]
Validation(String),
#[error("Export error: {0}")]
Export(String),
#[error("{0}")]
Custom(String),
}

View File

@@ -31,6 +31,7 @@ pub struct AppState {
}
const SCHEMA_CACHE_TTL: Duration = Duration::from_secs(300); // 5 minutes
const SCHEMA_CACHE_MAX_SIZE: usize = 100;
impl AppState {
pub fn new() -> Self {
@@ -80,6 +81,16 @@ impl AppState {
let mut cache = self.schema_cache.write().await;
// Evict stale entries to prevent unbounded memory growth
cache.retain(|_, entry| entry.cached_at.elapsed() < SCHEMA_CACHE_TTL);
// If still at capacity, remove the oldest entry
if cache.len() >= SCHEMA_CACHE_MAX_SIZE {
if let Some(oldest_key) = cache
.iter()
.min_by_key(|(_, e)| e.cached_at)
.map(|(k, _)| k.clone())
{
cache.remove(&oldest_key);
}
}
cache.insert(
connection_id,
SchemaCacheEntry {

View File

@@ -1,4 +1,4 @@
use std::collections::{HashMap, HashSet};
use std::collections::{HashMap, HashSet, VecDeque};
/// Quotes a PostgreSQL identifier: wraps `name` in double quotes and doubles
/// any embedded `"` per SQL identifier-quoting rules.
pub fn escape_ident(name: &str) -> String {
    let mut quoted = String::with_capacity(name.len() + 2);
    quoted.push('"');
    for ch in name.chars() {
        if ch == '"' {
            quoted.push_str("\"\"");
        } else {
            quoted.push(ch);
        }
    }
    quoted.push('"');
    quoted
}
@@ -44,27 +44,33 @@ pub fn topological_sort_tables(
}
// Kahn's algorithm
let mut queue: Vec<(String, String)> = in_degree
let mut initial: Vec<(String, String)> = in_degree
.iter()
.filter(|(_, &deg)| deg == 0)
.map(|(k, _)| k.clone())
.collect();
queue.sort(); // deterministic order
initial.sort(); // deterministic order
let mut queue: VecDeque<(String, String)> = VecDeque::from(initial);
let mut result = Vec::new();
while let Some(node) = queue.pop() {
while let Some(node) = queue.pop_front() {
result.push(node.clone());
if let Some(neighbors) = graph.get(&node) {
for neighbor in neighbors {
if let Some(deg) = in_degree.get_mut(neighbor) {
*deg -= 1;
if *deg == 0 {
queue.push(neighbor.clone());
queue.sort();
let mut new_ready: Vec<(String, String)> = neighbors
.iter()
.filter(|neighbor| {
if let Some(deg) = in_degree.get_mut(*neighbor) {
*deg -= 1;
*deg == 0
} else {
false
}
}
}
})
.cloned()
.collect();
new_ready.sort();
queue.extend(new_ready);
}
}

View File

@@ -22,7 +22,7 @@
}
],
"security": {
"csp": null
"csp": "default-src 'self'; style-src 'self' 'unsafe-inline'"
}
},
"bundle": {

View File

@@ -103,7 +103,9 @@ export function useReconnect() {
setPgVersion(version);
setDbFlavor(id, flavor);
setCurrentDatabase(database);
queryClient.invalidateQueries();
queryClient.invalidateQueries({ queryKey: ["databases"] });
queryClient.invalidateQueries({ queryKey: ["schemas"] });
queryClient.invalidateQueries({ queryKey: ["completion-schema"] });
},
});
}