Commit
feat: add query metadata route
invm committed Oct 14, 2023
1 parent 508bdd5 commit 486788a
Showing 7 changed files with 68 additions and 77 deletions.
1 change: 1 addition & 0 deletions src-tauri/src/bin/main.rs
@@ -81,6 +81,7 @@ fn main() {
queries::get_procedures,
queries::get_triggers,
queries::get_table_structure,
queries::get_query_metadata,
queries::query_results,
])
.run(tauri::generate_context!())
22 changes: 14 additions & 8 deletions src-tauri/src/handlers/queries.rs
@@ -1,14 +1,16 @@
use std::fs::read_to_string;

use crate::{
queues::query::{QueryTask, QueryTaskEnqueueResult, QueryTaskStatus},
state::{AsyncState, ServiceAccess},
utils::{
crypto::md5_hash,
error::{CommandResult, Error},
error::{CommandResult, Error}, fs::paginate_file,
},
};
use anyhow::anyhow;
use serde::{Deserialize, Serialize};
use serde_json::{Value, json};
use serde_json::{json, Value};
use sqlparser::{dialect::dialect_from_str, parser::Parser};
use tauri::{command, AppHandle, State};
use tracing::info;
@@ -76,8 +78,7 @@ pub async fn enqueue_query(

#[derive(Serialize, Deserialize)]
pub struct QueryResultParams {
pub conn_id: String,
pub query_hash: String,
pub path: String,
pub page: usize,
pub page_size: usize,
}
@@ -94,14 +95,19 @@ pub async fn execute_query(
Ok(json!({ "result": result }))
}

#[command]
pub async fn get_query_metadata(_app_handle: AppHandle, path: String) -> CommandResult<Value> {
let file = read_to_string(path + ".metadata").expect("Error reading file");
Ok(Value::from(file))
}

#[command]
pub async fn query_results(
_app_handle: AppHandle,
_params: QueryResultParams,
params: QueryResultParams,
) -> CommandResult<Value> {
// let connection = app_handle.acquire_connection(conn_id);
// let result = connection.execute_query(query).await?;
Ok(Value::Null)
let data = paginate_file(&params.path, params.page, params.page_size);
Ok(Value::from(data))
}

#[command]
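For context, a minimal standalone sketch — not code from this commit — of the read path the two commands above now cover: parsing the ".metadata" sidecar as JSON and slicing one page out of the rows file. The helper names and the anyhow-based error handling are assumptions made for illustration; the committed handlers use expect and return the raw file contents.

use anyhow::{Context, Result};
use serde_json::Value;
use std::fs;

// Hypothetical helpers mirroring get_query_metadata and query_results.
fn read_metadata(result_path: &str) -> Result<Value> {
    let metadata_path = format!("{result_path}.metadata");
    let raw = fs::read_to_string(&metadata_path)
        .with_context(|| format!("could not read {metadata_path}"))?;
    serde_json::from_str(&raw).with_context(|| format!("{metadata_path} is not valid JSON"))
}

fn read_page(result_path: &str, page: usize, page_size: usize) -> Result<Vec<String>> {
    let file = fs::read_to_string(result_path)
        .with_context(|| format!("could not read {result_path}"))?;
    // Skip the pages before the requested one, then take one page of lines.
    Ok(file
        .lines()
        .skip(page * page_size)
        .take(page_size)
        .map(str::to_string)
        .collect())
}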
2 changes: 1 addition & 1 deletion src-tauri/src/queues/query.rs
@@ -1,5 +1,5 @@
use crate::database::connections::ConnectedConnection;
use crate::utils::init::write_query;
use crate::utils::fs::write_query;
use anyhow::Result;
use serde::Deserialize;
use serde::Serialize;
58 changes: 45 additions & 13 deletions src-tauri/src/utils/fs.rs
@@ -1,5 +1,22 @@
use crate::database::connections::ResultSet;
use anyhow::Result;
use std::{fs, path::Path};
use serde_json::json;
use std::{fs, path::PathBuf};
use tauri::api::dir::with_temp_dir;
use tracing::{debug, error};

pub fn get_tmp_dir() -> Result<String> {
let mut temp_dir = PathBuf::from("/tmp/.noir");
with_temp_dir(|dir| {
temp_dir = PathBuf::from(dir.path());
return ();
})?;
let res = fs::create_dir(temp_dir.clone());
if let Err(res) = res {
error!("Error: {:?}", res);
}
return Ok(temp_dir.to_str().unwrap().to_string());
}
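// As an aside, an alternative sketch (an assumption, not the committed code):
// resolve the scratch directory from the platform temp dir instead of
// hard-coding "/tmp/.noir", and tolerate it already existing via create_dir_all.
pub fn scratch_dir() -> Result<PathBuf> {
    let dir = std::env::temp_dir().join(".noir"); // e.g. /tmp/.noir on Linux
    fs::create_dir_all(&dir)?; // no-op when the directory already exists
    Ok(dir)
}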

pub fn get_app_path() -> String {
let xdg_path = std::env::var("XDG_CONFIG_HOME");
@@ -30,20 +47,35 @@ pub fn create_app_config(app_path: &String) -> Result<()> {
Ok(fs::write(config_path, config)?)
}

pub fn paginate_file(path: &Path, page: usize, limit: usize) -> Vec<String> {
pub fn paginate_file(path: &str, page: usize, limit: usize) -> Vec<String> {
let file = fs::read_to_string(path).expect("Error reading file");
let lines = file.lines().skip(page * limit).take(limit);
return lines.into_iter().map(|s| s.to_string()).collect();
}

// fn main() {
// let path = "input.txt";
// let limit = 2;
// let page = 4;
// let lines = paginate_file(Path::new(path), page, limit);
//
// for line in lines {
// println!("{}", line);
// }
// }
//
pub fn write_file(path: &PathBuf, content: &str) -> Result<()> {
// let file_path = temp_dir.join(file_name);
debug!("Writing to file: {:?}", path);
let res = fs::write(&path, content);
if let Err(res) = res {
error!("Error: {:?}", res);
}
Ok(())
}

pub fn write_query(id: &str, result_set: ResultSet) -> Result<String> {
let rows = json!(result_set.rows).to_string();
let metadata = json!({
"rows": result_set.rows.len(),
"affected_rows": result_set.affected_rows,
"warnings": result_set.warnings,
"info": result_set.info,
})
.to_string();
let tmp_dir = get_tmp_dir()?;
let path = tmp_dir.clone() + "/" + id;
let metadata_path = tmp_dir + "/" + id + ".metadata";
write_file(&PathBuf::from(&path), &rows)?;
write_file(&PathBuf::from(&metadata_path), &metadata)?;
Ok(path)
}
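To keep this writer and the two new routes in one picture, here is a small hypothetical helper — not part of the commit — illustrating the on-disk layout write_query produces: the returned path points at the rows file read by query_results via paginate_file, and appending ".metadata" yields the sidecar read by get_query_metadata.

use std::path::PathBuf;

// Hypothetical helper; `tmp_dir` would come from get_tmp_dir() and `id` is the query id.
fn result_paths(tmp_dir: &str, id: &str) -> (PathBuf, PathBuf) {
    let rows = PathBuf::from(format!("{tmp_dir}/{id}"));              // read by query_results
    let metadata = PathBuf::from(format!("{tmp_dir}/{id}.metadata")); // read by get_query_metadata
    (rows, metadata)
}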
50 changes: 3 additions & 47 deletions src-tauri/src/utils/init.rs
@@ -1,31 +1,14 @@
use std::{fs, path::PathBuf};

use crate::database::{database::create_app_db, connections::ResultSet};
use anyhow::Result;
use serde_json::json;
use tauri::api::dir::with_temp_dir;
use tracing::{debug, error};

use crate::database::database::create_app_db;

use super::{
crypto::create_app_key,
fs::{check_if_app_dir_exists, create_app_config, create_app_dir, get_app_path},
};

pub fn get_tmp_dir() -> Result<String> {
let mut temp_dir = PathBuf::from("/tmp/.noir");
with_temp_dir(|dir| {
temp_dir = PathBuf::from(dir.path());
return ();
})?;
let res = fs::create_dir(temp_dir.clone());
if let Err(res) = res {
error!("Error: {:?}", res);
}
return Ok(temp_dir.to_str().unwrap().to_string());
}

pub fn init_app() -> Result<()> {
// clear this shit
// TODO: clear this shit
let app_path = get_app_path();
if !check_if_app_dir_exists(&app_path) {
create_app_dir(&app_path)?;
@@ -35,30 +18,3 @@ pub fn init_app() -> Result<()> {
}
Ok(())
}

pub fn write_file(path: &PathBuf, content: &str) -> Result<()> {
// let file_path = temp_dir.join(file_name);
debug!("Writing to file: {:?}", path);
let res = fs::write(&path, content);
if let Err(res) = res {
error!("Error: {:?}", res);
}
Ok(())
}

pub fn write_query(id: &str, result_set: ResultSet) -> Result<String> {
let rows = json!(result_set.rows).to_string();
let metadata = json!({
"rows": result_set.rows.len(),
"affected_rows": result_set.affected_rows,
"warnings": result_set.warnings,
"info": result_set.info,
})
.to_string();
let tmp_dir = get_tmp_dir()?;
let path = tmp_dir.clone() + "/" + id;
let metadata_path = tmp_dir + "/" + id + ".metadata";
write_file(&PathBuf::from(&path), &rows)?;
write_file(&PathBuf::from(&metadata_path), &metadata)?;
Ok(path)
}
@@ -8,7 +8,7 @@ import {
} from "services/Connections";

import { invoke } from "@tauri-apps/api";
import { QueryResult } from "interfaces";
import { ResultSet } from "interfaces";

export const TableColumnsCollapse = (props: {
title: string;
@@ -43,12 +43,12 @@ export const TableColumnsCollapse = (props: {
const listData = async (table: string) => {
try {
const query = "SELECT * from " + table + " LIMIT 1000";
const { result_sets } = await invoke<QueryResult>("execute_query", {
const res = await invoke<ResultSet>("execute_query", {
connId: getConnection().id,
query,
autoLimit: true,
});
const data = { query, executed: true, result_sets };
const data = { query, executed: true, result_sets: [res] };
addContentTab(newContentTab(table, "Query", data));
} catch (error) {
notify(error);
@@ -58,7 +58,7 @@ const truncateTable = async (table: string) => {
const truncateTable = async (table: string) => {
try {
const query = "TRUNCATE TABLE " + table;
await invoke<QueryResult>("execute_query", {
await invoke<ResultSet>("execute_query", {
connId: getConnection().id,
query,
autoLimit: false,
4 changes: 0 additions & 4 deletions src/interfaces.ts
Expand Up @@ -118,10 +118,6 @@ export type ResultSet = {
rows: Row[];
};

export type QueryResult = {
result_sets: ResultSet[];
};

const QueryTaskStatus = {
Queued: "Queued",
Progress: "Progress",
