Damien 2025-02-22 09:04:20 -05:00
parent e668c16901
commit 3e6a69ccbf
6 changed files with 1386 additions and 691 deletions

package-lock.json (generated): 2056 changed lines
File diff suppressed because it is too large

@@ -9,7 +9,7 @@
"next-build": "next build src-next"
},
"dependencies": {
"@tauri-apps/api": "^2.2.0",
"@tauri-apps/api": "^1.6.0",
"@tauri-apps/plugin-clipboard-manager": "^2.2.1",
"@tauri-apps/plugin-dialog": "^2.2.0",
"@tauri-apps/plugin-fs": "^2.2.0",

@@ -1,7 +1,6 @@
import React from 'react';
import Tree from 'components/Tree';
import rawData from '../data.json';
interface TreeNodeData {
name: string;
type: string;
@@ -25,6 +24,7 @@ const addFilesToData = (data: any): TreeNodeData[] => {
const data = addFilesToData(rawData);
export default function Index() {
return (
<div style={{ width: '100vw', height: '100vh', backgroundColor: "#222" }}>
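
The TreeNodeData interface is truncated in this hunk: only name and type are visible. Judging from how the rest of the diff uses the data (Tree reads data[0].size, and addFilesToData builds a nested TreeNodeData[]), a plausible completion looks like the sketch below; the size and children fields, and the example type values, are assumptions rather than lines from the repository:

    // Hypothetical completion of the truncated interface.
    interface TreeNodeData {
      name: string;
      type: string;              // e.g. "folder" | "file" (assumed values)
      size?: number;             // assumed: Tree reads data[0].size
      children?: TreeNodeData[]; // assumed: needed to render a nested tree
    }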

@@ -1,5 +1,8 @@
import React from 'react';
"use client";
import React, {useEffect} from 'react';
import TreeNode from '../TreeNode';
import { invoke } from '@tauri-apps/api/tauri'
interface TreeNodeData {
name: string;
@@ -18,6 +21,13 @@ interface TreeProps {
const Tree: React.FC<TreeProps> = ({ data }) => {
const rootSize = data[0].size;
useEffect(() => {
invoke('load_folders')
.then(() => console.log("Folders loaded"))
.catch(console.error)
}, [])
return (
<div className="tree">
{data.map((node) => (
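
As added here, the effect only confirms that the load_folders command ran; on the Rust side (later in this diff) the command prints to stdout and returns nothing to the caller. If the command were later changed to return the crawled tree as JSON, the same call could be typed and kept in component state. A sketch of that variant, reusing the TreeNodeData interface declared in this file; the generic parameter, the useState hook, and the changed Rust return type are all assumptions, not part of this commit:

    // Hypothetical hook wrapping the invoke call added in this commit.
    // Assumes load_folders is changed to return TreeNodeData[] as JSON.
    import { useEffect, useState } from 'react';
    import { invoke } from '@tauri-apps/api/tauri';

    const useFolderTree = () => {
      const [nodes, setNodes] = useState<TreeNodeData[]>([]);
      useEffect(() => {
        invoke<TreeNodeData[]>('load_folders')
          .then(setNodes)
          .catch(console.error);
      }, []);
      return nodes;
    };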

@@ -3,7 +3,7 @@ use std::path::PathBuf;
use serde::Serialize;
#[derive(Serialize, Debug)]
struct FolderData {
pub struct FolderData {
name: String,
path: String,
#[serde(rename = "type")]
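
Because FolderData derives Serialize and renames the field following path to "type", the frontend would receive each entry as JSON in roughly the shape sketched below once the command actually returns data. Only name, path, and the rename attribute are visible in this hunk; the renamed field's Rust type is cut off, so string is an assumption, and any further fields are unknown:

    // Approximate JSON shape of one serialized FolderData value.
    interface FolderDataJson {
      name: string;
      path: string;
      type: string; // produced by #[serde(rename = "type")]; underlying Rust type not visible in the diff
    }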

@@ -7,6 +7,7 @@ mod folder_crawler;
fn main() {
tauri::Builder::default()
.invoke_handler(tauri::generate_handler![load_folders])
.plugin(tauri_plugin_global_shortcut::Builder::new().build())
.plugin(tauri_plugin_http::init())
.plugin(tauri_plugin_os::init())
@@ -22,6 +23,8 @@ fn main() {
#[tauri::command]
fn load_folders(){
println!("TEST");
let root = PathBuf::from("C:/Users/Damie/Desktop");
let crawler = folder_crawler::FolderCrawler::new(root);
for folder_data in crawler {
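
The command added above takes no arguments and hard-codes C:/Users/Damie/Desktop as the crawl root. If the frontend were meant to choose the directory instead, the usual Tauri pattern is to pass an arguments object to invoke and declare a matching parameter on the Rust command (here, something like fn load_folders(root: String)). A sketch of that hypothetical call site; the root argument is not something this commit's command accepts:

    // Hypothetical parameterized call; requires a matching `root: String`
    // parameter on the Rust command, which this commit does not add.
    import { invoke } from '@tauri-apps/api/tauri';

    export const loadFolders = (root: string): Promise<void> =>
      invoke('load_folders', { root })
        .then(() => console.log('Folders loaded for', root))
        .catch(console.error);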