diff --git a/src/components/DataflowCanvas.jsx b/src/components/DataflowCanvas.jsx
index 6c9d5a2..5a7b9f5 100644
--- a/src/components/DataflowCanvas.jsx
+++ b/src/components/DataflowCanvas.jsx
@@ -136,8 +136,8 @@ import { AiFillFolder } from 'react-icons/ai';
import { TbTransform } from 'react-icons/tb';
import { CustomDatabaseIcon, CustomDocumentIcon, CustomDimensionIcon, CustomProcessIcon, CustomSchemaIcon } from './CustomIcons';
-// Import mock data
-import mockApiData from './mockData';
+// Import the API data hook (mockApiData kept as a fallback)
+import mockApiData, { useApiData } from './mockData';
// Import ProcessForm component
import ProcessForm from './ProcessForm';
@@ -316,12 +316,12 @@ const CustomEdge = ({ id, source, target, sourceX, sourceY, targetX, targetY, so
const ProcessNode = ({ data, id }) => {
// Function to handle process edit
const handleProcessEdit = () => {
- // Find the process in the mock data
- const process = mockApiData.processes.find(p => p.slug === id);
- if (process) {
+ // Use the full process data passed directly to the node,
+ // rather than looking the process up in module-level mock data
+ if (data && data.fullProcessData) {
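+ // fullProcessData is assumed to be attached to the node's data when nodes are built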
// We'll use window.processEditCallback which will be set in the main component
if (window.processEditCallback) {
- window.processEditCallback(process);
+ window.processEditCallback(data.fullProcessData);
}
}
};
@@ -635,10 +635,16 @@ const TableNode = ({ data, id }) => {
};
const DataflowCanvas = () => {
+ // React Flow refs and state
const reactFlowWrapper = useRef(null);
const [reactFlowInstance, setReactFlowInstance] = useState(null);
const { fitView, setViewport, getViewport } = useReactFlow();
+ // Get data from API
+ const { data: apiData, loading, error } = useApiData();
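+ // data stays null until the fetch resolves; loading/error track the request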
+
+ // All hooks must be called unconditionally at the top level (Rules of Hooks)
// State for infinite canvas
const [scale, setScale] = useState(1);
const [position, setPosition] = useState({ x: 0, y: 0 });
@@ -646,6 +652,25 @@ const DataflowCanvas = () => {
// State for selected database
const [selectedDatabase, setSelectedDatabase] = useState(null);
+ // State for connection mode
+ const [isConnectionMode, setIsConnectionMode] = useState(false);
+ const [connectionSource, setConnectionSource] = useState(null);
+ const [connectionType, setConnectionType] = useState('default');
+
+ // State for process form
+ const [showProcessForm, setShowProcessForm] = useState(false);
+ const [selectedProcessForEdit, setSelectedProcessForEdit] = useState(null);
+
+ // State for process details popup
+ const [showProcessPopup, setShowProcessPopup] = useState(false);
+ const [selectedProcess, setSelectedProcess] = useState(null);
+ const [popupPosition, setPopupPosition] = useState({ x: 0, y: 0 });
+
+ // State for table creation popup
+ const [showTablePopup, setShowTablePopup] = useState(false);
+ const [isDragging, setIsDragging] = useState(false);
+ const [dragOffset, setDragOffset] = useState({ x: 0, y: 0 });
+
// Read selected database from localStorage on component mount
useEffect(() => {
try {
@@ -660,14 +685,40 @@ const DataflowCanvas = () => {
}
}, []);
- // State for connection mode
- const [isConnectionMode, setIsConnectionMode] = useState(false);
- const [connectionSource, setConnectionSource] = useState(null);
- const [connectionType, setConnectionType] = useState('default');
+ // Log once the API data has finished loading (diagnostic only)
+ useEffect(() => {
+ if (apiData && !loading) {
+ console.log('API data loaded successfully:', apiData);
+ }
+ }, [apiData, loading]);
- // State for process form
- const [showProcessForm, setShowProcessForm] = useState(false);
- const [selectedProcessForEdit, setSelectedProcessForEdit] = useState(null);
+ // Simple indicator shown while the API request is in flight
+ // (placeholder markup; exact class name and copy are assumed)
+ const LoadingComponent = () => (
+   <div className="loading-indicator">Loading dataflow data...</div>
+ );
- {/* Database Header */}
- {selectedDatabase && (
+ {/* Show a loading indicator while the API request is in flight */}
+ {loading ? (
+ <LoadingComponent />
+ ) : (
+ <>
+ {/* Database Header */}
+ {selectedDatabase && (
<ProcessForm
isOpen={showProcessForm}
onClose={() => setShowProcessForm(false)}
onSave={handleSaveProcess}
- tables={mockApiData.tables}
+ tables={apiData?.tables || mockApiData.tables}
existingProcess={selectedProcessForEdit}
/>
@@ -2054,6 +2135,8 @@ const DataflowCanvas = () => {
onCreateTable={handleCreateTable}
/>
)}
+ </>
+ )}
);
};
diff --git a/src/components/mockData.js b/src/components/mockData.js
index 3c14582..6104177 100644
--- a/src/components/mockData.js
+++ b/src/components/mockData.js
@@ -1,289 +1,154 @@
-// Mock API data for DataflowCanvas component
+// API data for DataflowCanvas component
+import { useState, useEffect } from 'react';
-export const mockApiData = {
- // Schema definitions
- schemas: [
- {
- name: "Schema_1",
- slug: "edw_schema",
- description: "Main enterprise data warehouse schema containing all tables",
- color: "#1890ff",
- position: { x: 50, y: 50 },
- width: 1200, // Increased width
- height: 700 // Adjusted height
- },
- {
- name: "Schema_2",
- slug: "analytics_schema",
- description: "Analytics schema for reporting and business intelligence",
- color: "#52c41a",
- position: { x: 1500, y: 50 }, // Increased gap from Schema_1
- width: 1500, // Further increased width to accommodate all tables
- height: 700 // Adjusted height
- }
- ],
-
- // Tables data with name, slug, and position - organized for left-to-right data flow
- tables: [
- // Schema_1 tables - Stage tables (first column)
- {
- name: "Customer_Stage",
- slug: "cst_stg",
- type: "stage",
- schema: "edw_schema",
- orientation: { x: 100, y: 100 },
- columns: ["customer_id", "customer_name", "email", "address", "phone", "raw_data", "load_date", "source_system"]
- },
- {
- name: "Order_Stage",
- slug: "ord_stg",
- type: "stage",
- schema: "edw_schema",
- orientation: { x: 100, y: 300 },
- columns: ["order_id", "customer_id", "order_date", "total_amount", "status", "raw_data", "load_date", "source_system"]
- },
- {
- name: "Product_Stage",
- slug: "prd_stg",
- type: "stage",
- schema: "edw_schema",
- orientation: { x: 100, y: 500 },
- columns: ["product_id", "product_name", "category", "price", "inventory", "raw_data", "load_date", "source_system"]
- },
-
- // Schema_1 tables - Dimension tables (third column)
- {
- name: "Customer_Dim",
- slug: "uty126",
- type: "dimension",
- schema: "edw_schema",
- orientation: { x: 500, y: 100 },
- columns: ["customer_id", "customer_name", "email", "address", "phone"]
- },
- {
- name: "Product_Dim",
- slug: "prd123",
- type: "dimension",
- schema: "edw_schema",
- orientation: { x: 500, y: 500 },
- columns: ["product_id", "product_name", "category", "price", "inventory"]
- },
- {
- name: "Time_Dim",
- slug: "tim123",
- type: "dimension",
- schema: "edw_schema",
- orientation: { x: 500, y: 300 },
- columns: ["date_id", "day", "month", "quarter", "year", "is_holiday"]
- },
-
- // Schema_1 tables - Fact table (fifth column)
- {
- name: "Order_Fact",
- slug: "ntz356",
- type: "fact",
- schema: "edw_schema",
- orientation: { x: 900, y: 300 },
- columns: ["order_id", "customer_id", "order_date", "total_amount", "status"]
- },
-
- // Schema_2 tables - organized for left-to-right data flow (similar to Schema_1)
- {
- name: "Sales_Stage",
- slug: "sls_stg",
- type: "stage",
- schema: "analytics_schema",
- orientation: { x: 1600, y: 300 },
- columns: ["sale_id", "product_id", "customer_id", "quantity", "sale_date", "revenue", "raw_data", "load_date", "source_system"]
- },
-
- // Schema_2 tables - Dimension tables (middle column)
- {
- name: "Product_Dim",
- slug: "prd789",
- type: "dimension",
- schema: "analytics_schema",
- orientation: { x: 2000, y: 100 },
- columns: ["product_id", "product_name", "category", "price", "inventory"]
- },
- {
- name: "Customer_Dim",
- slug: "cus789",
- type: "dimension",
- schema: "analytics_schema",
- orientation: { x: 2000, y: 300 },
- columns: ["customer_id", "customer_name", "email", "address", "phone"]
- },
- {
- name: "Time_Dim",
- slug: "tim567",
- type: "dimension",
- schema: "analytics_schema",
- orientation: { x: 2000, y: 500 },
- columns: ["date_id", "day", "month", "quarter", "year", "is_holiday"]
- },
-
- // Schema_2 tables - Fact table (right column)
- {
- name: "Sales_Fact",
- slug: "sls432",
- type: "fact",
- schema: "analytics_schema",
- orientation: { x: 2400, y: 300 },
- columns: ["sale_id", "product_id", "customer_id", "quantity", "sale_date", "revenue"]
- }
- ],
-
- // Processes that connect tables
- processes: [
- // Stage to Dimension/Fact processes
- {
- name: "Stage_to_Customer_Dim",
- slug: "process_stg_cust",
- source_table: ["cst_stg"],
- destination_table: ["uty126"],
- description: "ETL process to load customer data from stage to dimension table with data cleansing and validation",
- type: "ETL",
- status: "active",
- mappings: [
- { source: "customer_id", target: "customer_id", type: "direct" },
- { source: "customer_name", target: "customer_name", type: "direct" },
- { source: "email", target: "email", type: "transform" }
- ]
- },
- {
- name: "Stage_to_Order_Fact",
- slug: "process_stg_ord",
- source_table: ["ord_stg"],
- destination_table: ["ntz356"],
- description: "ETL process to load order data from stage to fact table with data transformation and aggregation",
- type: "ETL",
- status: "active",
- mappings: [
- { source: "order_id", target: "order_id", type: "direct" },
- { source: "customer_id", target: "customer_id", type: "direct" },
- { source: "order_date", target: "order_date", type: "transform" }
- ]
- },
- {
- name: "Stage_to_Product_Dim",
- slug: "process_stg_prod",
- source_table: ["prd_stg"],
- destination_table: ["prd123"],
- description: "ETL process to load product data from stage to dimension table with data cleansing and validation",
- type: "ETL",
- status: "active",
- mappings: [
- { source: "product_id", target: "product_id", type: "direct" },
- { source: "product_name", target: "product_name", type: "direct" },
- { source: "category", target: "category", type: "transform" }
- ]
- },
- // Schema_1 processes
- {
- name: "Customer_Order_Process",
- slug: "process_1",
- source_table: ["uty126"],
- destination_table: ["ntz356"],
- description: "Links customers to their orders through ETL pipeline that validates customer information and enriches order data with customer attributes",
- type: "ETL",
- status: "active",
- mappings: [
- { source: "customer_id", target: "customer_id", type: "direct" },
- { source: "customer_name", target: "customer_name", type: "direct" }
- ]
- },
- {
- name: "Product_Order_Process",
- slug: "process_2",
- source_table: ["prd123"],
- destination_table: ["ntz356"],
- description: "Links products to orders with inventory tracking and product categorization logic",
- type: "ETL",
- status: "active",
- mappings: [
- { source: "product_id", target: "product_id", type: "direct" },
- { source: "product_name", target: "product_details", type: "transform" }
- ]
- },
- {
- name: "Time_Order_Process",
- slug: "process_3",
- source_table: ["tim123"],
- destination_table: ["ntz356"],
- description: "Adds time dimension to order data for temporal analysis and reporting",
- type: "ETL",
- status: "inactive",
- mappings: [
- { source: "date_id", target: "order_date", type: "transform" },
- { source: "is_holiday", target: "is_holiday_order", type: "direct" }
- ]
- },
-
- // Schema_2 processes
- {
- name: "Stage_to_Sales_Fact",
- slug: "process_stg_sales",
- source_table: ["sls_stg"],
- destination_table: ["sls432"],
- description: "ETL process to load sales data from stage to fact table with data transformation and aggregation",
- type: "ETL",
- status: "active",
- mappings: [
- { source: "sale_id", target: "sale_id", type: "direct" },
- { source: "product_id", target: "product_id", type: "direct" },
- { source: "customer_id", target: "customer_id", type: "direct" }
- ]
- },
- {
- name: "Product_Sales_Process",
- slug: "process_4",
- source_table: ["prd789"],
- destination_table: ["sls432"],
- description: "Links products to sales data with inventory tracking and product categorization logic",
- type: "ETL",
- status: "active",
- mappings: [
- { source: "product_id", target: "product_id", type: "direct" },
- { source: "price", target: "unit_price", type: "transform" }
- ]
- },
- {
- name: "Customer_Sales_Process",
- slug: "process_5",
- source_table: ["cus789"],
- destination_table: ["sls432"],
- description: "Links customers to their purchases with customer segmentation and purchase history analysis",
- type: "ETL",
- status: "active",
- mappings: [
- { source: "customer_id", target: "customer_id", type: "direct" },
- { source: "customer_name", target: "buyer_name", type: "transform" }
- ]
- },
- {
- name: "Time_Sales_Process",
- slug: "process_6",
- source_table: ["tim567"],
- destination_table: ["sls432"],
- description: "Adds time dimension to sales data for temporal analysis and reporting",
- type: "ETL",
- status: "active",
- mappings: [
- { source: "date_id", target: "sale_date", type: "transform" },
- { source: "quarter", target: "fiscal_quarter", type: "direct" }
- ]
- }
- ],
-
- // Default viewport settings - adjusted for better initial view of both schemas
- viewportSettings: {
- x: 0,
- y: 0,
- zoom: 0.22 // Further reduced zoom to show both schemas completely
- }
+// Default viewport settings
+const defaultViewportSettings = {
+ x: 0,
+ y: 0,
+ zoom: 0.22
};
-export default mockApiData;
+// Transform the raw API response into the shape the canvas expects
+const transformApiResponse = (apiData) => {
+ if (!apiData || !apiData.items || !apiData.items.database || apiData.items.database.length === 0) {
+ return {
+ schemas: [],
+ tables: [],
+ processes: [],
+ viewportSettings: defaultViewportSettings
+ };
+ }
-// export default mockApiData;
\ No newline at end of file
+ const database = apiData.items.database[0];
+
+ // Extract schemas
+ const schemas = database.schemas?.map(schema => ({
+ name: schema.name,
+ slug: schema.slug,
+ description: schema.description || "",
+ color: schema.color || "#1890ff",
+ position: schema.position || { x: 50, y: 50 },
+ width: schema.width || 1200,
+ height: schema.height || 700
+ })) || [];
+
+ // Extract tables
+ const tables = [];
+ database.schemas?.forEach(schema => {
+ if (schema.table && Array.isArray(schema.table)) {
+ schema.table.forEach(table => {
+ tables.push({
+ name: table.name,
+ slug: table.slug,
+ type: table.type || "stage",
+ schema: table.schema,
+ orientation: table.orientation || { x: 100, y: 100 },
+ columns: table.columns || []
+ });
+ });
+ }
+ });
+
+ // Extract processes
+ const processes = database.process?.map(process => ({
+ name: process.name,
+ slug: process.slug,
+ source_table: process.source_table || [],
+ destination_table: process.destination_table || [],
+ description: process.description || "",
+ type: process.type || "ETL",
+ status: process.status || "active",
+ mappings: process.mappings?.map(mapping => ({
+ source: mapping.source,
+ target: mapping.target,
+ type: mapping.type
+ })) || []
+ })) || [];
+
+ return {
+ schemas,
+ tables,
+ processes,
+ viewportSettings: defaultViewportSettings
+ };
+};
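+
+// Assumed API response shape, inferred from the access paths above
+// (illustrative only, not taken from API documentation):
+// {
+//   items: {
+//     database: [{
+//       schemas: [{ name, slug, ..., table: [{ name, slug, type, orientation, columns, ... }] }],
+//       process: [{ name, slug, source_table, destination_table, mappings, ... }]
+//     }]
+//   }
+// }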
+
+// Create a custom hook to fetch and transform the data
+export const useApiData = () => {
+ const [data, setData] = useState(null);
+ const [loading, setLoading] = useState(true);
+ const [error, setError] = useState(null);
+
+ useEffect(() => {
+ const fetchData = async () => {
+ try {
+ const response = await fetch('https://sandbox.kezel.io/api/qbt_metadata_list_get', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json'
+ },
+ body: JSON.stringify({}) // Empty payload as requested
+ });
+
+ if (!response.ok) {
+ throw new Error(`API request failed with status ${response.status}`);
+ }
+
+ const apiData = await response.json();
+ console.log('API Response Structure:', JSON.stringify(apiData, null, 2));
+ const transformedData = transformApiResponse(apiData);
+ console.log('Transformed Data Structure:', JSON.stringify(transformedData, null, 2));
+ setData(transformedData);
+ setLoading(false);
+ } catch (err) {
+ console.error('Error fetching data:', err);
+ setError(err.message);
+ setLoading(false);
+ }
+ };
+
+ fetchData();
+ }, []);
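+ // The request runs once on mount; there is no AbortController, so a
+ // response arriving after unmount would still call setData/setLoading.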
+
+ return { data, loading, error };
+};
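+
+// Example usage inside a component (mirrors DataflowCanvas above):
+//   const { data: apiData, loading, error } = useApiData();
+//   if (loading) return <LoadingComponent />;
+//   const tables = apiData?.tables || mockApiData.tables;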
+
+// For backward compatibility, provide a mockApiData object
+// This will be populated with data from the API when available
+export const mockApiData = {
+ schemas: [],
+ tables: [],
+ processes: [],
+ viewportSettings: defaultViewportSettings
+};
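+// Note: the fetch below mutates this object in place. Code that destructures
+// its arrays before the request resolves keeps the empty ones; code holding
+// the object itself sees the loaded data once the fields are reassigned.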
+
+// Module-level fetch so mockApiData is populated even for code that never calls useApiData
+fetch('https://sandbox.kezel.io/api/qbt_metadata_list_get', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json'
+ },
+ body: JSON.stringify({}) // Empty payload as requested
+})
+.then(response => {
+ if (!response.ok) {
+ throw new Error(`API request failed with status ${response.status}`);
+ }
+ return response.json();
+})
+.then(apiData => {
+ const transformedData = transformApiResponse(apiData);
+
+ // Update mockApiData properties
+ mockApiData.schemas = transformedData.schemas;
+ mockApiData.tables = transformedData.tables;
+ mockApiData.processes = transformedData.processes;
+ mockApiData.viewportSettings = transformedData.viewportSettings;
+
+ console.log('API data loaded successfully');
+})
+.catch(error => {
+ console.error('Error fetching API data:', error);
+});
+
+export default mockApiData;
\ No newline at end of file