Replaced the mock API data with real API integration: updated the components to consume and render data from the API endpoint
Parent: 25bf284e39
Commit: 247ffe10c7

The first changed file is the DataflowCanvas component:
```diff
@@ -136,8 +136,8 @@ import { AiFillFolder } from 'react-icons/ai';
 import { TbTransform } from 'react-icons/tb';
 import { CustomDatabaseIcon, CustomDocumentIcon, CustomDimensionIcon, CustomProcessIcon, CustomSchemaIcon } from './CustomIcons';
 
-// Import mock data
-import mockApiData from './mockData';
+// Import API data
+import mockApiData, { useApiData } from './mockData';
 
 // Import ProcessForm component
 import ProcessForm from './ProcessForm';
```
```diff
@@ -316,12 +316,12 @@ const CustomEdge = ({ id, source, target, sourceX, sourceY, targetX, targetY, so
 const ProcessNode = ({ data, id }) => {
   // Function to handle process edit
   const handleProcessEdit = () => {
-    // Find the process in the mock data
-    const process = mockApiData.processes.find(p => p.slug === id);
-    if (process) {
+    // Find the process in the data
+    // We'll use the data that's passed to the component directly
+    if (data && data.fullProcessData) {
       // We'll use window.processEditCallback which will be set in the main component
       if (window.processEditCallback) {
-        window.processEditCallback(process);
+        window.processEditCallback(data.fullProcessData);
       }
     }
   };
```
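The edit path now threads the full process object through the node's `data` instead of re-querying `mockApiData`, but it still hands the result to a global `window.processEditCallback`. A module-scoped registry would avoid the global; a minimal sketch (hypothetical helper, not part of this commit):

```js
// processEditBus.js -- hypothetical module-scoped replacement for
// window.processEditCallback; not part of this commit.
let editCallback = null;

// The canvas registers its handler once (e.g. in a useEffect).
export function registerProcessEditCallback(cb) {
  editCallback = cb;
}

// ProcessNode calls this instead of touching window.
export function emitProcessEdit(process) {
  if (editCallback) {
    editCallback(process);
  }
}
```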
```diff
@@ -635,10 +635,16 @@ const TableNode = ({ data, id }) => {
 };
 
 const DataflowCanvas = () => {
   // React Flow refs and state
   const reactFlowWrapper = useRef(null);
+  // const popupRef = useRef(null);
   const [reactFlowInstance, setReactFlowInstance] = useState(null);
   const { fitView, setViewport, getViewport } = useReactFlow();
 
+  // Get data from API
+  const { data: apiData, loading, error } = useApiData();
+
+  // All state declarations must be at the top level
   // State for infinite canvas
   const [scale, setScale] = useState(1);
   const [position, setPosition] = useState({ x: 0, y: 0 });
```
```diff
@@ -646,6 +652,25 @@ const DataflowCanvas = () => {
   // State for selected database
   const [selectedDatabase, setSelectedDatabase] = useState(null);
 
+  // State for connection mode
+  const [isConnectionMode, setIsConnectionMode] = useState(false);
+  const [connectionSource, setConnectionSource] = useState(null);
+  const [connectionType, setConnectionType] = useState('default');
+
+  // State for process form
+  const [showProcessForm, setShowProcessForm] = useState(false);
+  const [selectedProcessForEdit, setSelectedProcessForEdit] = useState(null);
+
+  // State for process details popup
+  const [showProcessPopup, setShowProcessPopup] = useState(false);
+  const [selectedProcess, setSelectedProcess] = useState(null);
+  const [popupPosition, setPopupPosition] = useState({ x: 0, y: 0 });
+
+  // State for table creation popup
+  const [showTablePopup, setShowTablePopup] = useState(false);
+  const [isDragging, setIsDragging] = useState(false);
+  const [dragOffset, setDragOffset] = useState({ x: 0, y: 0 });
+
   // Read selected database from localStorage on component mount
   useEffect(() => {
     try {
```
```diff
@@ -660,14 +685,40 @@ const DataflowCanvas = () => {
     }
   }, []);
 
-  // State for connection mode
-  const [isConnectionMode, setIsConnectionMode] = useState(false);
-  const [connectionSource, setConnectionSource] = useState(null);
-  const [connectionType, setConnectionType] = useState('default');
+  // Use API data when available
+  useEffect(() => {
+    if (apiData && !loading) {
+      console.log('API data loaded successfully:', apiData);
+    }
+  }, [apiData, loading]);
 
-  // State for process form
-  const [showProcessForm, setShowProcessForm] = useState(false);
-  const [selectedProcessForEdit, setSelectedProcessForEdit] = useState(null);
+  // Create a loading component
+  const LoadingComponent = () => (
+    <div style={{ display: 'flex', justifyContent: 'center', alignItems: 'center', height: '100%' }}>
+      <div>Loading data from API...</div>
+    </div>
+  );
+
+  // Log error but continue with mockApiData as fallback
+  useEffect(() => {
+    if (error) {
+      console.error('Error loading API data:', error);
+    }
+  }, [error]);
+
+  // Create a loading component
+  // const LoadingComponent = () => (
+  //   <div style={{ display: 'flex', justifyContent: 'center', alignItems: 'center', height: '100%' }}>
+  //     <div>Loading data from API...</div>
+  //   </div>
+  // );
+
+  // Log error but continue with mockApiData as fallback
+  useEffect(() => {
+    if (error) {
+      console.error('Error loading API data:', error);
+    }
+  }, [error]);
 
   // Set up a global callback for process editing
   // This is used by the ProcessNode component
```
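Note that `LoadingComponent` is declared inside `DataflowCanvas` and therefore re-created on every render. Hoisting it to module scope, with the same markup, is the conventional alternative (a sketch, not in the commit):

```jsx
import React from 'react';

// Module-scope version of the loading indicator from the hunk above;
// hoisting it avoids re-creating the component on each DataflowCanvas render.
const LoadingComponent = () => (
  <div style={{ display: 'flex', justifyContent: 'center', alignItems: 'center', height: '100%' }}>
    <div>Loading data from API...</div>
  </div>
);

export default LoadingComponent;
```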
```diff
@@ -700,8 +751,12 @@ const DataflowCanvas = () => {
   // Calculate schema boundaries based on their tables
   const schemaBoundaries = {};
 
+  // Use API data when available, otherwise fall back to mock data
+  const schemas = apiData?.schemas || mockApiData.schemas;
+  const tables = apiData?.tables || mockApiData.tables;
+
   // Initialize with default values from schema definitions
-  mockApiData.schemas.forEach(schema => {
+  schemas.forEach(schema => {
     schemaBoundaries[schema.slug] = {
       minX: schema.position.x,
       minY: schema.position.y,
```
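This `apiData?.x || mockApiData.x` fallback is repeated for schemas, tables, processes, and the viewport settings throughout the component; a small helper could express it once (hypothetical refactor, not in the commit):

```js
// Hypothetical helper: one place for the API-or-mock fallback used below.
// mock is the imported mockApiData object. Note an empty array from the API
// is truthy, so it still wins over the mock fallback.
const fromApiOrMock = (apiData, mock, key) => apiData?.[key] || mock[key];

// Usage:
// const schemas = fromApiOrMock(apiData, mockApiData, 'schemas');
// const tables = fromApiOrMock(apiData, mockApiData, 'tables');
```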
```diff
@@ -711,7 +766,7 @@ const DataflowCanvas = () => {
   });
 
   // Update boundaries based on table positions
-  mockApiData.tables.forEach(table => {
+  tables.forEach(table => {
     const schemaSlug = table.schema;
     if (schemaBoundaries[schemaSlug]) {
       // Add more padding around tables (250px on each side for better spacing)
```
```diff
@@ -728,7 +783,7 @@ const DataflowCanvas = () => {
   });
 
   // Schema background nodes (add these first so they appear behind other nodes)
-  const schemaBackgroundNodes = mockApiData.schemas.map(schema => {
+  const schemaBackgroundNodes = schemas.map(schema => {
     const bounds = schemaBoundaries[schema.slug];
     const width = bounds.maxX - bounds.minX;
     const height = bounds.maxY - bounds.minY;
```
```diff
@@ -761,7 +816,7 @@ const DataflowCanvas = () => {
   });
 
   // Table nodes with additional spacing
-  const tableNodes = mockApiData.tables.map(table => ({
+  const tableNodes = tables.map(table => ({
     id: table.slug,
     type: 'table',
     data: {
```
```diff
@@ -786,10 +841,14 @@ const DataflowCanvas = () => {
 
   // Create process nodes
   const processNodes = useMemo(() => {
-    return mockApiData.processes.map((process, index) => {
+    // Use API data when available, otherwise fall back to mock data
+    const processes = apiData?.processes || mockApiData.processes;
+    const tables = apiData?.tables || mockApiData.tables;
+
+    return processes.map((process, index) => {
       // Calculate position between source and destination tables
-      const sourceTable = mockApiData.tables.find(t => t.slug === process.source_table[0]);
-      const destTable = mockApiData.tables.find(t => t.slug === process.destination_table[0]);
+      const sourceTable = tables.find(t => t.slug === process.source_table[0]);
+      const destTable = tables.find(t => t.slug === process.destination_table[0]);
 
       let x = 300;
       let y = 200;
```
```diff
@@ -841,7 +900,8 @@ const DataflowCanvas = () => {
         description: process.description,
         type: process.type,
         status: process.status,
-        processType: process.type // Pass process type to the component
+        processType: process.type, // Pass process type to the component
+        fullProcessData: process // Include the full process data for editing
       },
       position: { x, y },
       parentNode: parentSchema ? `schema-bg-${parentSchema}` : undefined,
```
```diff
@@ -862,7 +922,10 @@ const DataflowCanvas = () => {
   const initialEdges = useMemo(() => {
     const edges = [];
 
-    mockApiData.processes.forEach(process => {
+    // Use API data when available, otherwise fall back to mock data
+    const processes = apiData?.processes || mockApiData.processes;
+
+    processes.forEach(process => {
       // Determine if process is active or inactive
       const isActive = process.status === 'active';
 
```
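One thing to verify outside this hunk: `processNodes` and `initialEdges` are built inside `useMemo`, and both now read `apiData`. Unless `apiData` is in their dependency arrays (not shown in the diff), the memoized values will keep the mock fallback after the fetch resolves. The required dependency shape, as an isolated sketch:

```js
import { useMemo } from 'react';

// Sketch: a memo that reads apiData must list it as a dependency, or the
// value computed from the mock fallback is never recomputed after the fetch.
function useProcessList(apiData, mockApiData) {
  return useMemo(
    () => apiData?.processes || mockApiData.processes,
    [apiData, mockApiData]
  );
}
```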
```diff
@@ -1054,8 +1117,9 @@ const DataflowCanvas = () => {
   const onInit = (instance) => {
     setReactFlowInstance(instance);
 
-    // Set the viewport from the mock data
-    const { x, y, zoom } = mockApiData.viewportSettings;
+    // Set the viewport from the API data or fallback to mock data
+    const viewportSettings = apiData?.viewportSettings || mockApiData.viewportSettings;
+    const { x, y, zoom } = viewportSettings;
     instance.setViewport({ x, y, zoom });
 
     setTimeout(() => {
```
```diff
@@ -1063,16 +1127,6 @@ const DataflowCanvas = () => {
       fitView({ padding: 1.0, maxZoom: 0.4 });
     }, 500);
   };
 
-  // State for process details popup
-  const [showProcessPopup, setShowProcessPopup] = useState(false);
-  const [selectedProcess, setSelectedProcess] = useState(null);
-  const [popupPosition, setPopupPosition] = useState({ x: 0, y: 0 });
-
-  // State for table creation popup
-  const [showTablePopup, setShowTablePopup] = useState(false);
-  const [isDragging, setIsDragging] = useState(false);
-  const [dragOffset, setDragOffset] = useState({ x: 0, y: 0 });
-  const popupRef = useRef(null);
 
   // Drag handlers for the popup
```
```diff
@@ -1126,9 +1180,13 @@ const DataflowCanvas = () => {
   const onNodeClick = (event, node) => {
     console.log('Node clicked:', node);
 
+    // Use API data when available, otherwise fall back to mock data
+    const tables = apiData?.tables || mockApiData.tables;
+    const processes = apiData?.processes || mockApiData.processes;
+
     // If it's a table node, show details
     if (node.type === 'table') {
-      const table = mockApiData.tables.find(t => t.slug === node.id);
+      const table = tables.find(t => t.slug === node.id);
       if (table) {
         alert(`Table: ${table.name}\nType: ${table.type}\nColumns: ${table.columns.join(', ')}`);
       }
```
```diff
@@ -1136,16 +1194,17 @@ const DataflowCanvas = () => {
 
     // If it's a process node, show details in a custom popup
     if (node.type === 'process') {
-      const process = mockApiData.processes.find(p => p.slug === node.id);
+      // Use the full process data from the node data if available
+      const process = node.data.fullProcessData || processes.find(p => p.slug === node.id);
       if (process) {
         // Get source and destination table names
         const sourceTables = process.source_table.map(slug => {
-          const table = mockApiData.tables.find(t => t.slug === slug);
+          const table = tables.find(t => t.slug === slug);
           return table ? table.name : slug;
         });
 
         const destTables = process.destination_table.map(slug => {
-          const table = mockApiData.tables.find(t => t.slug === slug);
+          const table = tables.find(t => t.slug === slug);
           return table ? table.name : slug;
         });
 
```
```diff
@@ -1292,7 +1351,8 @@ const DataflowCanvas = () => {
     setNodes(updatedNodes);
 
     // Update edges if source or destination tables have changed
-    const oldProcess = mockApiData.processes.find(p => p.slug === selectedProcessForEdit.slug);
+    const processes = apiData?.processes || mockApiData.processes;
+    const oldProcess = processes.find(p => p.slug === selectedProcessForEdit.slug);
 
     // Remove old edges
     if (oldProcess) {
```
```diff
@@ -1377,11 +1437,21 @@ const DataflowCanvas = () => {
 
     setEdges(eds => [...eds, ...newEdges]);
 
-    // Update the mock data
+    // Update the data in memory
+    // Note: In a real application, you would make an API call to update the data on the server
+    // For now, we'll just update the mockApiData for consistency
     const processIndex = mockApiData.processes.findIndex(p => p.slug === processData.slug);
     if (processIndex !== -1) {
       mockApiData.processes[processIndex] = processData;
     }
+
+    // If we have API data, update it too
+    if (apiData && apiData.processes) {
+      const apiProcessIndex = apiData.processes.findIndex(p => p.slug === processData.slug);
+      if (apiProcessIndex !== -1) {
+        apiData.processes[apiProcessIndex] = processData;
+      }
+    }
   } else {
     // Create a new process node
     const newProcess = {
```
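The note above is right that mutating `mockApiData` and `apiData` in place only changes memory; it also bypasses React state, so nothing re-renders from it. Persisting through the server would look roughly like the sketch below, though the update endpoint is hypothetical: only `qbt_metadata_list_get` appears anywhere in this commit.

```js
// Sketch of server-side persistence for a saved process. The endpoint name
// 'qbt_metadata_update' is hypothetical; this commit only uses qbt_metadata_list_get.
async function persistProcess(processData) {
  const response = await fetch('https://sandbox.kezel.io/api/qbt_metadata_update', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ process: processData }),
  });
  if (!response.ok) {
    throw new Error(`Update failed with status ${response.status}`);
  }
  return response.json();
}
```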
```diff
@@ -1470,6 +1540,11 @@ const DataflowCanvas = () => {
 
       // Add to mock data
       mockApiData.processes.push(processData);
+
+      // If we have API data, update it too
+      if (apiData && apiData.processes) {
+        apiData.processes.push(processData);
+      }
     }
   };
 
```
```diff
@@ -1505,7 +1580,7 @@ const DataflowCanvas = () => {
     }
 
     // Check if the drop position is within any schema
-    const schemas = mockApiData.schemas;
+    const schemas = apiData?.schemas || mockApiData.schemas;
     let targetSchema = null;
 
     for (const schema of schemas) {
```
```diff
@@ -1567,12 +1642,18 @@ const DataflowCanvas = () => {
   // Create a style element with our custom styles
   const customStyles = useMemo(() => generateCustomStyles(), []);
 
   // Render the component
   return (
     <div style={{ width: '100%', height: '100%', background: '#ffffff' }} ref={reactFlowWrapper}>
       <style>{customStyles}</style>
 
-      {/* Database Header */}
-      {selectedDatabase && (
+      {/* Show loading state if data is still loading */}
+      {loading ? (
+        <LoadingComponent />
+      ) : (
+        <>
+          {/* Database Header */}
+          {selectedDatabase && (
       <div style={{
         padding: '10px 15px',
         background: 'linear-gradient(90deg, #00a99d, #52c41a)',
```
```diff
@@ -2043,7 +2124,7 @@ const DataflowCanvas = () => {
         isOpen={showProcessForm}
         onClose={() => setShowProcessForm(false)}
         onSave={handleSaveProcess}
-        tables={mockApiData.tables}
+        tables={apiData?.tables || mockApiData.tables}
         existingProcess={selectedProcessForEdit}
       />
 
```
```diff
@@ -2054,6 +2135,8 @@ const DataflowCanvas = () => {
           onCreateTable={handleCreateTable}
         />
       )}
+        </>
+      )}
     </div>
   );
 };
```
The second changed file is the data module imported above as './mockData':

```diff
@@ -1,289 +1,154 @@
-// Mock API data for DataflowCanvas component
+// API data for DataflowCanvas component
+import { useState, useEffect } from 'react';
 
-export const mockApiData = {
-  // Schema definitions
-  schemas: [
-    {
-      name: "Schema_1",
-      slug: "edw_schema",
-      description: "Main enterprise data warehouse schema containing all tables",
-      color: "#1890ff",
-      position: { x: 50, y: 50 },
-      width: 1200, // Increased width
-      height: 700 // Adjusted height
-    },
-    {
-      name: "Schema_2",
-      slug: "analytics_schema",
-      description: "Analytics schema for reporting and business intelligence",
-      color: "#52c41a",
-      position: { x: 1500, y: 50 }, // Increased gap from Schema_1
-      width: 1500, // Further increased width to accommodate all tables
-      height: 700 // Adjusted height
-    }
-  ],
-
-  // Tables data with name, slug, and position - organized for left-to-right data flow
-  tables: [
-    // Schema_1 tables - Stage tables (first column)
-    {
-      name: "Customer_Stage",
-      slug: "cst_stg",
-      type: "stage",
-      schema: "edw_schema",
-      orientation: { x: 100, y: 100 },
-      columns: ["customer_id", "customer_name", "email", "address", "phone", "raw_data", "load_date", "source_system"]
-    },
-    {
-      name: "Order_Stage",
-      slug: "ord_stg",
-      type: "stage",
-      schema: "edw_schema",
-      orientation: { x: 100, y: 300 },
-      columns: ["order_id", "customer_id", "order_date", "total_amount", "status", "raw_data", "load_date", "source_system"]
-    },
-    {
-      name: "Product_Stage",
-      slug: "prd_stg",
-      type: "stage",
-      schema: "edw_schema",
-      orientation: { x: 100, y: 500 },
-      columns: ["product_id", "product_name", "category", "price", "inventory", "raw_data", "load_date", "source_system"]
-    },
-
-    // Schema_1 tables - Dimension tables (third column)
-    {
-      name: "Customer_Dim",
-      slug: "uty126",
-      type: "dimension",
-      schema: "edw_schema",
-      orientation: { x: 500, y: 100 },
-      columns: ["customer_id", "customer_name", "email", "address", "phone"]
-    },
-    {
-      name: "Product_Dim",
-      slug: "prd123",
-      type: "dimension",
-      schema: "edw_schema",
-      orientation: { x: 500, y: 500 },
-      columns: ["product_id", "product_name", "category", "price", "inventory"]
-    },
-    {
-      name: "Time_Dim",
-      slug: "tim123",
-      type: "dimension",
-      schema: "edw_schema",
-      orientation: { x: 500, y: 300 },
-      columns: ["date_id", "day", "month", "quarter", "year", "is_holiday"]
-    },
-
-    // Schema_1 tables - Fact table (fifth column)
-    {
-      name: "Order_Fact",
-      slug: "ntz356",
-      type: "fact",
-      schema: "edw_schema",
-      orientation: { x: 900, y: 300 },
-      columns: ["order_id", "customer_id", "order_date", "total_amount", "status"]
-    },
-
-    // Schema_2 tables - organized for left-to-right data flow (similar to Schema_1)
-    {
-      name: "Sales_Stage",
-      slug: "sls_stg",
-      type: "stage",
-      schema: "analytics_schema",
-      orientation: { x: 1600, y: 300 },
-      columns: ["sale_id", "product_id", "customer_id", "quantity", "sale_date", "revenue", "raw_data", "load_date", "source_system"]
-    },
-
-    // Schema_2 tables - Dimension tables (middle column)
-    {
-      name: "Product_Dim",
-      slug: "prd789",
-      type: "dimension",
-      schema: "analytics_schema",
-      orientation: { x: 2000, y: 100 },
-      columns: ["product_id", "product_name", "category", "price", "inventory"]
-    },
-    {
-      name: "Customer_Dim",
-      slug: "cus789",
-      type: "dimension",
-      schema: "analytics_schema",
-      orientation: { x: 2000, y: 300 },
-      columns: ["customer_id", "customer_name", "email", "address", "phone"]
-    },
-    {
-      name: "Time_Dim",
-      slug: "tim567",
-      type: "dimension",
-      schema: "analytics_schema",
-      orientation: { x: 2000, y: 500 },
-      columns: ["date_id", "day", "month", "quarter", "year", "is_holiday"]
-    },
-
-    // Schema_2 tables - Fact table (right column)
-    {
-      name: "Sales_Fact",
-      slug: "sls432",
-      type: "fact",
-      schema: "analytics_schema",
-      orientation: { x: 2400, y: 300 },
-      columns: ["sale_id", "product_id", "customer_id", "quantity", "sale_date", "revenue"]
-    }
-  ],
-
-  // Processes that connect tables
-  processes: [
-    // Stage to Dimension/Fact processes
-    {
-      name: "Stage_to_Customer_Dim",
-      slug: "process_stg_cust",
-      source_table: ["cst_stg"],
-      destination_table: ["uty126"],
-      description: "ETL process to load customer data from stage to dimension table with data cleansing and validation",
-      type: "ETL",
-      status: "active",
-      mappings: [
-        { source: "customer_id", target: "customer_id", type: "direct" },
-        { source: "customer_name", target: "customer_name", type: "direct" },
-        { source: "email", target: "email", type: "transform" }
-      ]
-    },
-    {
-      name: "Stage_to_Order_Fact",
-      slug: "process_stg_ord",
-      source_table: ["ord_stg"],
-      destination_table: ["ntz356"],
-      description: "ETL process to load order data from stage to fact table with data transformation and aggregation",
-      type: "ETL",
-      status: "active",
-      mappings: [
-        { source: "order_id", target: "order_id", type: "direct" },
-        { source: "customer_id", target: "customer_id", type: "direct" },
-        { source: "order_date", target: "order_date", type: "transform" }
-      ]
-    },
-    {
-      name: "Stage_to_Product_Dim",
-      slug: "process_stg_prod",
-      source_table: ["prd_stg"],
-      destination_table: ["prd123"],
-      description: "ETL process to load product data from stage to dimension table with data cleansing and validation",
-      type: "ETL",
-      status: "active",
-      mappings: [
-        { source: "product_id", target: "product_id", type: "direct" },
-        { source: "product_name", target: "product_name", type: "direct" },
-        { source: "category", target: "category", type: "transform" }
-      ]
-    },
-    // Schema_1 processes
-    {
-      name: "Customer_Order_Process",
-      slug: "process_1",
-      source_table: ["uty126"],
-      destination_table: ["ntz356"],
-      description: "Links customers to their orders through ETL pipeline that validates customer information and enriches order data with customer attributes",
-      type: "ETL",
-      status: "active",
-      mappings: [
-        { source: "customer_id", target: "customer_id", type: "direct" },
-        { source: "customer_name", target: "customer_name", type: "direct" }
-      ]
-    },
-    {
-      name: "Product_Order_Process",
-      slug: "process_2",
-      source_table: ["prd123"],
-      destination_table: ["ntz356"],
-      description: "Links products to orders with inventory tracking and product categorization logic",
-      type: "ETL",
-      status: "active",
-      mappings: [
-        { source: "product_id", target: "product_id", type: "direct" },
-        { source: "product_name", target: "product_details", type: "transform" }
-      ]
-    },
-    {
-      name: "Time_Order_Process",
-      slug: "process_3",
-      source_table: ["tim123"],
-      destination_table: ["ntz356"],
-      description: "Adds time dimension to order data for temporal analysis and reporting",
-      type: "ETL",
-      status: "inactive",
-      mappings: [
-        { source: "date_id", target: "order_date", type: "transform" },
-        { source: "is_holiday", target: "is_holiday_order", type: "direct" }
-      ]
-    },
-
-    // Schema_2 processes
-    {
-      name: "Stage_to_Sales_Fact",
-      slug: "process_stg_sales",
-      source_table: ["sls_stg"],
-      destination_table: ["sls432"],
-      description: "ETL process to load sales data from stage to fact table with data transformation and aggregation",
-      type: "ETL",
-      status: "active",
-      mappings: [
-        { source: "sale_id", target: "sale_id", type: "direct" },
-        { source: "product_id", target: "product_id", type: "direct" },
-        { source: "customer_id", target: "customer_id", type: "direct" }
-      ]
-    },
-    {
-      name: "Product_Sales_Process",
-      slug: "process_4",
-      source_table: ["prd789"],
-      destination_table: ["sls432"],
-      description: "Links products to sales data with inventory tracking and product categorization logic",
-      type: "ETL",
-      status: "active",
-      mappings: [
-        { source: "product_id", target: "product_id", type: "direct" },
-        { source: "price", target: "unit_price", type: "transform" }
-      ]
-    },
-    {
-      name: "Customer_Sales_Process",
-      slug: "process_5",
-      source_table: ["cus789"],
-      destination_table: ["sls432"],
-      description: "Links customers to their purchases with customer segmentation and purchase history analysis",
-      type: "ETL",
-      status: "active",
-      mappings: [
-        { source: "customer_id", target: "customer_id", type: "direct" },
-        { source: "customer_name", target: "buyer_name", type: "transform" }
-      ]
-    },
-    {
-      name: "Time_Sales_Process",
-      slug: "process_6",
-      source_table: ["tim567"],
-      destination_table: ["sls432"],
-      description: "Adds time dimension to sales data for temporal analysis and reporting",
-      type: "ETL",
-      status: "active",
-      mappings: [
-        { source: "date_id", target: "sale_date", type: "transform" },
-        { source: "quarter", target: "fiscal_quarter", type: "direct" }
-      ]
-    }
-  ],
-
-  // Default viewport settings - adjusted for better initial view of both schemas
-  viewportSettings: {
-    x: 0,
-    y: 0,
-    zoom: 0.22 // Further reduced zoom to show both schemas completely
-  }
-};
+// Default viewport settings
+const defaultViewportSettings = {
+  x: 0,
+  y: 0,
+  zoom: 0.22
+};
 
-export default mockApiData;
```
```diff
+// Function to transform API response to the format expected by the application
+const transformApiResponse = (apiData) => {
+  if (!apiData || !apiData.items || !apiData.items.database || apiData.items.database.length === 0) {
+    return {
+      schemas: [],
+      tables: [],
+      processes: [],
+      viewportSettings: defaultViewportSettings
+    };
+  }
+
+  // export default mockApiData;
+  const database = apiData.items.database[0];
+
+  // Extract schemas
+  const schemas = database.schemas?.map(schema => ({
+    name: schema.name,
+    slug: schema.slug,
+    description: schema.description || "",
+    color: schema.color || "#1890ff",
+    position: schema.position || { x: 50, y: 50 },
+    width: schema.width || 1200,
+    height: schema.height || 700
+  })) || [];
+
+  // Extract tables
+  const tables = [];
+  database.schemas?.forEach(schema => {
+    if (schema.table && Array.isArray(schema.table)) {
+      schema.table.forEach(table => {
+        tables.push({
+          name: table.name,
+          slug: table.slug,
+          type: table.type || "stage",
+          schema: table.schema,
+          orientation: table.orientation || { x: 100, y: 100 },
+          columns: table.columns || []
+        });
+      });
+    }
+  });
+
+  // Extract processes
+  const processes = database.process?.map(process => ({
+    name: process.name,
+    slug: process.slug,
+    source_table: process.source_table || [],
+    destination_table: process.destination_table || [],
+    description: process.description || "",
+    type: process.type || "ETL",
+    status: process.status || "active",
+    mappings: process.mappings?.map(mapping => ({
+      source: mapping.source,
+      target: mapping.target,
+      type: mapping.type
+    })) || []
+  })) || [];
+
+  return {
+    schemas,
+    tables,
+    processes,
+    viewportSettings: defaultViewportSettings
+  };
+};
```
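The guards and property accesses in `transformApiResponse` imply a response shaped roughly like the following. This is reconstructed from the code, not a documented contract, with values borrowed from the old mock data for illustration:

```js
// Response shape inferred from transformApiResponse; illustrative only.
const exampleApiResponse = {
  items: {
    database: [
      {
        schemas: [
          {
            name: "Schema_1",
            slug: "edw_schema",
            color: "#1890ff",
            position: { x: 50, y: 50 },
            width: 1200,
            height: 700,
            table: [
              {
                name: "Customer_Stage",
                slug: "cst_stg",
                type: "stage",
                schema: "edw_schema",
                orientation: { x: 100, y: 100 },
                columns: ["customer_id", "customer_name", "email"]
              }
            ]
          }
        ],
        process: [
          {
            name: "Stage_to_Customer_Dim",
            slug: "process_stg_cust",
            source_table: ["cst_stg"],
            destination_table: ["uty126"],
            type: "ETL",
            status: "active",
            mappings: [{ source: "customer_id", target: "customer_id", type: "direct" }]
          }
        ]
      }
    ]
  }
};
```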
// Create a custom hook to fetch and transform the data
|
||||
export const useApiData = () => {
|
||||
const [data, setData] = useState(null);
|
||||
const [loading, setLoading] = useState(true);
|
||||
const [error, setError] = useState(null);
|
||||
|
||||
useEffect(() => {
|
||||
const fetchData = async () => {
|
||||
try {
|
||||
const response = await fetch('https://sandbox.kezel.io/api/qbt_metadata_list_get', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify({}) // Empty payload as requested
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`API request failed with status ${response.status}`);
|
||||
}
|
||||
|
||||
const apiData = await response.json();
|
||||
console.log('API Response Structure:', JSON.stringify(apiData, null, 2));
|
||||
const transformedData = transformApiResponse(apiData);
|
||||
console.log('Transformed Data Structure:', JSON.stringify(transformedData, null, 2));
|
||||
setData(transformedData);
|
||||
setLoading(false);
|
||||
} catch (err) {
|
||||
console.error('Error fetching data:', err);
|
||||
setError(err.message);
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
fetchData();
|
||||
}, []);
|
||||
|
||||
return { data, loading, error };
|
||||
};
|
||||
|
||||
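Minimal usage of the hook, mirroring what DataflowCanvas does with it:

```jsx
import React from 'react';
import { useApiData } from './mockData';

// Example consumer of useApiData (a sketch; DataflowCanvas follows the same pattern).
function CanvasDataGate() {
  const { data, loading, error } = useApiData();

  if (loading) return <div>Loading data from API...</div>;
  if (error) return <div>Falling back to mock data: {error}</div>;
  return <div>{data.tables.length} tables loaded</div>;
}
```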
```diff
+// For backward compatibility, provide a mockApiData object
+// This will be populated with data from the API when available
+export const mockApiData = {
+  schemas: [],
+  tables: [],
+  processes: [],
+  viewportSettings: defaultViewportSettings
+};
+
+// Fetch data immediately to populate mockApiData
+fetch('https://sandbox.kezel.io/api/qbt_metadata_list_get', {
+  method: 'POST',
+  headers: {
+    'Content-Type': 'application/json'
+  },
+  body: JSON.stringify({}) // Empty payload as requested
+})
+  .then(response => {
+    if (!response.ok) {
+      throw new Error(`API request failed with status ${response.status}`);
+    }
+    return response.json();
+  })
+  .then(apiData => {
+    const transformedData = transformApiResponse(apiData);
+
+    // Update mockApiData properties
+    mockApiData.schemas = transformedData.schemas;
+    mockApiData.tables = transformedData.tables;
+    mockApiData.processes = transformedData.processes;
+    mockApiData.viewportSettings = transformedData.viewportSettings;
+
+    console.log('API data loaded successfully');
+  })
+  .catch(error => {
+    console.error('Error fetching API data:', error);
+  });
+
+export default mockApiData;
```
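The module-level fetch mutates the exported `mockApiData` object after the fact, so anything that rendered before the response arrived, or that destructured the empty arrays, sees stale data; `useApiData` exists precisely to cover that gap. An alternative is to export the in-flight promise so callers can await the transformed data (a sketch, not in the commit):

```js
// Alternative (not in the commit): export the in-flight request so callers can
// await the transformed data instead of racing the in-place mutation above.
export const mockApiDataReady = fetch('https://sandbox.kezel.io/api/qbt_metadata_list_get', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({})
})
  .then(response => response.json())
  .then(transformApiResponse);
```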