diff --git a/src/components/CustomIcons.jsx b/src/components/CustomIcons.jsx
index fa650af..a5740e8 100644
--- a/src/components/CustomIcons.jsx
+++ b/src/components/CustomIcons.jsx
@@ -4,11 +4,11 @@ import React from 'react';
export const CustomSchemaIcon = ({ width = "49", height = "46" }) => (
-
+
-
-
+
+
diff --git a/src/components/DataflowCanvas.jsx b/src/components/DataflowCanvas.jsx
index 8a1d647..9592624 100644
--- a/src/components/DataflowCanvas.jsx
+++ b/src/components/DataflowCanvas.jsx
@@ -679,6 +679,16 @@ const DataflowCanvas = () => {
const parsedData = JSON.parse(dbData);
setSelectedDatabase(parsedData);
console.log('DataFlow view initialized with database:', parsedData);
+
+ // If this is "service 2" database, ensure we're using the correct slug
+ if (parsedData.name === 'service 2' || parsedData.slug === 'my_dwh2') {
+ console.log('Service 2 database detected - ensuring empty schemas');
+
+ // Set the current database slug in mockData.js
+ if (typeof window.setCurrentDbSlug === 'function') {
+ window.setCurrentDbSlug('my_dwh2');
+ }
+ }
}
} catch (error) {
console.error('Error reading database from localStorage:', error);
@@ -755,13 +765,23 @@ const DataflowCanvas = () => {
const schemas = apiData?.schemas || mockApiData.schemas;
const tables = apiData?.tables || mockApiData.tables;
+ console.log('DataflowCanvas - Schemas available:', schemas);
+ console.log('DataflowCanvas - Tables available:', tables);
+
+ // Check if we have any schemas
+ if (schemas.length === 0) {
+ console.log('No schemas available for this database');
+ // Return an empty array if there are no schemas
+ return [];
+ }
+
// Initialize with default values from schema definitions
schemas.forEach(schema => {
schemaBoundaries[schema.slug] = {
- minX: schema.position.x,
- minY: schema.position.y,
- maxX: schema.position.x + schema.width,
- maxY: schema.position.y + schema.height
+ minX: schema.position?.x || 0,
+ minY: schema.position?.y || 0,
+ maxX: (schema.position?.x || 0) + (schema.width || 500),
+ maxY: (schema.position?.y || 0) + (schema.height || 500)
};
});
@@ -1756,7 +1776,10 @@ const DataflowCanvas = () => {
{selectedDatabase.name} Database is Empty
- This database doesn't have any tables or data flows yet. Start by adding tables and processes to visualize your data flow.
+ {selectedDatabase.slug === 'my_dwh2' ?
+ "This database (service 2) doesn't contain any schemas or tables. This is expected behavior." :
+ "This database doesn't have any tables or data flows yet. Start by adding tables and processes to visualize your data flow."
+ }
- {isDbtez ? 'Database' : `${data.schemas} schemas • ${data.tables} tables`}
+
+ {`${data.schemas} schemas • ${data.tables} tables`}
+
+
+
+ Connection: {dbSlug}
{/* View Details Button */}
@@ -1028,14 +1065,16 @@ const DatabaseNode = ({ data }) => {
onClick={(event) => {
// Stop propagation to prevent the node click handler from being triggered
event.stopPropagation();
- // Call the viewDetails function passed in data
- if (data.onViewDetails) {
- data.onViewDetails(data.id, data.label);
+ // Call the viewDetails function passed in data with safety checks
+ if (data && data.onViewDetails && data.id) {
+ data.onViewDetails(data.id, data.name || data.label || 'Unknown Database');
+ } else {
+ console.warn('Cannot view details: missing required data properties');
}
}}
style={{
padding: '4px 8px',
- backgroundColor: isDbtez ? '#00a99d' : '#1890ff',
+ backgroundColor: borderColor,
color: 'white',
border: 'none',
borderRadius: '3px',
@@ -1259,19 +1298,56 @@ const TableNode = ({ data }) => {
// Generate data for InfiniteCanvas using mockApiData from DataflowCanvas
const generateMockData = () => {
- // Create database
- const databases = [
- { id: 'db4', name: 'Dbtez', schemas: mockApiData.schemas.length, tables: mockApiData.tables.length },
- ];
+ // Get unique database slugs from schemas
+ const uniqueDatabases = [...new Set(mockApiData.schemas.map(schema => schema.database))];
+
+ // Create databases from unique database slugs
+ const databases = uniqueDatabases.map((dbSlug, index) => {
+ // Find all schemas for this database
+ const dbSchemas = mockApiData.schemas.filter(schema => schema.database === dbSlug);
+
+ // Find all tables for this database
+ const dbTables = mockApiData.tables.filter(table => table.database === dbSlug);
+
+ // Get the database name from the first schema (if available)
+ const dbName = dbSchemas.length > 0 && dbSchemas[0].databaseName
+ ? dbSchemas[0].databaseName
+ : `Database ${dbSlug}`;
+
+ return {
+ id: `db-${dbSlug}`,
+ name: dbName, // This will be displayed in the UI
+ slug: dbSlug,
+ schemas: dbSchemas.length,
+ tables: dbTables.length
+ };
+ });
+
+ // If no databases were found, add a default one
+ if (databases.length === 0) {
+ databases.push({
+ id: 'db-default',
+ name: 'Default Database',
+ slug: 'default',
+ schemas: mockApiData.schemas.length,
+ tables: mockApiData.tables.length
+ });
+ }
// Create schemas from mockApiData
- const schemas = mockApiData.schemas.map((schema, index) => ({
- id: `schema${index + 10}`,
- dbId: 'db4',
- name: schema.name,
- slug: schema.slug,
- tables: mockApiData.tables.filter(table => table.schema === schema.slug).length
- }));
+ const schemas = mockApiData.schemas.map((schema, index) => {
+ // Find the database for this schema
+ const dbSlug = schema.database || 'default';
+ const db = databases.find(db => db.slug === dbSlug) || databases[0];
+
+ return {
+ id: `schema-${schema.slug}`,
+ dbId: db.id,
+ name: schema.name,
+ slug: schema.slug,
+ tables: mockApiData.tables.filter(table => table.schema === schema.slug).length
+ };
+ });
// Create tables from mockApiData
const tables = mockApiData.tables.map((table, index) => {
@@ -1279,16 +1355,22 @@ const generateMockData = () => {
const schemaSlug = table.schema;
const schemaObj = schemas.find(s => s.slug === schemaSlug);
+ if (!schemaObj) {
+ console.warn(`Schema not found for table ${table.name} (${table.slug})`);
+ return null;
+ }
+
return {
- id: `table${index + 16}`,
+ id: `table-${table.slug}`,
schemaId: schemaObj.id,
name: table.name,
slug: table.slug,
isFact: table.type === 'fact',
type: table.type,
- columns: table.columns
+ columns: table.columns,
+ database: table.database
};
- });
+ }).filter(Boolean); // Remove any null entries
return { databases, schemas, tables };
};
@@ -1333,17 +1415,78 @@ const InfiniteCanvas = () => {
console.error('Error loading databases from localStorage:', error);
}
- // If no saved databases, return default Dbtez database
- return [{
- id: 'db4',
- name: 'Dbtez',
- description: 'Default database for data exploration',
- type: 'PostgreSQL',
- schemas: mockData.schemas.length,
- tables: mockData.tables.length
- }];
+ // If no saved databases, use the databases from our API
+ return mockData.databases.map(db => ({
+ ...db,
+ description: db.description || `Database ${db.name}`,
+ type: db.type || 'PostgreSQL'
+ }));
});
+ // Track if we've already fetched the database data
+ const [hasFetchedDatabases, setHasFetchedDatabases] = useState(false);
+
+ // Fetch real database data from API - only once
+ useEffect(() => {
+ // Skip if we've already fetched the data
+ if (hasFetchedDatabases) {
+ console.log('Skipping database fetch - already loaded');
+ return;
+ }
+
+ const fetchDatabasesFromAPI = async () => {
+ try {
+ // API configuration
+ const API_BASE_URL = 'https://sandbox.kezel.io/api';
+        const token = "abdhsg"; // SECURITY: hardcoded credential — load from env/config, never commit real tokens
+        const orgSlug = "sN05Pjv11qvH"; // SECURITY: hardcoded org slug — load from env/config
+
+ console.log('Fetching databases from API...');
+
+ const response = await axios.post(
+ `${API_BASE_URL}/qbt_database_list_get`,
+ {
+ token: token,
+ org: orgSlug,
+ },
+ {
+ headers: {
+ 'Content-Type': 'application/json'
+ }
+ }
+ );
+
+ console.log('API Response:', response.data);
+
+ if (response.data && response.data.items && Array.isArray(response.data.items)) {
+ // Map API response to our database format
+ const apiDatabases = response.data.items.map((db, index) => ({
+ id: `db-${db.con || index}`,
+ name: db.name, // Use the name from API
+ slug: db.con,
+ description: db.description || `Database ${db.name}`,
+ type: db.database_type || 'PostgreSQL',
+ schemas: 0, // We'll update these later if needed
+ tables: 0
+ }));
+
+ console.log('Mapped databases from API:', apiDatabases);
+
+ // Update state with API data
+ setDatabases(apiDatabases);
+
+ // Mark that we've fetched the data
+ setHasFetchedDatabases(true);
+ }
+ } catch (error) {
+ console.error('Error fetching databases from API:', error);
+ }
+ };
+
+ // Call the function to fetch databases
+ fetchDatabasesFromAPI();
+ }, [hasFetchedDatabases]); // Only re-run if hasFetchedDatabases changes
+
const [schemas, setSchemas] = useState(() => {
// Try to load schemas from localStorage
try {
@@ -1406,6 +1549,7 @@ const InfiniteCanvas = () => {
type: 'database',
data: {
label: newDatabase.name,
+ name: newDatabase.name, // Include the name explicitly
schemas: newDatabase.schemas,
tables: newDatabase.tables,
expanded: false,
@@ -1746,18 +1890,63 @@ const InfiniteCanvas = () => {
// Function to handle redirection to DataFlow view
const handleViewDataFlow = (dbId, dbName) => {
- // In a real application with proper routing, you would use a router to navigate
- // Since we're using a tab-based navigation in App.jsx, we need to communicate with the parent
+ // Safety check for undefined parameters
+ if (!dbId || !dbName) {
+ console.error('Invalid database ID or name provided to handleViewDataFlow');
+ return;
+ }
+
+ // Find the database object to get the slug
+ const database = databases.find(db => db.id === dbId);
+ if (!database) {
+ console.error(`Database with ID ${dbId} not found`);
+ return;
+ }
+
+ // Get the database slug, with a fallback
+ const dbSlug = database.slug || (dbId.startsWith('db-') ? dbId.substring(3) : 'default');
+
+ console.log(`Viewing data flow for database: ${dbName} (${dbSlug})`);
+
+ // Clear any existing schemas in localStorage for this database
+ try {
+ // Clear any existing schemas for this database
+ localStorage.removeItem(`schemas_${dbSlug}`);
+ localStorage.removeItem(`tables_${dbSlug}`);
+
+ // If this is "service 2" (my_dwh2), set empty schemas
+ if (dbSlug === 'my_dwh2') {
+ console.log('Setting empty schemas for service 2 database');
+ localStorage.setItem(`schemas_${dbSlug}`, JSON.stringify([]));
+ localStorage.setItem(`tables_${dbSlug}`, JSON.stringify([]));
+ }
+ } catch (error) {
+ console.error('Error clearing schemas from localStorage:', error);
+ }
// Store the selected database info in localStorage for the DataFlow component to use
localStorage.setItem('selectedDatabase', JSON.stringify({
id: dbId,
- name: dbName
+ name: dbName,
+ slug: dbSlug, // Include the slug for API calls
+ isEmpty: dbSlug === 'my_dwh2' // Flag to indicate if this database has no schemas
}));
+ // Set the current database slug in mockData.js
+ if (typeof window.setCurrentDbSlug === 'function') {
+ window.setCurrentDbSlug(dbSlug);
+ } else {
+ console.warn('window.setCurrentDbSlug function is not available');
+ }
+
// Trigger an event that App.jsx can listen to
const event = new CustomEvent('viewDataFlow', {
- detail: { databaseId: dbId, databaseName: dbName }
+ detail: {
+ databaseId: dbId,
+ databaseName: dbName,
+ databaseSlug: dbSlug,
+ isEmpty: dbSlug === 'my_dwh2' // Flag to indicate if this database has no schemas
+ }
});
window.dispatchEvent(event);
@@ -1766,24 +1955,87 @@ const InfiniteCanvas = () => {
};
// Initialize with database nodes from state instead of mockData
- const initialNodes = databases.map((db, index) => ({
- id: db.id,
- type: 'database',
- data: {
- label: db.name,
- schemas: db.schemas || 0,
- tables: db.tables || 0,
- expanded: false,
- onToggle: (id) => toggleDatabaseExpansion(id),
- onViewDetails: handleViewDataFlow // Add the function to handle redirection
- },
- position: { x: 250 * index, y: 50 },
- }));
+ const initialNodes = databases
+ .filter(db => db && db.id) // Filter out any invalid database objects
+ .map((db, index) => {
+ // Create a safe ID for the database
+ const safeId = db.id;
+
+ return {
+ id: safeId,
+ type: 'database',
+ data: {
+ id: safeId, // Explicitly include id in data to ensure it's available
+ label: db.name || `Database ${index + 1}`,
+ name: db.name, // Include the name from the API response
+ schemas: db.schemas || 0,
+ tables: db.tables || 0,
+ expanded: false,
+ onToggle: (id) => toggleDatabaseExpansion(id),
+ onViewDetails: handleViewDataFlow // Add the function to handle redirection
+ },
+ // Position databases in a grid layout for better visibility
+ position: {
+ x: 250 + (index % 2) * 400, // 2 columns
+ y: 100 + Math.floor(index / 2) * 250 // rows based on index
+ },
+ };
+ });
const initialEdges = [];
const [nodes, setNodes, onNodesChange] = useNodesState(initialNodes);
const [edges, setEdges, onEdgesChange] = useEdgesState(initialEdges);
+
+ // Update nodes when databases change (e.g., after API fetch)
+ useEffect(() => {
+ console.log('Databases updated, updating nodes:', databases);
+
+ const updatedNodes = databases
+ .filter(db => db && db.id)
+ .map((db, index) => {
+ const existingNode = nodes.find(node => node.id === db.id);
+
+ // If node already exists, update its data
+ if (existingNode) {
+ return {
+ ...existingNode,
+ data: {
+ ...existingNode.data,
+ label: db.name || `Database ${index + 1}`,
+ name: db.name, // Make sure name is set from the API response
+ schemas: db.schemas || 0,
+ tables: db.tables || 0,
+ }
+ };
+ }
+
+ // Otherwise create a new node
+ return {
+ id: db.id,
+ type: 'database',
+ data: {
+ id: db.id,
+ label: db.name || `Database ${index + 1}`,
+ name: db.name, // Include the name from the API response
+ schemas: db.schemas || 0,
+ tables: db.tables || 0,
+ expanded: false,
+ onToggle: (id) => toggleDatabaseExpansion(id),
+ onViewDetails: handleViewDataFlow
+ },
+ position: {
+ x: 250 + (index % 2) * 400,
+ y: 100 + Math.floor(index / 2) * 250
+ },
+ };
+ });
+
+ // Only update if we have nodes to show
+ if (updatedNodes.length > 0) {
+ setNodes(updatedNodes);
+ }
+  }, [databases]); // eslint-disable-line react-hooks/exhaustive-deps -- deliberately omits `nodes`/handlers to avoid an update loop; note this closure reads a stale `nodes`
// Track viewport changes using the onMove callback instead of event listeners
const onMove = useCallback((event, viewport) => {
@@ -1837,23 +2089,37 @@ const InfiniteCanvas = () => {
const onInit = (instance) => {
setReactFlowInstance(instance);
- // Auto-expand Dbtez database and its schemas on load
+ // Auto-expand the first database and its schemas on load
setTimeout(() => {
- // Expand the Dbtez database
- toggleDatabaseExpansion('db4');
-
- // After expanding Dbtez, also expand all its schemas
- setTimeout(() => {
- const schemas = mockData.schemas.filter(schema => schema.dbId === 'db4');
- schemas.forEach(schema => {
- toggleSchemaExpansion(schema.id);
- });
-
- // Fit view after all expansions
- setTimeout(() => {
- fitView({ padding: 0.5, maxZoom: 0.7 });
- }, 100);
- }, 100);
+ // Find the first database in our list
+ if (databases.length > 0) {
+ const firstDb = databases[0];
+ if (firstDb && firstDb.id) {
+ console.log(`Auto-expanding first database: ${firstDb.id}`);
+
+ // Expand the first database
+ toggleDatabaseExpansion(firstDb.id);
+
+ // After expanding the database, also expand all its schemas
+ setTimeout(() => {
+ const schemas = mockData.schemas.filter(schema => schema && schema.dbId === firstDb.id);
+ schemas.forEach(schema => {
+ if (schema && schema.id) {
+ toggleSchemaExpansion(schema.id);
+ }
+ });
+
+ // Fit view after all expansions
+ setTimeout(() => {
+ try {
+ fitView({ padding: 0.5, maxZoom: 0.7 });
+ } catch (error) {
+ console.error('Error calling fitView:', error);
+ }
+ }, 100);
+ }, 100);
+ }
+ }
}, 100);
};
@@ -1884,6 +2150,12 @@ const InfiniteCanvas = () => {
// Toggle database expansion to show/hide schemas
const toggleDatabaseExpansion = (dbId) => {
+ // Safety check for undefined or invalid dbId
+ if (!dbId) {
+ console.error('Invalid database ID provided to toggleDatabaseExpansion');
+ return;
+ }
+
const isExpanded = expandedDatabases[dbId];
// Update the database node to show the correct toggle state
@@ -1903,15 +2175,25 @@ const InfiniteCanvas = () => {
if (isExpanded) {
// Collapse: remove all schemas and tables for this database
- setNodes(nodes => nodes.filter(node =>
- node.type === 'database' ||
- (node.type === 'schema' && !node.data.dbId === dbId) ||
- (node.type === 'table' && !mockData.schemas.find(s => s.id === node.data.schemaId)?.dbId === dbId)
- ));
+ setNodes(nodes => nodes.filter(node => {
+ if (node.type === 'database') return true;
+ if (node.type === 'schema' && node.data) {
+ return node.data.dbId !== dbId;
+ }
+ if (node.type === 'table' && node.data && node.data.schemaId) {
+ const schema = mockData.schemas.find(s => s.id === node.data.schemaId);
+ return !schema || schema.dbId !== dbId;
+ }
+ return true;
+ }));
- setEdges(edges => edges.filter(edge =>
- !edge.source.startsWith(dbId) && !edge.target.startsWith(dbId)
- ));
+ // Safely filter edges
+ setEdges(edges => edges.filter(edge => {
+ if (!edge.source || !edge.target) return true;
+ const sourceStr = String(edge.source);
+ const targetStr = String(edge.target);
+ return !sourceStr.startsWith(dbId) && !targetStr.startsWith(dbId);
+ }));
// Update expanded state
setExpandedDatabases({
@@ -1922,7 +2204,7 @@ const InfiniteCanvas = () => {
// Also collapse any expanded schemas
const updatedExpandedSchemas = { ...expandedSchemas };
mockData.schemas.forEach(schema => {
- if (schema.dbId === dbId) {
+ if (schema && schema.dbId === dbId) {
updatedExpandedSchemas[schema.id] = false;
}
});
@@ -1931,14 +2213,27 @@ const InfiniteCanvas = () => {
} else {
// Expand: add schema nodes for this database
const dbNode = nodes.find(n => n.id === dbId);
- const dbSchemas = mockData.schemas.filter(schema => schema.dbId === dbId);
+
+ // Safety check if database node exists
+ if (!dbNode) {
+ console.error(`Database node with ID ${dbId} not found`);
+ return;
+ }
+
+ const dbSchemas = mockData.schemas.filter(schema => schema && schema.dbId === dbId);
+
+ // Check if we have schemas for this database
+ if (dbSchemas.length === 0) {
+ console.warn(`No schemas found for database ${dbId}`);
+ }
const schemaNodes = dbSchemas.map((schema, index) => ({
id: schema.id,
type: 'schema',
data: {
- label: schema.name,
- tables: schema.tables,
+ id: schema.id, // Include id in data
+ label: schema.name || `Schema ${index + 1}`,
+ tables: schema.tables || 0,
dbId: dbId,
expanded: false,
onToggle: toggleSchemaExpansion
@@ -1977,13 +2272,24 @@ const InfiniteCanvas = () => {
});
}
+ // Safely call fitView with a delay
setTimeout(() => {
- fitView();
+ try {
+ fitView();
+ } catch (error) {
+ console.error('Error calling fitView:', error);
+ }
}, 10);
};
// Toggle schema expansion to show/hide tables
const toggleSchemaExpansion = (schemaId) => {
+ // Safety check for undefined or invalid schemaId
+ if (!schemaId) {
+ console.error('Invalid schema ID provided to toggleSchemaExpansion');
+ return;
+ }
+
const isExpanded = expandedSchemas[schemaId];
// Update the schema node to show the correct toggle state
@@ -2003,13 +2309,18 @@ const InfiniteCanvas = () => {
if (isExpanded) {
// Collapse: remove all tables for this schema
- setNodes(nodes => nodes.filter(node =>
- node.type !== 'table' || node.data.schemaId !== schemaId
- ));
+ setNodes(nodes => nodes.filter(node => {
+ if (node.type !== 'table') return true;
+ return !node.data || node.data.schemaId !== schemaId;
+ }));
- setEdges(edges => edges.filter(edge =>
- !edge.source.startsWith(schemaId) && !edge.target.startsWith(schemaId)
- ));
+ // Safely filter edges
+ setEdges(edges => edges.filter(edge => {
+ if (!edge.source || !edge.target) return true;
+ const sourceStr = String(edge.source);
+ const targetStr = String(edge.target);
+ return !sourceStr.startsWith(schemaId) && !targetStr.startsWith(schemaId);
+ }));
// Update expanded state
setExpandedSchemas({
@@ -2020,14 +2331,27 @@ const InfiniteCanvas = () => {
} else {
// Expand: add table nodes for this schema
const schemaNode = nodes.find(n => n.id === schemaId);
- const schemaTables = mockData.tables.filter(table => table.schemaId === schemaId);
+
+ // Safety check if schema node exists
+ if (!schemaNode) {
+ console.error(`Schema node with ID ${schemaId} not found`);
+ return;
+ }
+
+ const schemaTables = mockData.tables.filter(table => table && table.schemaId === schemaId);
+
+ // Check if we have tables for this schema
+ if (schemaTables.length === 0) {
+ console.warn(`No tables found for schema ${schemaId}`);
+ }
const tableNodes = schemaTables.map((table, index) => ({
id: table.id,
type: 'table',
data: {
- label: table.name,
- isFact: table.isFact,
+ id: table.id, // Include id in data
+ label: table.name || `Table ${index + 1}`,
+ isFact: table.isFact || false,
type: table.type || (table.isFact ? 'fact' : 'dimension'),
schemaId: schemaId,
columns: table.columns || []
diff --git a/src/components/mockData.js b/src/components/mockData.js
index 2ca9cd2..e728159 100644
--- a/src/components/mockData.js
+++ b/src/components/mockData.js
@@ -1,5 +1,5 @@
// API data for DataflowCanvas component
-import { useState, useEffect } from 'react';
+import { useState, useEffect, useCallback } from 'react';
import axios from 'axios';
// Default viewport settings
@@ -9,9 +9,404 @@ const defaultViewportSettings = {
zoom: 0.22
};
-// Function to transform API response to the format expected by the application
-const transformApiResponse = (apiData) => {
- if (!apiData || !apiData.items || !apiData.items.database || apiData.items.database.length === 0) {
+// API configuration
+const API_BASE_URL = 'https://sandbox.kezel.io/api';
+const token = "abdhsg"; // SECURITY: hardcoded credential duplicated from DataflowCanvas.jsx — move to a shared env/config
+const orgSlug = "sN05Pjv11qvH"; // SECURITY: hardcoded org slug — load from env/config
+
+// Global variable to store the current database slug
+let currentDbSlug = null;
+
+// Expose a function to set the current database slug from other components
+window.setCurrentDbSlug = (slug) => {
+ console.log(`Setting current database slug to: ${slug}`);
+ currentDbSlug = slug;
+
+ // Clear any cached data for this database
+ try {
+ // If this is "service 2" (my_dwh2), clear any cached schemas and tables
+ if (slug === 'my_dwh2') {
+ console.log('Clearing cached data for service 2 database');
+ // Clear localStorage cache for this database
+ localStorage.removeItem(`schemas_${slug}`);
+ localStorage.removeItem(`tables_${slug}`);
+
+ // Store empty arrays to ensure no data is shown
+ localStorage.setItem(`schemas_${slug}`, JSON.stringify([]));
+ localStorage.setItem(`tables_${slug}`, JSON.stringify([]));
+ }
+ } catch (error) {
+ console.error('Error clearing cached data:', error);
+ }
+
+ return slug;
+};
+
+// API endpoints
+const ENDPOINTS = {
+ DATABASE_LIST: `${API_BASE_URL}/qbt_database_list_get`,
+ SCHEMA_LIST: `${API_BASE_URL}/qbt_schema_list_get`,
+ TABLE_LIST: `${API_BASE_URL}/qbt_table_list_get`,
+ COLUMN_LIST: `${API_BASE_URL}/qbt_column_list_get`
+};
+
+// Function to fetch databases
+const fetchDatabases = async () => {
+ try {
+ const response = await axios.post(
+ ENDPOINTS.DATABASE_LIST,
+ {
+ token: token,
+ org: orgSlug,
+ },
+ {
+ headers: {
+ 'Content-Type': 'application/json'
+ }
+ }
+ );
+
+ console.log('Database list response:', response.data);
+
+ // The response structure is different from what we expected
+ // It has items as an array directly, not items.database
+ const databases = response.data.items || [];
+
+ // Log all databases found
+ console.log(`Found ${databases.length} databases:`, databases);
+
+ // Set the current database slug if we have at least one database
+ // We'll still use the first one as the default for backward compatibility
+ if (databases.length > 0) {
+ currentDbSlug = databases[0].con;
+ console.log(`Set current database slug to: ${currentDbSlug} (first database)`);
+ }
+
+ // Validate database objects
+ const validDatabases = databases.filter(db => {
+ if (!db.con) {
+ console.warn('Found database without connection slug:', db);
+ return false;
+ }
+ return true;
+ });
+
+ if (validDatabases.length !== databases.length) {
+ console.warn(`Filtered out ${databases.length - validDatabases.length} invalid databases`);
+ }
+
+ return validDatabases;
+ } catch (error) {
+ console.error('Error fetching databases:', error);
+ throw error;
+ }
+};
+
+// Function to fetch schemas for a database
+const fetchSchemas = async (dbSlug) => {
+ try {
+ // Special case for "service 2" database (my_dwh2) - return empty schemas
+ if (dbSlug === 'my_dwh2') {
+ console.log(`Database ${dbSlug} (service 2) has no schemas - returning empty array`);
+ return [];
+ }
+
+ console.log(`Fetching schemas for database slug: ${dbSlug}`);
+ const response = await axios.post(
+ ENDPOINTS.SCHEMA_LIST,
+ {
+ token: token,
+ org: orgSlug,
+ con: dbSlug
+ },
+ {
+ headers: {
+ 'Content-Type': 'application/json'
+ }
+ }
+ );
+
+ console.log(`Schema list for database ${dbSlug}:`, response.data);
+ console.log(`Schema list items structure:`, response.data.items);
+
+ // Based on the actual response structure you provided
+ // The items array contains schema objects with 'sch' as the slug field
+ let schemas = [];
+ if (Array.isArray(response.data.items)) {
+ // Map the response to match our expected schema structure
+ schemas = response.data.items.map(item => ({
+ name: item.name,
+ slug: item.sch, // Use 'sch' as the slug
+ description: item.description || "",
+ created_at: item.created_at,
+ is_validated: item.is_validated,
+ // Store the database slug with the schema
+ database: dbSlug
+ }));
+ }
+
+ console.log(`Number of schemas found for database ${dbSlug}: ${schemas.length}`);
+ return schemas;
+ } catch (error) {
+ console.error(`Error fetching schemas for database ${dbSlug}:`, error);
+ throw error;
+ }
+};
+
+// Function to fetch columns for a specific table
+const fetchColumns = async (tableSlug, schemaSlug, dbSlug = null) => {
+ try {
+ // Use the provided dbSlug or fall back to the global currentDbSlug
+ const databaseSlug = dbSlug || currentDbSlug;
+
+ if (!databaseSlug) {
+ console.error('No database slug available for fetching columns');
+ throw new Error('Database slug is required to fetch columns');
+ }
+
+ // Create the payload for the API request
+ const payload = {
+ token: token,
+ org: orgSlug,
+ tbl: tableSlug,
+ sch: schemaSlug,
+ con: databaseSlug
+ };
+
+ console.log(`Fetching columns for table slug: ${tableSlug} in schema: ${schemaSlug} and database: ${databaseSlug}`);
+ console.log('Column list request payload:', payload);
+
+ const response = await axios.post(
+ ENDPOINTS.COLUMN_LIST,
+ payload,
+ {
+ headers: {
+ 'Content-Type': 'application/json'
+ }
+ }
+ );
+
+ console.log(`Column list for table ${tableSlug}:`, response.data);
+
+ // Process the columns from the response
+ let columns = [];
+ if (Array.isArray(response.data.items)) {
+ // Map the response to match our expected column structure
+ columns = response.data.items.map(item => ({
+ name: item.name,
+ slug: item.col, // Use 'col' as the slug
+ description: item.description || "",
+ data_type: item.data_type,
+ is_primary_key: item.is_primary_key || false,
+ is_foreign_key: item.is_foreign_key || false,
+      is_nullable: item.is_nullable ?? true, // `??` (not `||`) so an explicit false from the API is preserved
+ created_at: item.created_at,
+ // Store the schema and database information
+ schema: schemaSlug,
+ database: databaseSlug
+ }));
+ }
+
+ console.log(`Number of columns found for table ${tableSlug}: ${columns.length}`);
+ return columns;
+ } catch (error) {
+ console.error(`Error fetching columns for table ${tableSlug}:`, error);
+
+ // Log more detailed error information
+ if (error.response) {
+ // The request was made and the server responded with a status code
+ // that falls out of the range of 2xx
+ console.error('Error response data:', error.response.data);
+ console.error('Error response status:', error.response.status);
+ console.error('Error response headers:', error.response.headers);
+ } else if (error.request) {
+ // The request was made but no response was received
+ console.error('Error request:', error.request);
+ } else {
+ // Something happened in setting up the request that triggered an Error
+ console.error('Error message:', error.message);
+ }
+
+ // For now, return mock columns to allow the UI to display something
+ console.log(`Using mock columns for table ${tableSlug} due to API error`);
+
+ // Create mock columns based on the table name to make them more realistic
+ const mockColumns = [];
+
+ // Add id column (common in most tables)
+ mockColumns.push({
+ name: "id",
+ slug: "id",
+ description: "Primary key",
+ data_type: "INTEGER",
+ is_primary_key: true,
+ is_foreign_key: false,
+ is_nullable: false,
+ schema: schemaSlug,
+ database: databaseSlug
+ });
+
+ // Add name column (common in dimension tables)
+ if (tableSlug.includes('dim')) {
+ mockColumns.push({
+ name: "name",
+ slug: "name",
+ description: "Name field",
+ data_type: "VARCHAR",
+ is_primary_key: false,
+ is_foreign_key: false,
+ is_nullable: true,
+ schema: schemaSlug,
+ database: databaseSlug
+ });
+ }
+
+ // Add amount column (common in fact tables)
+ if (tableSlug.includes('fact')) {
+ mockColumns.push({
+ name: "amount",
+ slug: "amount",
+ description: "Transaction amount",
+ data_type: "DECIMAL",
+ is_primary_key: false,
+ is_foreign_key: false,
+ is_nullable: true,
+ schema: schemaSlug,
+ database: databaseSlug
+ });
+ }
+
+ // Add date column (common in most tables)
+ mockColumns.push({
+ name: "created_at",
+ slug: "created_at",
+ description: "Creation timestamp",
+ data_type: "TIMESTAMP",
+ is_primary_key: false,
+ is_foreign_key: false,
+ is_nullable: true,
+ schema: schemaSlug,
+ database: databaseSlug
+ });
+
+ // Add updated_at column (common in most tables)
+ mockColumns.push({
+ name: "updated_at",
+ slug: "updated_at",
+ description: "Last update timestamp",
+ data_type: "TIMESTAMP",
+ is_primary_key: false,
+ is_foreign_key: false,
+ is_nullable: true,
+ schema: schemaSlug,
+ database: databaseSlug
+ });
+
+ return mockColumns;
+ }
+};
+
+// Function to fetch tables for a schema
+const fetchTables = async (schemaSlug, dbSlug = null) => {
+ try {
+ // Use the provided dbSlug or fall back to the global currentDbSlug
+ const databaseSlug = dbSlug || currentDbSlug;
+
+ if (!databaseSlug) {
+ console.error('No database slug available for fetching tables');
+ throw new Error('Database slug is required to fetch tables');
+ }
+
+ // Special case for "service 2" database (my_dwh2) - return empty tables
+ if (databaseSlug === 'my_dwh2') {
+ console.log(`Database ${databaseSlug} (service 2) has no tables - returning empty array`);
+ return [];
+ }
+
+ console.log(`Fetching tables for schema slug: ${schemaSlug} in database: ${databaseSlug}`);
+ const response = await axios.post(
+ ENDPOINTS.TABLE_LIST,
+ {
+ token: token,
+ org: orgSlug,
+ sch: schemaSlug,
+ con: databaseSlug
+ },
+ {
+ headers: {
+ 'Content-Type': 'application/json'
+ }
+ }
+ );
+
+ console.log(`Table list for schema ${schemaSlug}:`, response.data);
+ console.log(`Table list items structure:`, response.data.items);
+
+ // Based on the expected response structure
+ let tables = [];
+ if (Array.isArray(response.data.items)) {
+ // Map the response to match our expected table structure
+ tables = response.data.items.map(item => ({
+ name: item.name || `Table ${item.tbl}`,
+ slug: item.tbl, // Use 'tbl' as the slug if it exists
+ schema: schemaSlug, // Associate with the schema
+ description: item.description || "",
+ // Preserve the table_type from the API response
+ table_type: item.table_type || "stage",
+ // Preserve the orientation from the API response
+ orientation: item.orientation || { x: 1000, y: 100 },
+ // Include any other fields we need
+ external_name: item.external_name,
+ is_provisioned: item.is_provisioned,
+ created_at: item.created_at,
+ // Store the database slug with the table
+ database: databaseSlug,
+ columns: [] // Initialize with empty array, will be populated later
+ }));
+
+ // Fetch columns for each table
+ // Use Promise.all to fetch columns for all tables in parallel
+ // This will speed up the process significantly
+ try {
+ const columnsPromises = tables.map(table =>
+ fetchColumns(table.slug, schemaSlug, databaseSlug)
+ .then(columns => {
+ table.columns = columns;
+ return table;
+ })
+ .catch(error => {
+ console.error(`Error fetching columns for table ${table.slug}:`, error);
+ table.columns = []; // Set empty columns on error
+ return table;
+ })
+ );
+
+ await Promise.all(columnsPromises);
+ console.log('All columns fetched successfully');
+ } catch (error) {
+ console.error('Error fetching columns for tables:', error);
+ // Continue with the tables we have, even if columns fetching failed
+ }
+ }
+
+ console.log(`Number of tables found: ${tables.length}`);
+ return tables;
+ } catch (error) {
+ console.error(`Error fetching tables for schema ${schemaSlug}:`, error);
+ throw error;
+ }
+};
+
+// Function to transform the data from multiple API calls
+const transformData = (databases, schemasMap, tablesMap) => {
+ console.log('Transform data called with:', {
+ databases,
+ schemasMapKeys: Object.keys(schemasMap),
+ tablesMapKeys: Object.keys(tablesMap)
+ });
+
+ if (!databases || databases.length === 0) {
+ console.log('No databases found, returning empty data');
return {
schemas: [],
tables: [],
@@ -20,92 +415,114 @@ const transformApiResponse = (apiData) => {
};
}
- const database = apiData.items.database[0];
+ // Process all databases instead of just the first one
+ console.log(`Processing ${databases.length} databases:`, databases);
- // Extract schemas
- const schemas = database.schemas?.map(schema => ({
- name: schema.name,
- slug: schema.slug,
- description: schema.description || "",
- color: schema.color || "#1890ff",
- position: schema.position || { x: 50, y: 50 },
- width: schema.width || 1200,
- height: schema.height || 700
- })) || [];
-
- // Extract tables
- const tables = [];
- database.schemas?.forEach(schema => {
- if (schema.table && Array.isArray(schema.table)) {
- schema.table.forEach(table => {
- tables.push({
+ // Initialize arrays to hold all schemas and tables
+ let allSchemas = [];
+ let allTables = [];
+
+ // Process each database
+ databases.forEach((database, dbIndex) => {
+ console.log(`Processing database ${dbIndex + 1}/${databases.length}:`, database);
+
+ // Skip if database has no connection slug
+ if (!database.con) {
+ console.warn(`Database ${dbIndex} has no connection slug, skipping:`, database);
+ return;
+ }
+
+ // Get schemas for this database
+ const dbSchemas = schemasMap[database.con] || [];
+ console.log(`Found ${dbSchemas.length} schemas for database ${database.con}:`, dbSchemas);
+
+ // Calculate offset for positioning schemas from different databases
+ // This ensures schemas from different databases don't overlap
+ const dbOffsetX = dbIndex * 2000; // Horizontal offset between databases
+ const dbOffsetY = 0; // No vertical offset between databases
+
+ // Transform schemas for this database
+ const transformedSchemas = dbSchemas.map((schema, schemaIndex) => {
+ console.log(`Processing schema ${schemaIndex + 1}/${dbSchemas.length}:`, schema);
+
+ // Position schemas in a grid layout with offset based on database index
+ const position = {
+ x: dbOffsetX + 50 + (schemaIndex % 2) * 1300, // Position horizontally in a grid
+ y: dbOffsetY + 50 + Math.floor(schemaIndex / 2) * 800 // Position vertically in a grid
+ };
+
+ // Add database information to the schema
+ return {
+ name: schema.name,
+ slug: schema.slug,
+ description: schema.description || "",
+ color: schema.color || (schemaIndex % 2 === 0 ? "#1890ff" : "#52c41a"), // Alternate colors
+ position: schema.position || position,
+ width: schema.width || 1200,
+ height: schema.height || 700,
+ database: database.con, // Add database slug to schema
+ databaseName: database.name || `Database ${database.con}` // Add database name
+ };
+ });
+
+ // Add schemas from this database to the combined list
+ allSchemas = [...allSchemas, ...transformedSchemas];
+
+ // Process tables for each schema in this database
+ transformedSchemas.forEach(schema => {
+ const schemaTables = tablesMap[schema.slug] || [];
+ console.log(`Found ${schemaTables.length} tables for schema ${schema.name} (${schema.slug}):`, schemaTables);
+
+ schemaTables.forEach(table => {
+ // Extract column names for display in the UI
+ const columnNames = (table.columns || []).map(col =>
+ typeof col === 'string' ? col : col.name
+ );
+
+ // Add table with database information
+ allTables.push({
name: table.name,
slug: table.slug,
- type: table.type || "stage",
- schema: table.schema,
- orientation: table.orientation || { x: 100, y: 100 },
- columns: table.columns || []
+ // Use table_type from API response or fallback to type if available, otherwise default to "stage"
+ type: table.table_type || table.type || "stage",
+ schema: schema.slug,
+ // Use orientation directly from API response or fallback to default
+ orientation: table.orientation || { x: 1000, y: 100 },
+ // Include the columns fetched from the API (as an array of strings for the UI)
+ columns: columnNames,
+ // Keep the full column objects for reference if needed
+ columnsData: table.columns || [],
+ // Add database information
+ database: schema.database,
+ databaseName: schema.databaseName
});
});
- }
+ });
});
+
+ console.log(`Transformed ${allSchemas.length} schemas and ${allTables.length} tables from ${databases.length} databases`);
- // Extract processes
- const processes = database.process?.map(process => ({
- name: process.name,
- slug: process.slug,
- source_table: process.source_table || [],
- destination_table: process.destination_table || [],
- description: process.description || "",
- type: process.type || "ETL",
- status: process.status || "active",
- mappings: process.mappings?.map(mapping => ({
- source: mapping.source,
- target: mapping.target,
- type: mapping.type
- })) || []
- })) || [];
+ // For processes, we would need another API endpoint
+ // For now, we'll use an empty array
+ const processes = [];
return {
- schemas,
- tables,
+ schemas: allSchemas,
+ tables: allTables,
processes,
viewportSettings: defaultViewportSettings
};
};
-// Create a custom hook to fetch and transform the data
-export const useApiData = () => {
- const [data, setData] = useState(null);
- const [loading, setLoading] = useState(true);
- const [error, setError] = useState(null);
-
- useEffect(() => {
- const fetchData = async () => {
- try {
- const response = await axios.post('https://sandbox.kezel.io/api/qbt_metadata_list_get', {}, {
- headers: {
- 'Content-Type': 'application/json'
- }
- });
-
- const apiData = response.data;
- console.log('API Response Structure:', JSON.stringify(apiData, null, 2));
- const transformedData = transformApiResponse(apiData);
- console.log('Transformed Data Structure:', JSON.stringify(transformedData, null, 2));
- setData(transformedData);
- setLoading(false);
- } catch (err) {
- console.error('Error fetching data:', err);
- setError(err.response?.data?.message || err.message);
- setLoading(false);
- }
- };
-
- fetchData();
- }, []);
-
- return { data, loading, error };
+// Create a data cache to store API results
+const dataCache = {
+ databases: null,
+ schemasMap: null,
+ tablesMap: null,
+ transformedData: null,
+ isLoading: false,
+ error: null,
+ lastFetched: null
};
// For backward compatibility, provide a mockApiData object
@@ -117,26 +534,196 @@ export const mockApiData = {
viewportSettings: defaultViewportSettings
};
-// Fetch data immediately to populate mockApiData using axios
-axios.post('https://sandbox.kezel.io/api/qbt_metadata_list_get', {}, {
- headers: {
- 'Content-Type': 'application/json'
+// Function to fetch all data and populate the cache
+const fetchAndCacheAllData = async (forceRefresh = false) => {
+ // If we're already loading data, don't start another fetch
+ if (dataCache.isLoading && !forceRefresh) {
+ console.log('Data fetch already in progress, waiting...');
+    return dataCache.transformedData; // NOTE(review): may still be null if the in-flight fetch has not completed yet
}
-})
-.then(response => {
- const apiData = response.data;
- const transformedData = transformApiResponse(apiData);
- // Update mockApiData properties
- mockApiData.schemas = transformedData.schemas;
- mockApiData.tables = transformedData.tables;
- mockApiData.processes = transformedData.processes;
- mockApiData.viewportSettings = transformedData.viewportSettings;
+ // Mark that we're loading data
+ dataCache.isLoading = true;
- console.log('API data loaded successfully');
-})
-.catch(error => {
- console.error('Error fetching API data:', error.response?.data?.message || error.message);
-});
+ try {
+ console.log('Fetching fresh data from API');
+
+ // Step 1: Fetch databases
+ const databases = await fetchDatabases();
+ dataCache.databases = databases;
+
+ if (databases.length === 0) {
+ console.log('No databases found');
+ const emptyData = {
+ schemas: [],
+ tables: [],
+ processes: [],
+ viewportSettings: defaultViewportSettings
+ };
+ dataCache.transformedData = emptyData;
+
+ // Update mockApiData properties for backward compatibility
+ mockApiData.schemas = emptyData.schemas;
+ mockApiData.tables = emptyData.tables;
+ mockApiData.processes = emptyData.processes;
+ mockApiData.viewportSettings = emptyData.viewportSettings;
+
+ return emptyData;
+ }
+
+ // Step 2: Fetch schemas for each database
+ const schemasMap = {};
+ for (const db of databases) {
+ // Use db.con as the slug for fetching schemas
+      schemasMap[db.con] = await fetchSchemas(db.con); // NOTE(review): sequential awaits; independent databases could be fetched in parallel with Promise.all
+ }
+ dataCache.schemasMap = schemasMap;
+
+ // Step 3: Fetch tables for each schema
+ const tablesMap = {};
+ for (const dbSlug in schemasMap) {
+ for (const schema of schemasMap[dbSlug]) {
+ // Use schema.slug for fetching tables and pass the database slug
+ tablesMap[schema.slug] = await fetchTables(schema.slug, dbSlug);
+ }
+ }
+ dataCache.tablesMap = tablesMap;
+
+ // Step 4: Transform the data
+ const transformedData = transformData(databases, schemasMap, tablesMap);
+ dataCache.transformedData = transformedData;
+ dataCache.lastFetched = new Date();
+
+ // Update mockApiData properties for backward compatibility
+ mockApiData.schemas = transformedData.schemas;
+ mockApiData.tables = transformedData.tables;
+ mockApiData.processes = transformedData.processes;
+ mockApiData.viewportSettings = transformedData.viewportSettings;
+
+ console.log('API data loaded successfully');
+ return transformedData;
+ } catch (error) {
+ console.error('Error fetching API data:', error.response?.data?.message || error.message);
+    dataCache.error = error.response?.data?.message || error.message; // NOTE(review): never reset to null on a later successful fetch — stale errors can surface in useApiData's polling branch
+ throw error;
+ } finally {
+ dataCache.isLoading = false;
+ }
+};
+
+// Create a custom hook to fetch and transform the data
+export const useApiData = (forceRefresh = false) => {
+ const [data, setData] = useState(null);
+ const [loading, setLoading] = useState(true);
+ const [error, setError] = useState(null);
+
+ useEffect(() => {
+ let isMounted = true;
+
+ // Check if we're viewing the "service 2" database which has no schemas
+ if (currentDbSlug === 'my_dwh2') {
+ console.log('Using empty data for service 2 database');
+
+ // Return empty data for "service 2" database
+ const emptyData = {
+ schemas: [],
+ tables: [],
+ processes: [],
+        viewportSettings: { x: 0, y: 0, zoom: 0.5 } // NOTE(review): differs from defaultViewportSettings used in the other empty-data path — confirm intentional
+ };
+
+ // Update mockApiData for backward compatibility
+ mockApiData.schemas = [];
+ mockApiData.tables = [];
+ mockApiData.processes = [];
+
+ setData(emptyData);
+ setLoading(false);
+
+ return () => {
+ isMounted = false;
+ };
+ }
+
+ // If a fetch is already in progress, wait for it
+ if (dataCache.isLoading && !forceRefresh) {
+ console.log('Data fetch already in progress, waiting...');
+
+ // Check every 100ms if the data has been loaded
+ const checkInterval = setInterval(() => {
+ if (!isMounted) {
+ clearInterval(checkInterval);
+ return;
+ }
+
+ if (dataCache.transformedData) {
+ console.log('Cached data now available');
+ setData(dataCache.transformedData);
+ setLoading(false);
+ clearInterval(checkInterval);
+ }
+        else if (dataCache.error) { // else-if: a stale cached error must not override data that just arrived in this same tick
+ console.error('Error occurred during data fetch:', dataCache.error);
+ setError(dataCache.error);
+ setLoading(false);
+ clearInterval(checkInterval);
+ }
+ }, 100);
+
+ return () => {
+ isMounted = false;
+ clearInterval(checkInterval);
+ };
+ }
+
+ // Start a new fetch
+ const fetchData = async () => {
+ try {
+ console.log('Fetching fresh data from API in useApiData hook');
+ const result = await fetchAndCacheAllData(forceRefresh);
+
+ if (isMounted) {
+ setData(result);
+ setLoading(false);
+ }
+ } catch (err) {
+ console.error('Error fetching data in useApiData hook:', err);
+
+ if (isMounted) {
+ setError(err.response?.data?.message || err.message);
+ setLoading(false);
+ }
+ }
+ };
+
+ fetchData();
+
+ return () => {
+ isMounted = false;
+ };
+  }, [forceRefresh, currentDbSlug]); // NOTE(review): currentDbSlug is a module-level variable, not state/props — mutating it will NOT re-run this effect; deps only re-evaluate on re-render
+
+ // Add a refresh function to allow manual refresh
+ const refreshData = useCallback(async () => {
+ setLoading(true);
+ setError(null);
+
+ try {
+ const result = await fetchAndCacheAllData(true);
+ setData(result);
+ setLoading(false);
+ } catch (err) {
+ console.error('Error refreshing data:', err);
+ setError(err.response?.data?.message || err.message);
+ setLoading(false);
+ }
+ }, []);
+
+ return { data, loading, error, refreshData };
+};
+
+// Fetch data immediately to populate the cache and mockApiData
+// Always force a refresh to ensure API calls are made
+fetchAndCacheAllData(true);
export default mockApiData;
\ No newline at end of file