Compare commits

..

No commits in common. "77d729545f96a2f8e8e93a0d2543b2bc47ee1adc" and "1d6bc635891e0e87811d36e979f389cda4ae5859" have entirely different histories.

4 changed files with 199 additions and 1133 deletions

View File

@ -4,11 +4,11 @@ import React from 'react';
export const CustomSchemaIcon = ({ width = "49", height = "46" }) => ( export const CustomSchemaIcon = ({ width = "49", height = "46" }) => (
<svg width={width} height={height} viewBox="0 0 49 46" fill="none" xmlns="http://www.w3.org/2000/svg"> <svg width={width} height={height} viewBox="0 0 49 46" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect x="0.86377" y="0.0722656" width="48.048" height="45.0864" rx="4.224" fill="url(#paint0_linear_892_4172)"/> <rect x="0.86377" y="0.0722656" width="48.048" height="45.0864" rx="4.224" fill="url(#paint0_linear_892_4172)"/>
<path fillRule="evenodd" clipRule="evenodd" d="M41.4524 29.6995H38.6536C38.4553 27.918 37.6756 26.2385 36.3718 24.8796C34.5615 22.9936 32.0024 21.9549 29.1667 21.9549H25.5936V13.5075H32.0566C32.4084 13.5075 32.6934 13.2222 32.6934 12.8708V8.0723C32.6934 7.72053 32.4084 7.43555 32.0566 7.43555H17.9481C17.5959 7.43555 17.311 7.72053 17.311 8.0723V12.8708C17.311 13.2222 17.5959 13.5075 17.9481 13.5075H24.4002V21.9549H20.8141C15.7173 21.9549 11.9295 25.1589 11.3636 29.7182C9.30036 29.9044 7.68359 31.6361 7.68359 33.7475C7.68359 35.9829 9.49629 37.7955 11.7316 37.7955C13.9673 37.7955 15.7796 35.9829 15.7796 33.7475C15.7796 31.7928 14.394 30.1622 12.5517 29.7829C13.0711 25.8284 16.3123 23.1483 20.8141 23.1483H24.4002V29.6995H20.268C20.0446 29.6995 19.913 29.9594 20.0401 30.1497L24.7743 37.6713C24.8844 37.8368 25.1199 37.8368 25.2305 37.6713L29.9646 30.1497C30.0917 29.9594 29.9597 29.6995 29.7367 29.6995H25.5936V23.1483H29.1667C31.6749 23.1483 33.928 24.0566 35.5104 25.7058C36.5981 26.8388 37.2611 28.2269 37.4546 29.6995H34.6348C34.2818 29.6995 33.9956 29.9857 33.9956 30.3387V37.1564C33.9956 37.5094 34.2818 37.7955 34.6348 37.7955H41.4524C41.8054 37.7955 42.0916 37.5094 42.0916 37.1564V30.3387C42.0916 29.9857 41.8054 29.6995 41.4524 29.6995Z" fill="white"/> <path fill-rule="evenodd" clip-rule="evenodd" d="M41.4524 29.6995H38.6536C38.4553 27.918 37.6756 26.2385 36.3718 24.8796C34.5615 22.9936 32.0024 21.9549 29.1667 21.9549H25.5936V13.5075H32.0566C32.4084 13.5075 32.6934 13.2222 32.6934 12.8708V8.0723C32.6934 7.72053 32.4084 7.43555 32.0566 7.43555H17.9481C17.5959 7.43555 17.311 7.72053 17.311 8.0723V12.8708C17.311 13.2222 17.5959 13.5075 17.9481 13.5075H24.4002V21.9549H20.8141C15.7173 21.9549 11.9295 25.1589 11.3636 29.7182C9.30036 29.9044 7.68359 31.6361 7.68359 33.7475C7.68359 35.9829 9.49629 37.7955 11.7316 37.7955C13.9673 37.7955 15.7796 35.9829 15.7796 33.7475C15.7796 31.7928 14.394 30.1622 12.5517 29.7829C13.0711 25.8284 16.3123 23.1483 20.8141 23.1483H24.4002V29.6995H20.268C20.0446 29.6995 19.913 29.9594 20.0401 30.1497L24.7743 37.6713C24.8844 37.8368 25.1199 37.8368 25.2305 37.6713L29.9646 30.1497C30.0917 29.9594 29.9597 29.6995 29.7367 29.6995H25.5936V23.1483H29.1667C31.6749 23.1483 33.928 24.0566 35.5104 25.7058C36.5981 26.8388 37.2611 28.2269 37.4546 29.6995H34.6348C34.2818 29.6995 33.9956 29.9857 33.9956 30.3387V37.1564C33.9956 37.5094 34.2818 37.7955 34.6348 37.7955H41.4524C41.8054 37.7955 42.0916 37.5094 42.0916 37.1564V30.3387C42.0916 29.9857 41.8054 29.6995 41.4524 29.6995Z" fill="white"/>
<defs> <defs>
<linearGradient id="paint0_linear_892_4172" x1="0.86377" y1="22.6155" x2="48.9118" y2="22.6155" gradientUnits="userSpaceOnUse"> <linearGradient id="paint0_linear_892_4172" x1="0.86377" y1="22.6155" x2="48.9118" y2="22.6155" gradientUnits="userSpaceOnUse">
<stop stopColor="#FF9D2C"/> <stop stop-color="#FF9D2C"/>
<stop offset="1" stopColor="#CD750F"/> <stop offset="1" stop-color="#CD750F"/>
</linearGradient> </linearGradient>
</defs> </defs>
</svg> </svg>

View File

@ -679,16 +679,6 @@ const DataflowCanvas = () => {
const parsedData = JSON.parse(dbData); const parsedData = JSON.parse(dbData);
setSelectedDatabase(parsedData); setSelectedDatabase(parsedData);
console.log('DataFlow view initialized with database:', parsedData); console.log('DataFlow view initialized with database:', parsedData);
// If this is "service 2" database, ensure we're using the correct slug
if (parsedData.name === 'service 2' || parsedData.slug === 'my_dwh2') {
console.log('Service 2 database detected - ensuring empty schemas');
// Set the current database slug in mockData.js
if (typeof window.setCurrentDbSlug === 'function') {
window.setCurrentDbSlug('my_dwh2');
}
}
} }
} catch (error) { } catch (error) {
console.error('Error reading database from localStorage:', error); console.error('Error reading database from localStorage:', error);
@ -765,23 +755,13 @@ const DataflowCanvas = () => {
const schemas = apiData?.schemas || mockApiData.schemas; const schemas = apiData?.schemas || mockApiData.schemas;
const tables = apiData?.tables || mockApiData.tables; const tables = apiData?.tables || mockApiData.tables;
console.log('DataflowCanvas - Schemas available:', schemas);
console.log('DataflowCanvas - Tables available:', tables);
// Check if we have any schemas
if (schemas.length === 0) {
console.log('No schemas available for this database');
// Return an empty array if there are no schemas
return [];
}
// Initialize with default values from schema definitions // Initialize with default values from schema definitions
schemas.forEach(schema => { schemas.forEach(schema => {
schemaBoundaries[schema.slug] = { schemaBoundaries[schema.slug] = {
minX: schema.position?.x || 0, minX: schema.position.x,
minY: schema.position?.y || 0, minY: schema.position.y,
maxX: (schema.position?.x || 0) + (schema.width || 500), maxX: schema.position.x + schema.width,
maxY: (schema.position?.y || 0) + (schema.height || 500) maxY: schema.position.y + schema.height
}; };
}); });
@ -1776,10 +1756,7 @@ const DataflowCanvas = () => {
{selectedDatabase.name} Database is Empty {selectedDatabase.name} Database is Empty
</h3> </h3>
<p style={{ color: '#666', marginBottom: '20px' }}> <p style={{ color: '#666', marginBottom: '20px' }}>
{selectedDatabase.slug === 'my_dwh2' ? This database doesn't have any tables or data flows yet. Start by adding tables and processes to visualize your data flow.
"This database (service 2) doesn't contain any schemas or tables. This is expected behavior." :
"This database doesn't have any tables or data flows yet. Start by adding tables and processes to visualize your data flow."
}
</p> </p>
<div style={{ display: 'flex', justifyContent: 'center', gap: '10px' }}> <div style={{ display: 'flex', justifyContent: 'center', gap: '10px' }}>
<button <button

View File

@ -17,7 +17,6 @@ import ReactFlow, {
getBezierPath getBezierPath
} from 'reactflow'; } from 'reactflow';
import 'reactflow/dist/style.css'; import 'reactflow/dist/style.css';
import axios from 'axios';
// Import icons from react-icons // Import icons from react-icons
import { FaDatabase, FaTable, FaFlask, FaArrowRight, FaPlus, FaTimes } from 'react-icons/fa'; import { FaDatabase, FaTable, FaFlask, FaArrowRight, FaPlus, FaTimes } from 'react-icons/fa';
@ -583,7 +582,7 @@ const EntitySelector = ({
</linearGradient> </linearGradient>
</defs> </defs>
</svg> </svg>
{/* <span>Database</span> */} <span>Database</span>
</button> </button>
</div> </div>
@ -945,51 +944,22 @@ const CustomEdge = ({ id, source, target, sourceX, sourceY, targetX, targetY, so
}; };
// Custom node types // Custom node types
const DatabaseNode = ({ data = {} }) => { const DatabaseNode = ({ data }) => {
// Safety check for undefined data // Use teal color for Dbtez
if (!data) { const isDbtez = data.label === 'Dbtez';
console.error('DatabaseNode received undefined data'); const bgColor = isDbtez ? '#1a1a1a' : '#1a1a1a';
data = {}; // Provide a default empty object const borderColor = isDbtez ? '#00a99d' : '#1890ff';
} const handleColor = isDbtez ? '#00a99d' : '#1890ff';
// Debug: Log the data being received by the component
// console.log('DatabaseNode data:', data);
// Assign different colors to different databases
const colorMap = {
'my_dwh': '#00a99d', // First database - teal
'my_dwh2': '#1890ff', // Second database - blue
'default': '#9c27b0' // Default - purple
};
// Get the database slug from the id (format: db-{slug})
// Add a safety check for undefined data.id
// Use data.id if available, otherwise fall back to the node's id
const nodeId = (data && data.id) ? data.id : 'default';
const dbSlug = (nodeId && typeof nodeId === 'string' && nodeId.startsWith && nodeId.startsWith('db-'))
? nodeId.substring(3)
: 'default';
// Use the color from the map or default to a fallback color
const borderColor = colorMap[dbSlug] || '#ff9800';
const handleColor = borderColor;
const bgColor = '#1a1a1a';
// Create a safe gradient ID by removing any special characters
const safeDbSlug = (dbSlug && typeof dbSlug === 'string')
? dbSlug.replace(/[^a-zA-Z0-9]/g, '_')
: 'default';
const gradientId = `db_paint0_linear_${safeDbSlug}`;
// Database SVG icon // Database SVG icon
const DatabaseIcon = () => ( const DatabaseIcon = () => (
<svg width="30" height="28" viewBox="0 0 41 39" fill="none" xmlns="http://www.w3.org/2000/svg"> <svg width="30" height="28" viewBox="0 0 41 39" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect width="40.5" height="38.7" rx="3.6" fill={`url(#${gradientId})`}/> <rect width="40.5" height="38.7" rx="3.6" fill="url(#db_paint0_linear)"/>
<path d="M19.8845 24.789C17.0714 24.789 14.2465 24.0672 12.912 22.5097C12.8725 22.683 12.8462 22.8607 12.8462 23.0493V25.8844C12.8462 28.3962 16.4937 29.5392 19.8845 29.5392C23.2753 29.5392 26.9228 28.3962 26.9228 25.8844V23.0493C26.9228 22.8607 26.8964 22.6837 26.8569 22.5104C25.5217 24.068 22.6976 24.7904 19.8837 24.7904L19.8845 24.789ZM19.8845 19.3083C17.0714 19.3083 14.2465 18.5865 12.9127 17.0289C12.8706 17.2053 12.8486 17.3858 12.8469 17.5671V20.4022C12.8469 22.9133 16.4937 24.0563 19.8845 24.0563C23.2753 24.0563 26.9228 22.9133 26.9228 20.4015V17.5657C26.9228 17.3778 26.8964 17.2001 26.8569 17.0268C25.5217 18.5843 22.6976 19.3068 19.8837 19.3068L19.8845 19.3083ZM19.8845 8.42944C16.4937 8.42944 12.8462 9.57385 12.8462 12.0857V14.9208C12.8462 17.4326 16.4937 18.5755 19.8845 18.5755C23.2753 18.5755 26.9228 17.4333 26.9228 14.9215V12.0864C26.9228 9.57458 23.2753 8.43017 19.8845 8.43017V8.42944ZM19.8845 14.2794C16.8059 14.2794 14.3087 13.2974 14.3087 12.0857C14.3087 10.8747 16.8052 9.89194 19.8845 9.89194C22.9638 9.89194 25.4603 10.8747 25.4603 12.0857C25.4603 13.2981 22.9638 14.2794 19.8845 14.2794Z" fill="white"/> <path d="M19.8845 24.789C17.0714 24.789 14.2465 24.0672 12.912 22.5097C12.8725 22.683 12.8462 22.8607 12.8462 23.0493V25.8844C12.8462 28.3962 16.4937 29.5392 19.8845 29.5392C23.2753 29.5392 26.9228 28.3962 26.9228 25.8844V23.0493C26.9228 22.8607 26.8964 22.6837 26.8569 22.5104C25.5217 24.068 22.6976 24.7904 19.8837 24.7904L19.8845 24.789ZM19.8845 19.3083C17.0714 19.3083 14.2465 18.5865 12.9127 17.0289C12.8706 17.2053 12.8486 17.3858 12.8469 17.5671V20.4022C12.8469 22.9133 16.4937 24.0563 19.8845 24.0563C23.2753 24.0563 26.9228 22.9133 26.9228 20.4015V17.5657C26.9228 17.3778 26.8964 17.2001 26.8569 17.0268C25.5217 18.5843 22.6976 19.3068 19.8837 19.3068L19.8845 19.3083ZM19.8845 8.42944C16.4937 8.42944 12.8462 9.57385 12.8462 12.0857V14.9208C12.8462 17.4326 16.4937 18.5755 19.8845 18.5755C23.2753 18.5755 26.9228 17.4333 26.9228 14.9215V12.0864C26.9228 9.57458 23.2753 8.43017 19.8845 8.43017V8.42944ZM19.8845 14.2794C16.8059 14.2794 14.3087 13.2974 14.3087 12.0857C14.3087 10.8747 16.8052 9.89194 19.8845 9.89194C22.9638 9.89194 25.4603 10.8747 25.4603 12.0857C25.4603 13.2981 22.9638 14.2794 19.8845 14.2794Z" fill="white"/>
<defs> <defs>
<linearGradient id={gradientId} x1="40.5" y1="19.35" x2="0" y2="19.35" gradientUnits="userSpaceOnUse"> <linearGradient id="db_paint0_linear" x1="40.5" y1="19.35" x2="0" y2="19.35" gradientUnits="userSpaceOnUse">
<stop stopColor={borderColor}/> <stop stopColor="#006064"/>
<stop offset="0.711538" stopColor={handleColor}/> <stop offset="0.711538" stopColor="#00A1A8"/>
</linearGradient> </linearGradient>
</defs> </defs>
</svg> </svg>
@ -1003,7 +973,7 @@ const DatabaseNode = ({ data = {} }) => {
border: `1px solid ${borderColor}`, border: `1px solid ${borderColor}`,
width: '180px', width: '180px',
position: 'relative', position: 'relative',
boxShadow: '0 0 10px rgba(0, 0, 0, 0.3)', boxShadow: isDbtez ? '0 0 10px rgba(0, 169, 157, 0.3)' : 'none',
color: '#ffffff' color: '#ffffff'
}}> }}>
{/* Connection handles */} {/* Connection handles */}
@ -1045,18 +1015,11 @@ const DatabaseNode = ({ data = {} }) => {
<span style={{ marginRight: '8px' }}> <span style={{ marginRight: '8px' }}>
<DatabaseIcon /> <DatabaseIcon />
</span> </span>
{/* Display the database name from the API response */} {data.label}
<span title={data.name || data.label || 'Unknown Database'} style={{ overflow: 'hidden', textOverflow: 'ellipsis', whiteSpace: 'nowrap', maxWidth: '120px' }}>
{data.name || data.label || 'Unknown Database'}
</span>
</div> </div>
<div style={{ fontSize: '0.8em', color: '#aaa', marginBottom: '5px' }}> <div style={{ fontSize: '0.8em', color: '#aaa', marginBottom: '10px' }}>
{`${data.schemas} schemas • ${data.tables} tables`} {isDbtez ? 'Database' : `${data.schemas} schemas • ${data.tables} tables`}
</div>
<div style={{ fontSize: '0.7em', color: '#888', marginBottom: '10px' }}>
Connection: {dbSlug}
</div> </div>
{/* View Details Button */} {/* View Details Button */}
@ -1065,16 +1028,14 @@ const DatabaseNode = ({ data = {} }) => {
onClick={(event) => { onClick={(event) => {
// Stop propagation to prevent the node click handler from being triggered // Stop propagation to prevent the node click handler from being triggered
event.stopPropagation(); event.stopPropagation();
// Call the viewDetails function passed in data with safety checks // Call the viewDetails function passed in data
if (data && data.onViewDetails && data.id) { if (data.onViewDetails) {
data.onViewDetails(data.id, data.name || data.label || 'Unknown Database'); data.onViewDetails(data.id, data.label);
} else {
console.warn('Cannot view details: missing required data properties');
} }
}} }}
style={{ style={{
padding: '4px 8px', padding: '4px 8px',
backgroundColor: borderColor, backgroundColor: isDbtez ? '#00a99d' : '#1890ff',
color: 'white', color: 'white',
border: 'none', border: 'none',
borderRadius: '3px', borderRadius: '3px',
@ -1298,56 +1259,19 @@ const TableNode = ({ data }) => {
// Generate data for InfiniteCanvas using mockApiData from DataflowCanvas // Generate data for InfiniteCanvas using mockApiData from DataflowCanvas
const generateMockData = () => { const generateMockData = () => {
// Get unique database slugs from schemas // Create database
const uniqueDatabases = [...new Set(mockApiData.schemas.map(schema => schema.database))]; const databases = [
{ id: 'db4', name: 'Dbtez', schemas: mockApiData.schemas.length, tables: mockApiData.tables.length },
// Create databases from unique database slugs ];
const databases = uniqueDatabases.map((dbSlug, index) => {
// Find all schemas for this database
const dbSchemas = mockApiData.schemas.filter(schema => schema.database === dbSlug);
// Find all tables for this database
const dbTables = mockApiData.tables.filter(table => table.database === dbSlug);
// Get the database name from the first schema (if available)
const dbName = dbSchemas.length > 0 && dbSchemas[0].databaseName
? dbSchemas[0].databaseName
: `Database ${dbSlug}`;
return {
id: `db-${dbSlug}`,
name: dbName, // This will be displayed in the UI
slug: dbSlug,
schemas: dbSchemas.length,
tables: dbTables.length
};
});
// If no databases were found, add a default one
if (databases.length === 0) {
databases.push({
id: 'db-default',
name: 'Default Database',
slug: 'default',
schemas: mockApiData.schemas.length,
tables: mockApiData.tables.length
});
}
// Create schemas from mockApiData // Create schemas from mockApiData
const schemas = mockApiData.schemas.map((schema, index) => { const schemas = mockApiData.schemas.map((schema, index) => ({
// Find the database for this schema id: `schema${index + 10}`,
const dbSlug = schema.database || 'default'; dbId: 'db4',
const db = databases.find(db => db.slug === dbSlug) || databases[0]; name: schema.name,
slug: schema.slug,
return { tables: mockApiData.tables.filter(table => table.schema === schema.slug).length
id: `schema-${schema.slug}`, }));
dbId: db.id,
name: schema.name,
slug: schema.slug,
tables: mockApiData.tables.filter(table => table.schema === schema.slug).length
};
});
// Create tables from mockApiData // Create tables from mockApiData
const tables = mockApiData.tables.map((table, index) => { const tables = mockApiData.tables.map((table, index) => {
@ -1355,22 +1279,16 @@ const generateMockData = () => {
const schemaSlug = table.schema; const schemaSlug = table.schema;
const schemaObj = schemas.find(s => s.slug === schemaSlug); const schemaObj = schemas.find(s => s.slug === schemaSlug);
if (!schemaObj) {
console.warn(`Schema not found for table ${table.name} (${table.slug})`);
return null;
}
return { return {
id: `table-${table.slug}`, id: `table${index + 16}`,
schemaId: schemaObj.id, schemaId: schemaObj.id,
name: table.name, name: table.name,
slug: table.slug, slug: table.slug,
isFact: table.type === 'fact', isFact: table.type === 'fact',
type: table.type, type: table.type,
columns: table.columns, columns: table.columns
database: table.database
}; };
}).filter(Boolean); // Remove any null entries });
return { databases, schemas, tables }; return { databases, schemas, tables };
}; };
@ -1415,78 +1333,17 @@ const InfiniteCanvas = () => {
console.error('Error loading databases from localStorage:', error); console.error('Error loading databases from localStorage:', error);
} }
// If no saved databases, use the databases from our API // If no saved databases, return default Dbtez database
return mockData.databases.map(db => ({ return [{
...db, id: 'db4',
description: db.description || `Database ${db.name}`, name: 'Dbtez',
type: db.type || 'PostgreSQL' description: 'Default database for data exploration',
})); type: 'PostgreSQL',
schemas: mockData.schemas.length,
tables: mockData.tables.length
}];
}); });
// Track if we've already fetched the database data
const [hasFetchedDatabases, setHasFetchedDatabases] = useState(false);
// Fetch real database data from API - only once
useEffect(() => {
// Skip if we've already fetched the data
if (hasFetchedDatabases) {
console.log('Skipping database fetch - already loaded');
return;
}
const fetchDatabasesFromAPI = async () => {
try {
// API configuration
const API_BASE_URL = 'https://sandbox.kezel.io/api';
const token = "abdhsg"; // Replace with your actual token
const orgSlug = "sN05Pjv11qvH"; // Replace with your actual org slug
console.log('Fetching databases from API...');
const response = await axios.post(
`${API_BASE_URL}/qbt_database_list_get`,
{
token: token,
org: orgSlug,
},
{
headers: {
'Content-Type': 'application/json'
}
}
);
console.log('API Response:', response.data);
if (response.data && response.data.items && Array.isArray(response.data.items)) {
// Map API response to our database format
const apiDatabases = response.data.items.map((db, index) => ({
id: `db-${db.con || index}`,
name: db.name, // Use the name from API
slug: db.con,
description: db.description || `Database ${db.name}`,
type: db.database_type || 'PostgreSQL',
schemas: 0, // We'll update these later if needed
tables: 0
}));
console.log('Mapped databases from API:', apiDatabases);
// Update state with API data
setDatabases(apiDatabases);
// Mark that we've fetched the data
setHasFetchedDatabases(true);
}
} catch (error) {
console.error('Error fetching databases from API:', error);
}
};
// Call the function to fetch databases
fetchDatabasesFromAPI();
}, [hasFetchedDatabases]); // Only re-run if hasFetchedDatabases changes
const [schemas, setSchemas] = useState(() => { const [schemas, setSchemas] = useState(() => {
// Try to load schemas from localStorage // Try to load schemas from localStorage
try { try {
@ -1549,7 +1406,6 @@ const InfiniteCanvas = () => {
type: 'database', type: 'database',
data: { data: {
label: newDatabase.name, label: newDatabase.name,
name: newDatabase.name, // Include the name explicitly
schemas: newDatabase.schemas, schemas: newDatabase.schemas,
tables: newDatabase.tables, tables: newDatabase.tables,
expanded: false, expanded: false,
@ -1890,63 +1746,18 @@ const InfiniteCanvas = () => {
// Function to handle redirection to DataFlow view // Function to handle redirection to DataFlow view
const handleViewDataFlow = (dbId, dbName) => { const handleViewDataFlow = (dbId, dbName) => {
// Safety check for undefined parameters // In a real application with proper routing, you would use a router to navigate
if (!dbId || !dbName) { // Since we're using a tab-based navigation in App.jsx, we need to communicate with the parent
console.error('Invalid database ID or name provided to handleViewDataFlow');
return;
}
// Find the database object to get the slug
const database = databases.find(db => db.id === dbId);
if (!database) {
console.error(`Database with ID ${dbId} not found`);
return;
}
// Get the database slug, with a fallback
const dbSlug = database.slug || (dbId.startsWith('db-') ? dbId.substring(3) : 'default');
console.log(`Viewing data flow for database: ${dbName} (${dbSlug})`);
// Clear any existing schemas in localStorage for this database
try {
// Clear any existing schemas for this database
localStorage.removeItem(`schemas_${dbSlug}`);
localStorage.removeItem(`tables_${dbSlug}`);
// If this is "service 2" (my_dwh2), set empty schemas
if (dbSlug === 'my_dwh2') {
console.log('Setting empty schemas for service 2 database');
localStorage.setItem(`schemas_${dbSlug}`, JSON.stringify([]));
localStorage.setItem(`tables_${dbSlug}`, JSON.stringify([]));
}
} catch (error) {
console.error('Error clearing schemas from localStorage:', error);
}
// Store the selected database info in localStorage for the DataFlow component to use // Store the selected database info in localStorage for the DataFlow component to use
localStorage.setItem('selectedDatabase', JSON.stringify({ localStorage.setItem('selectedDatabase', JSON.stringify({
id: dbId, id: dbId,
name: dbName, name: dbName
slug: dbSlug, // Include the slug for API calls
isEmpty: dbSlug === 'my_dwh2' // Flag to indicate if this database has no schemas
})); }));
// Set the current database slug in mockData.js
if (typeof window.setCurrentDbSlug === 'function') {
window.setCurrentDbSlug(dbSlug);
} else {
console.warn('window.setCurrentDbSlug function is not available');
}
// Trigger an event that App.jsx can listen to // Trigger an event that App.jsx can listen to
const event = new CustomEvent('viewDataFlow', { const event = new CustomEvent('viewDataFlow', {
detail: { detail: { databaseId: dbId, databaseName: dbName }
databaseId: dbId,
databaseName: dbName,
databaseSlug: dbSlug,
isEmpty: dbSlug === 'my_dwh2' // Flag to indicate if this database has no schemas
}
}); });
window.dispatchEvent(event); window.dispatchEvent(event);
@ -1955,88 +1766,25 @@ const InfiniteCanvas = () => {
}; };
// Initialize with database nodes from state instead of mockData // Initialize with database nodes from state instead of mockData
const initialNodes = databases const initialNodes = databases.map((db, index) => ({
.filter(db => db && db.id) // Filter out any invalid database objects id: db.id,
.map((db, index) => { type: 'database',
// Create a safe ID for the database data: {
const safeId = db.id; label: db.name,
schemas: db.schemas || 0,
return { tables: db.tables || 0,
id: safeId, expanded: false,
type: 'database', onToggle: (id) => toggleDatabaseExpansion(id),
data: { onViewDetails: handleViewDataFlow // Add the function to handle redirection
id: safeId, // Explicitly include id in data to ensure it's available },
label: db.name || `Database ${index + 1}`, position: { x: 250 * index, y: 50 },
name: db.name, // Include the name from the API response }));
schemas: db.schemas || 0,
tables: db.tables || 0,
expanded: false,
onToggle: (id) => toggleDatabaseExpansion(id),
onViewDetails: handleViewDataFlow // Add the function to handle redirection
},
// Position databases in a grid layout for better visibility
position: {
x: 250 + (index % 2) * 400, // 2 columns
y: 100 + Math.floor(index / 2) * 250 // rows based on index
},
};
});
const initialEdges = []; const initialEdges = [];
const [nodes, setNodes, onNodesChange] = useNodesState(initialNodes); const [nodes, setNodes, onNodesChange] = useNodesState(initialNodes);
const [edges, setEdges, onEdgesChange] = useEdgesState(initialEdges); const [edges, setEdges, onEdgesChange] = useEdgesState(initialEdges);
// Update nodes when databases change (e.g., after API fetch)
useEffect(() => {
console.log('Databases updated, updating nodes:', databases);
const updatedNodes = databases
.filter(db => db && db.id)
.map((db, index) => {
const existingNode = nodes.find(node => node.id === db.id);
// If node already exists, update its data
if (existingNode) {
return {
...existingNode,
data: {
...existingNode.data,
label: db.name || `Database ${index + 1}`,
name: db.name, // Make sure name is set from the API response
schemas: db.schemas || 0,
tables: db.tables || 0,
}
};
}
// Otherwise create a new node
return {
id: db.id,
type: 'database',
data: {
id: db.id,
label: db.name || `Database ${index + 1}`,
name: db.name, // Include the name from the API response
schemas: db.schemas || 0,
tables: db.tables || 0,
expanded: false,
onToggle: (id) => toggleDatabaseExpansion(id),
onViewDetails: handleViewDataFlow
},
position: {
x: 250 + (index % 2) * 400,
y: 100 + Math.floor(index / 2) * 250
},
};
});
// Only update if we have nodes to show
if (updatedNodes.length > 0) {
setNodes(updatedNodes);
}
}, [databases]);
// Track viewport changes using the onMove callback instead of event listeners // Track viewport changes using the onMove callback instead of event listeners
const onMove = useCallback((event, viewport) => { const onMove = useCallback((event, viewport) => {
setScale(viewport.zoom); setScale(viewport.zoom);
@ -2089,37 +1837,23 @@ const InfiniteCanvas = () => {
const onInit = (instance) => { const onInit = (instance) => {
setReactFlowInstance(instance); setReactFlowInstance(instance);
// Auto-expand the first database and its schemas on load // Auto-expand Dbtez database and its schemas on load
setTimeout(() => { setTimeout(() => {
// Find the first database in our list // Expand the Dbtez database
if (databases.length > 0) { toggleDatabaseExpansion('db4');
const firstDb = databases[0];
if (firstDb && firstDb.id) {
console.log(`Auto-expanding first database: ${firstDb.id}`);
// Expand the first database // After expanding Dbtez, also expand all its schemas
toggleDatabaseExpansion(firstDb.id); setTimeout(() => {
const schemas = mockData.schemas.filter(schema => schema.dbId === 'db4');
schemas.forEach(schema => {
toggleSchemaExpansion(schema.id);
});
// After expanding the database, also expand all its schemas // Fit view after all expansions
setTimeout(() => { setTimeout(() => {
const schemas = mockData.schemas.filter(schema => schema && schema.dbId === firstDb.id); fitView({ padding: 0.5, maxZoom: 0.7 });
schemas.forEach(schema => { }, 100);
if (schema && schema.id) { }, 100);
toggleSchemaExpansion(schema.id);
}
});
// Fit view after all expansions
setTimeout(() => {
try {
fitView({ padding: 0.5, maxZoom: 0.7 });
} catch (error) {
console.error('Error calling fitView:', error);
}
}, 100);
}, 100);
}
}
}, 100); }, 100);
}; };
@ -2150,12 +1884,6 @@ const InfiniteCanvas = () => {
// Toggle database expansion to show/hide schemas // Toggle database expansion to show/hide schemas
const toggleDatabaseExpansion = (dbId) => { const toggleDatabaseExpansion = (dbId) => {
// Safety check for undefined or invalid dbId
if (!dbId) {
console.error('Invalid database ID provided to toggleDatabaseExpansion');
return;
}
const isExpanded = expandedDatabases[dbId]; const isExpanded = expandedDatabases[dbId];
// Update the database node to show the correct toggle state // Update the database node to show the correct toggle state
@ -2175,25 +1903,15 @@ const InfiniteCanvas = () => {
if (isExpanded) { if (isExpanded) {
// Collapse: remove all schemas and tables for this database // Collapse: remove all schemas and tables for this database
setNodes(nodes => nodes.filter(node => { setNodes(nodes => nodes.filter(node =>
if (node.type === 'database') return true; node.type === 'database' ||
if (node.type === 'schema' && node.data) { (node.type === 'schema' && !node.data.dbId === dbId) ||
return node.data.dbId !== dbId; (node.type === 'table' && !mockData.schemas.find(s => s.id === node.data.schemaId)?.dbId === dbId)
} ));
if (node.type === 'table' && node.data && node.data.schemaId) {
const schema = mockData.schemas.find(s => s.id === node.data.schemaId);
return !schema || schema.dbId !== dbId;
}
return true;
}));
// Safely filter edges setEdges(edges => edges.filter(edge =>
setEdges(edges => edges.filter(edge => { !edge.source.startsWith(dbId) && !edge.target.startsWith(dbId)
if (!edge.source || !edge.target) return true; ));
const sourceStr = String(edge.source);
const targetStr = String(edge.target);
return !sourceStr.startsWith(dbId) && !targetStr.startsWith(dbId);
}));
// Update expanded state // Update expanded state
setExpandedDatabases({ setExpandedDatabases({
@ -2204,7 +1922,7 @@ const InfiniteCanvas = () => {
// Also collapse any expanded schemas // Also collapse any expanded schemas
const updatedExpandedSchemas = { ...expandedSchemas }; const updatedExpandedSchemas = { ...expandedSchemas };
mockData.schemas.forEach(schema => { mockData.schemas.forEach(schema => {
if (schema && schema.dbId === dbId) { if (schema.dbId === dbId) {
updatedExpandedSchemas[schema.id] = false; updatedExpandedSchemas[schema.id] = false;
} }
}); });
@ -2213,27 +1931,14 @@ const InfiniteCanvas = () => {
} else { } else {
// Expand: add schema nodes for this database // Expand: add schema nodes for this database
const dbNode = nodes.find(n => n.id === dbId); const dbNode = nodes.find(n => n.id === dbId);
const dbSchemas = mockData.schemas.filter(schema => schema.dbId === dbId);
// Safety check if database node exists
if (!dbNode) {
console.error(`Database node with ID ${dbId} not found`);
return;
}
const dbSchemas = mockData.schemas.filter(schema => schema && schema.dbId === dbId);
// Check if we have schemas for this database
if (dbSchemas.length === 0) {
console.warn(`No schemas found for database ${dbId}`);
}
const schemaNodes = dbSchemas.map((schema, index) => ({ const schemaNodes = dbSchemas.map((schema, index) => ({
id: schema.id, id: schema.id,
type: 'schema', type: 'schema',
data: { data: {
id: schema.id, // Include id in data label: schema.name,
label: schema.name || `Schema ${index + 1}`, tables: schema.tables,
tables: schema.tables || 0,
dbId: dbId, dbId: dbId,
expanded: false, expanded: false,
onToggle: toggleSchemaExpansion onToggle: toggleSchemaExpansion
@ -2272,24 +1977,13 @@ const InfiniteCanvas = () => {
}); });
} }
// Safely call fitView with a delay
setTimeout(() => { setTimeout(() => {
try { fitView();
fitView();
} catch (error) {
console.error('Error calling fitView:', error);
}
}, 10); }, 10);
}; };
// Toggle schema expansion to show/hide tables // Toggle schema expansion to show/hide tables
const toggleSchemaExpansion = (schemaId) => { const toggleSchemaExpansion = (schemaId) => {
// Safety check for undefined or invalid schemaId
if (!schemaId) {
console.error('Invalid schema ID provided to toggleSchemaExpansion');
return;
}
const isExpanded = expandedSchemas[schemaId]; const isExpanded = expandedSchemas[schemaId];
// Update the schema node to show the correct toggle state // Update the schema node to show the correct toggle state
@ -2309,18 +2003,13 @@ const InfiniteCanvas = () => {
if (isExpanded) { if (isExpanded) {
// Collapse: remove all tables for this schema // Collapse: remove all tables for this schema
setNodes(nodes => nodes.filter(node => { setNodes(nodes => nodes.filter(node =>
if (node.type !== 'table') return true; node.type !== 'table' || node.data.schemaId !== schemaId
return !node.data || node.data.schemaId !== schemaId; ));
}));
// Safely filter edges setEdges(edges => edges.filter(edge =>
setEdges(edges => edges.filter(edge => { !edge.source.startsWith(schemaId) && !edge.target.startsWith(schemaId)
if (!edge.source || !edge.target) return true; ));
const sourceStr = String(edge.source);
const targetStr = String(edge.target);
return !sourceStr.startsWith(schemaId) && !targetStr.startsWith(schemaId);
}));
// Update expanded state // Update expanded state
setExpandedSchemas({ setExpandedSchemas({
@ -2331,27 +2020,14 @@ const InfiniteCanvas = () => {
} else { } else {
// Expand: add table nodes for this schema // Expand: add table nodes for this schema
const schemaNode = nodes.find(n => n.id === schemaId); const schemaNode = nodes.find(n => n.id === schemaId);
const schemaTables = mockData.tables.filter(table => table.schemaId === schemaId);
// Safety check if schema node exists
if (!schemaNode) {
console.error(`Schema node with ID ${schemaId} not found`);
return;
}
const schemaTables = mockData.tables.filter(table => table && table.schemaId === schemaId);
// Check if we have tables for this schema
if (schemaTables.length === 0) {
console.warn(`No tables found for schema ${schemaId}`);
}
const tableNodes = schemaTables.map((table, index) => ({ const tableNodes = schemaTables.map((table, index) => ({
id: table.id, id: table.id,
type: 'table', type: 'table',
data: { data: {
id: table.id, // Include id in data label: table.name,
label: table.name || `Table ${index + 1}`, isFact: table.isFact,
isFact: table.isFact || false,
type: table.type || (table.isFact ? 'fact' : 'dimension'), type: table.type || (table.isFact ? 'fact' : 'dimension'),
schemaId: schemaId, schemaId: schemaId,
columns: table.columns || [] columns: table.columns || []

View File

@ -1,5 +1,5 @@
// API data for DataflowCanvas component // API data for DataflowCanvas component
import { useState, useEffect, useCallback } from 'react'; import { useState, useEffect } from 'react';
import axios from 'axios'; import axios from 'axios';
// Default viewport settings // Default viewport settings
@ -9,404 +9,9 @@ const defaultViewportSettings = {
zoom: 0.22 zoom: 0.22
}; };
// API configuration // Function to transform API response to the format expected by the application
const API_BASE_URL = 'https://sandbox.kezel.io/api'; const transformApiResponse = (apiData) => {
const token = "abdhsg"; // Replace with your actual token if (!apiData || !apiData.items || !apiData.items.database || apiData.items.database.length === 0) {
const orgSlug = "sN05Pjv11qvH"; // Replace with your actual org slug
// Global variable to store the current database slug
let currentDbSlug = null;
// Expose a function to set the current database slug from other components
window.setCurrentDbSlug = (slug) => {
console.log(`Setting current database slug to: ${slug}`);
currentDbSlug = slug;
// Clear any cached data for this database
try {
// If this is "service 2" (my_dwh2), clear any cached schemas and tables
if (slug === 'my_dwh2') {
console.log('Clearing cached data for service 2 database');
// Clear localStorage cache for this database
localStorage.removeItem(`schemas_${slug}`);
localStorage.removeItem(`tables_${slug}`);
// Store empty arrays to ensure no data is shown
localStorage.setItem(`schemas_${slug}`, JSON.stringify([]));
localStorage.setItem(`tables_${slug}`, JSON.stringify([]));
}
} catch (error) {
console.error('Error clearing cached data:', error);
}
return slug;
};
// API endpoints
const ENDPOINTS = {
DATABASE_LIST: `${API_BASE_URL}/qbt_database_list_get`,
SCHEMA_LIST: `${API_BASE_URL}/qbt_schema_list_get`,
TABLE_LIST: `${API_BASE_URL}/qbt_table_list_get`,
COLUMN_LIST: `${API_BASE_URL}/qbt_column_list_get`
};
// Function to fetch databases
const fetchDatabases = async () => {
try {
const response = await axios.post(
ENDPOINTS.DATABASE_LIST,
{
token: token,
org: orgSlug,
},
{
headers: {
'Content-Type': 'application/json'
}
}
);
console.log('Database list response:', response.data);
// The response structure is different from what we expected
// It has items as an array directly, not items.database
const databases = response.data.items || [];
// Log all databases found
console.log(`Found ${databases.length} databases:`, databases);
// Set the current database slug if we have at least one database
// We'll still use the first one as the default for backward compatibility
if (databases.length > 0) {
currentDbSlug = databases[0].con;
console.log(`Set current database slug to: ${currentDbSlug} (first database)`);
}
// Validate database objects
const validDatabases = databases.filter(db => {
if (!db.con) {
console.warn('Found database without connection slug:', db);
return false;
}
return true;
});
if (validDatabases.length !== databases.length) {
console.warn(`Filtered out ${databases.length - validDatabases.length} invalid databases`);
}
return validDatabases;
} catch (error) {
console.error('Error fetching databases:', error);
throw error;
}
};
// Function to fetch schemas for a database
const fetchSchemas = async (dbSlug) => {
try {
// Special case for "service 2" database (my_dwh2) - return empty schemas
if (dbSlug === 'my_dwh2') {
console.log(`Database ${dbSlug} (service 2) has no schemas - returning empty array`);
return [];
}
console.log(`Fetching schemas for database slug: ${dbSlug}`);
const response = await axios.post(
ENDPOINTS.SCHEMA_LIST,
{
token: token,
org: orgSlug,
con: dbSlug
},
{
headers: {
'Content-Type': 'application/json'
}
}
);
console.log(`Schema list for database ${dbSlug}:`, response.data);
console.log(`Schema list items structure:`, response.data.items);
// Based on the actual response structure you provided
// The items array contains schema objects with 'sch' as the slug field
let schemas = [];
if (Array.isArray(response.data.items)) {
// Map the response to match our expected schema structure
schemas = response.data.items.map(item => ({
name: item.name,
slug: item.sch, // Use 'sch' as the slug
description: item.description || "",
created_at: item.created_at,
is_validated: item.is_validated,
// Store the database slug with the schema
database: dbSlug
}));
}
console.log(`Number of schemas found for database ${dbSlug}: ${schemas.length}`);
return schemas;
} catch (error) {
console.error(`Error fetching schemas for database ${dbSlug}:`, error);
throw error;
}
};
// Function to fetch columns for a specific table
const fetchColumns = async (tableSlug, schemaSlug, dbSlug = null) => {
try {
// Use the provided dbSlug or fall back to the global currentDbSlug
const databaseSlug = dbSlug || currentDbSlug;
if (!databaseSlug) {
console.error('No database slug available for fetching columns');
throw new Error('Database slug is required to fetch columns');
}
// Create the payload for the API request
const payload = {
token: token,
org: orgSlug,
tbl: tableSlug,
sch: schemaSlug,
con: databaseSlug
};
console.log(`Fetching columns for table slug: ${tableSlug} in schema: ${schemaSlug} and database: ${databaseSlug}`);
console.log('Column list request payload:', payload);
const response = await axios.post(
ENDPOINTS.COLUMN_LIST,
payload,
{
headers: {
'Content-Type': 'application/json'
}
}
);
console.log(`Column list for table ${tableSlug}:`, response.data);
// Process the columns from the response
let columns = [];
if (Array.isArray(response.data.items)) {
// Map the response to match our expected column structure
columns = response.data.items.map(item => ({
name: item.name,
slug: item.col, // Use 'col' as the slug
description: item.description || "",
data_type: item.data_type,
is_primary_key: item.is_primary_key || false,
is_foreign_key: item.is_foreign_key || false,
is_nullable: item.is_nullable || true,
created_at: item.created_at,
// Store the schema and database information
schema: schemaSlug,
database: databaseSlug
}));
}
console.log(`Number of columns found for table ${tableSlug}: ${columns.length}`);
return columns;
} catch (error) {
console.error(`Error fetching columns for table ${tableSlug}:`, error);
// Log more detailed error information
if (error.response) {
// The request was made and the server responded with a status code
// that falls out of the range of 2xx
console.error('Error response data:', error.response.data);
console.error('Error response status:', error.response.status);
console.error('Error response headers:', error.response.headers);
} else if (error.request) {
// The request was made but no response was received
console.error('Error request:', error.request);
} else {
// Something happened in setting up the request that triggered an Error
console.error('Error message:', error.message);
}
// For now, return mock columns to allow the UI to display something
console.log(`Using mock columns for table ${tableSlug} due to API error`);
// Create mock columns based on the table name to make them more realistic
const mockColumns = [];
// Add id column (common in most tables)
mockColumns.push({
name: "id",
slug: "id",
description: "Primary key",
data_type: "INTEGER",
is_primary_key: true,
is_foreign_key: false,
is_nullable: false,
schema: schemaSlug,
database: databaseSlug
});
// Add name column (common in dimension tables)
if (tableSlug.includes('dim')) {
mockColumns.push({
name: "name",
slug: "name",
description: "Name field",
data_type: "VARCHAR",
is_primary_key: false,
is_foreign_key: false,
is_nullable: true,
schema: schemaSlug,
database: databaseSlug
});
}
// Add amount column (common in fact tables)
if (tableSlug.includes('fact')) {
mockColumns.push({
name: "amount",
slug: "amount",
description: "Transaction amount",
data_type: "DECIMAL",
is_primary_key: false,
is_foreign_key: false,
is_nullable: true,
schema: schemaSlug,
database: databaseSlug
});
}
// Add date column (common in most tables)
mockColumns.push({
name: "created_at",
slug: "created_at",
description: "Creation timestamp",
data_type: "TIMESTAMP",
is_primary_key: false,
is_foreign_key: false,
is_nullable: true,
schema: schemaSlug,
database: databaseSlug
});
// Add updated_at column (common in most tables)
mockColumns.push({
name: "updated_at",
slug: "updated_at",
description: "Last update timestamp",
data_type: "TIMESTAMP",
is_primary_key: false,
is_foreign_key: false,
is_nullable: true,
schema: schemaSlug,
database: databaseSlug
});
return mockColumns;
}
};
// Function to fetch tables for a schema
const fetchTables = async (schemaSlug, dbSlug = null) => {
try {
// Use the provided dbSlug or fall back to the global currentDbSlug
const databaseSlug = dbSlug || currentDbSlug;
if (!databaseSlug) {
console.error('No database slug available for fetching tables');
throw new Error('Database slug is required to fetch tables');
}
// Special case for "service 2" database (my_dwh2) - return empty tables
if (databaseSlug === 'my_dwh2') {
console.log(`Database ${databaseSlug} (service 2) has no tables - returning empty array`);
return [];
}
console.log(`Fetching tables for schema slug: ${schemaSlug} in database: ${databaseSlug}`);
const response = await axios.post(
ENDPOINTS.TABLE_LIST,
{
token: token,
org: orgSlug,
sch: schemaSlug,
con: databaseSlug
},
{
headers: {
'Content-Type': 'application/json'
}
}
);
console.log(`Table list for schema ${schemaSlug}:`, response.data);
console.log(`Table list items structure:`, response.data.items);
// Based on the expected response structure
let tables = [];
if (Array.isArray(response.data.items)) {
// Map the response to match our expected table structure
tables = response.data.items.map(item => ({
name: item.name || `Table ${item.tbl}`,
slug: item.tbl, // Use 'tbl' as the slug if it exists
schema: schemaSlug, // Associate with the schema
description: item.description || "",
// Preserve the table_type from the API response
table_type: item.table_type || "stage",
// Preserve the orientation from the API response
orientation: item.orientation || { x: 1000, y: 100 },
// Include any other fields we need
external_name: item.external_name,
is_provisioned: item.is_provisioned,
created_at: item.created_at,
// Store the database slug with the table
database: databaseSlug,
columns: [] // Initialize with empty array, will be populated later
}));
// Fetch columns for each table
// Use Promise.all to fetch columns for all tables in parallel
// This will speed up the process significantly
try {
const columnsPromises = tables.map(table =>
fetchColumns(table.slug, schemaSlug, databaseSlug)
.then(columns => {
table.columns = columns;
return table;
})
.catch(error => {
console.error(`Error fetching columns for table ${table.slug}:`, error);
table.columns = []; // Set empty columns on error
return table;
})
);
await Promise.all(columnsPromises);
console.log('All columns fetched successfully');
} catch (error) {
console.error('Error fetching columns for tables:', error);
// Continue with the tables we have, even if columns fetching failed
}
}
console.log(`Number of tables found: ${tables.length}`);
return tables;
} catch (error) {
console.error(`Error fetching tables for schema ${schemaSlug}:`, error);
throw error;
}
};
// Function to transform the data from multiple API calls
const transformData = (databases, schemasMap, tablesMap) => {
console.log('Transform data called with:', {
databases,
schemasMapKeys: Object.keys(schemasMap),
tablesMapKeys: Object.keys(tablesMap)
});
if (!databases || databases.length === 0) {
console.log('No databases found, returning empty data');
return { return {
schemas: [], schemas: [],
tables: [], tables: [],
@ -415,114 +20,92 @@ const transformData = (databases, schemasMap, tablesMap) => {
}; };
} }
// Process all databases instead of just the first one const database = apiData.items.database[0];
console.log(`Processing ${databases.length} databases:`, databases);
// Initialize arrays to hold all schemas and tables // Extract schemas
let allSchemas = []; const schemas = database.schemas?.map(schema => ({
let allTables = []; name: schema.name,
slug: schema.slug,
description: schema.description || "",
color: schema.color || "#1890ff",
position: schema.position || { x: 50, y: 50 },
width: schema.width || 1200,
height: schema.height || 700
})) || [];
// Process each database // Extract tables
databases.forEach((database, dbIndex) => { const tables = [];
console.log(`Processing database ${dbIndex + 1}/${databases.length}:`, database); database.schemas?.forEach(schema => {
if (schema.table && Array.isArray(schema.table)) {
// Skip if database has no connection slug schema.table.forEach(table => {
if (!database.con) { tables.push({
console.warn(`Database ${dbIndex} has no connection slug, skipping:`, database);
return;
}
// Get schemas for this database
const dbSchemas = schemasMap[database.con] || [];
console.log(`Found ${dbSchemas.length} schemas for database ${database.con}:`, dbSchemas);
// Calculate offset for positioning schemas from different databases
// This ensures schemas from different databases don't overlap
const dbOffsetX = dbIndex * 2000; // Horizontal offset between databases
const dbOffsetY = 0; // No vertical offset between databases
// Transform schemas for this database
const transformedSchemas = dbSchemas.map((schema, schemaIndex) => {
console.log(`Processing schema ${schemaIndex + 1}/${dbSchemas.length}:`, schema);
// Position schemas in a grid layout with offset based on database index
const position = {
x: dbOffsetX + 50 + (schemaIndex % 2) * 1300, // Position horizontally in a grid
y: dbOffsetY + 50 + Math.floor(schemaIndex / 2) * 800 // Position vertically in a grid
};
// Add database information to the schema
return {
name: schema.name,
slug: schema.slug,
description: schema.description || "",
color: schema.color || (schemaIndex % 2 === 0 ? "#1890ff" : "#52c41a"), // Alternate colors
position: schema.position || position,
width: schema.width || 1200,
height: schema.height || 700,
database: database.con, // Add database slug to schema
databaseName: database.name || `Database ${database.con}` // Add database name
};
});
// Add schemas from this database to the combined list
allSchemas = [...allSchemas, ...transformedSchemas];
// Process tables for each schema in this database
transformedSchemas.forEach(schema => {
const schemaTables = tablesMap[schema.slug] || [];
console.log(`Found ${schemaTables.length} tables for schema ${schema.name} (${schema.slug}):`, schemaTables);
schemaTables.forEach(table => {
// Extract column names for display in the UI
const columnNames = (table.columns || []).map(col =>
typeof col === 'string' ? col : col.name
);
// Add table with database information
allTables.push({
name: table.name, name: table.name,
slug: table.slug, slug: table.slug,
// Use table_type from API response or fallback to type if available, otherwise default to "stage" type: table.type || "stage",
type: table.table_type || table.type || "stage", schema: table.schema,
schema: schema.slug, orientation: table.orientation || { x: 100, y: 100 },
// Use orientation directly from API response or fallback to default columns: table.columns || []
orientation: table.orientation || { x: 1000, y: 100 },
// Include the columns fetched from the API (as an array of strings for the UI)
columns: columnNames,
// Keep the full column objects for reference if needed
columnsData: table.columns || [],
// Add database information
database: schema.database,
databaseName: schema.databaseName
}); });
}); });
}); }
}); });
console.log(`Transformed ${allSchemas.length} schemas and ${allTables.length} tables from ${databases.length} databases`); // Extract processes
const processes = database.process?.map(process => ({
// For processes, we would need another API endpoint name: process.name,
// For now, we'll use an empty array slug: process.slug,
const processes = []; source_table: process.source_table || [],
destination_table: process.destination_table || [],
description: process.description || "",
type: process.type || "ETL",
status: process.status || "active",
mappings: process.mappings?.map(mapping => ({
source: mapping.source,
target: mapping.target,
type: mapping.type
})) || []
})) || [];
return { return {
schemas: allSchemas, schemas,
tables: allTables, tables,
processes, processes,
viewportSettings: defaultViewportSettings viewportSettings: defaultViewportSettings
}; };
}; };
// Create a data cache to store API results // Create a custom hook to fetch and transform the data
const dataCache = { export const useApiData = () => {
databases: null, const [data, setData] = useState(null);
schemasMap: null, const [loading, setLoading] = useState(true);
tablesMap: null, const [error, setError] = useState(null);
transformedData: null,
isLoading: false, useEffect(() => {
error: null, const fetchData = async () => {
lastFetched: null try {
const response = await axios.post('https://sandbox.kezel.io/api/qbt_metadata_list_get', {}, {
headers: {
'Content-Type': 'application/json'
}
});
const apiData = response.data;
console.log('API Response Structure:', JSON.stringify(apiData, null, 2));
const transformedData = transformApiResponse(apiData);
console.log('Transformed Data Structure:', JSON.stringify(transformedData, null, 2));
setData(transformedData);
setLoading(false);
} catch (err) {
console.error('Error fetching data:', err);
setError(err.response?.data?.message || err.message);
setLoading(false);
}
};
fetchData();
}, []);
return { data, loading, error };
}; };
// For backward compatibility, provide a mockApiData object // For backward compatibility, provide a mockApiData object
@ -534,196 +117,26 @@ export const mockApiData = {
viewportSettings: defaultViewportSettings viewportSettings: defaultViewportSettings
}; };
// Function to fetch all data and populate the cache // Fetch data immediately to populate mockApiData using axios
const fetchAndCacheAllData = async (forceRefresh = false) => { axios.post('https://sandbox.kezel.io/api/qbt_metadata_list_get', {}, {
// If we're already loading data, don't start another fetch headers: {
if (dataCache.isLoading && !forceRefresh) { 'Content-Type': 'application/json'
console.log('Data fetch already in progress, waiting...');
return dataCache.transformedData;
} }
})
.then(response => {
const apiData = response.data;
const transformedData = transformApiResponse(apiData);
// Mark that we're loading data // Update mockApiData properties
dataCache.isLoading = true; mockApiData.schemas = transformedData.schemas;
mockApiData.tables = transformedData.tables;
mockApiData.processes = transformedData.processes;
mockApiData.viewportSettings = transformedData.viewportSettings;
try { console.log('API data loaded successfully');
console.log('Fetching fresh data from API'); })
.catch(error => {
// Step 1: Fetch databases console.error('Error fetching API data:', error.response?.data?.message || error.message);
const databases = await fetchDatabases(); });
dataCache.databases = databases;
if (databases.length === 0) {
console.log('No databases found');
const emptyData = {
schemas: [],
tables: [],
processes: [],
viewportSettings: defaultViewportSettings
};
dataCache.transformedData = emptyData;
// Update mockApiData properties for backward compatibility
mockApiData.schemas = emptyData.schemas;
mockApiData.tables = emptyData.tables;
mockApiData.processes = emptyData.processes;
mockApiData.viewportSettings = emptyData.viewportSettings;
return emptyData;
}
// Step 2: Fetch schemas for each database
const schemasMap = {};
for (const db of databases) {
// Use db.con as the slug for fetching schemas
schemasMap[db.con] = await fetchSchemas(db.con);
}
dataCache.schemasMap = schemasMap;
// Step 3: Fetch tables for each schema
const tablesMap = {};
for (const dbSlug in schemasMap) {
for (const schema of schemasMap[dbSlug]) {
// Use schema.slug for fetching tables and pass the database slug
tablesMap[schema.slug] = await fetchTables(schema.slug, dbSlug);
}
}
dataCache.tablesMap = tablesMap;
// Step 4: Transform the data
const transformedData = transformData(databases, schemasMap, tablesMap);
dataCache.transformedData = transformedData;
dataCache.lastFetched = new Date();
// Update mockApiData properties for backward compatibility
mockApiData.schemas = transformedData.schemas;
mockApiData.tables = transformedData.tables;
mockApiData.processes = transformedData.processes;
mockApiData.viewportSettings = transformedData.viewportSettings;
console.log('API data loaded successfully');
return transformedData;
} catch (error) {
console.error('Error fetching API data:', error.response?.data?.message || error.message);
dataCache.error = error.response?.data?.message || error.message;
throw error;
} finally {
dataCache.isLoading = false;
}
};
// Create a custom hook to fetch and transform the data
export const useApiData = (forceRefresh = false) => {
const [data, setData] = useState(null);
const [loading, setLoading] = useState(true);
const [error, setError] = useState(null);
useEffect(() => {
let isMounted = true;
// Check if we're viewing the "service 2" database which has no schemas
if (currentDbSlug === 'my_dwh2') {
console.log('Using empty data for service 2 database');
// Return empty data for "service 2" database
const emptyData = {
schemas: [],
tables: [],
processes: [],
viewportSettings: { x: 0, y: 0, zoom: 0.5 }
};
// Update mockApiData for backward compatibility
mockApiData.schemas = [];
mockApiData.tables = [];
mockApiData.processes = [];
setData(emptyData);
setLoading(false);
return () => {
isMounted = false;
};
}
// If a fetch is already in progress, wait for it
if (dataCache.isLoading && !forceRefresh) {
console.log('Data fetch already in progress, waiting...');
// Check every 100ms if the data has been loaded
const checkInterval = setInterval(() => {
if (!isMounted) {
clearInterval(checkInterval);
return;
}
if (dataCache.transformedData) {
console.log('Cached data now available');
setData(dataCache.transformedData);
setLoading(false);
clearInterval(checkInterval);
}
if (dataCache.error) {
console.error('Error occurred during data fetch:', dataCache.error);
setError(dataCache.error);
setLoading(false);
clearInterval(checkInterval);
}
}, 100);
return () => {
isMounted = false;
clearInterval(checkInterval);
};
}
// Start a new fetch
const fetchData = async () => {
try {
console.log('Fetching fresh data from API in useApiData hook');
const result = await fetchAndCacheAllData(forceRefresh);
if (isMounted) {
setData(result);
setLoading(false);
}
} catch (err) {
console.error('Error fetching data in useApiData hook:', err);
if (isMounted) {
setError(err.response?.data?.message || err.message);
setLoading(false);
}
}
};
fetchData();
return () => {
isMounted = false;
};
}, [forceRefresh, currentDbSlug]); // Add currentDbSlug as a dependency
// Add a refresh function to allow manual refresh
const refreshData = useCallback(async () => {
setLoading(true);
setError(null);
try {
const result = await fetchAndCacheAllData(true);
setData(result);
setLoading(false);
} catch (err) {
console.error('Error refreshing data:', err);
setError(err.response?.data?.message || err.message);
setLoading(false);
}
}, []);
return { data, loading, error, refreshData };
};
// Fetch data immediately to populate the cache and mockApiData
// Always force a refresh to ensure API calls are made
fetchAndCacheAllData(true);
export default mockApiData; export default mockApiData;