diff --git a/docs/integrations/data-ingestion/apache-spark/index.md b/docs/integrations/data-ingestion/apache-spark/index.md
index 7e46f668261..c722ba9bff0 100644
--- a/docs/integrations/data-ingestion/apache-spark/index.md
+++ b/docs/integrations/data-ingestion/apache-spark/index.md
@@ -6,6 +6,10 @@ description: 'Introduction to Apache Spark with ClickHouse'
keywords: ['clickhouse', 'Apache Spark', 'migrating', 'data']
title: 'Integrating Apache Spark with ClickHouse'
doc_type: 'guide'
+integration_type: ['data ingestion']
+integration_logo: '/static/images/integrations/logos/apache_spark_logo.svg'
+integration_title: 'Apache Spark'
+integration_tier: 'core'
---
import Tabs from '@theme/Tabs';
diff --git a/docs/integrations/data-ingestion/aws-glue/index.md b/docs/integrations/data-ingestion/aws-glue/index.md
index e73296c824a..14d06215397 100644
--- a/docs/integrations/data-ingestion/aws-glue/index.md
+++ b/docs/integrations/data-ingestion/aws-glue/index.md
@@ -6,6 +6,10 @@ description: 'Integrate ClickHouse and Amazon Glue'
keywords: ['clickhouse', 'amazon', 'aws', 'glue', 'migrating', 'data', 'spark']
title: 'Integrating Amazon Glue with ClickHouse and Spark'
doc_type: 'guide'
+integration_type: ['data ingestion']
+integration_logo: '/static/images/integrations/logos/glue_logo.png'
+integration_title: 'Amazon Glue'
+integration_tier: 'core'
---
import Image from '@theme/IdealImage';
diff --git a/docs/integrations/data-ingestion/azure-data-factory/index.md b/docs/integrations/data-ingestion/azure-data-factory/index.md
index cb5e01ec5dd..42d5e268632 100644
--- a/docs/integrations/data-ingestion/azure-data-factory/index.md
+++ b/docs/integrations/data-ingestion/azure-data-factory/index.md
@@ -4,6 +4,10 @@ description: 'Bringing Azure Data into ClickHouse'
keywords: ['azure data factory', 'azure', 'microsoft', 'data']
title: 'Bringing Azure Data into ClickHouse'
doc_type: 'guide'
+integration_type: ['data ingestion']
+integration_logo: '/static/images/integrations/logos/azure-data-factory.png'
+integration_title: 'Azure Data Factory'
+integration_tier: 'core'
---
| Page | Description |
diff --git a/docs/integrations/data-ingestion/azure-synapse/index.md b/docs/integrations/data-ingestion/azure-synapse/index.md
index a92b98dcaa5..38b2b2ab1d8 100644
--- a/docs/integrations/data-ingestion/azure-synapse/index.md
+++ b/docs/integrations/data-ingestion/azure-synapse/index.md
@@ -5,6 +5,10 @@ description: 'Introduction to Azure Synapse with ClickHouse'
keywords: ['clickhouse', 'azure synapse', 'azure', 'synapse', 'microsoft', 'azure spark', 'data']
title: 'Integrating Azure Synapse with ClickHouse'
doc_type: 'guide'
+integration_type: ['data ingestion']
+integration_logo: '/static/images/integrations/logos/azure-synapse.png'
+integration_title: 'Azure Synapse'
+integration_tier: 'core'
---
import TOCInline from '@theme/TOCInline';
diff --git a/docs/integrations/data-ingestion/clickpipes/kafka/index.md b/docs/integrations/data-ingestion/clickpipes/kafka/index.md
index 798e170e550..8531fc6ac29 100644
--- a/docs/integrations/data-ingestion/clickpipes/kafka/index.md
+++ b/docs/integrations/data-ingestion/clickpipes/kafka/index.md
@@ -4,6 +4,10 @@ slug: /integrations/clickpipes/kafka
sidebar_position: 1
title: 'Kafka ClickPipes'
doc_type: 'landing-page'
+integration_type: ['clickpipes']
+integration_logo: '/static/images/integrations/logos/kafka.svg'
+integration_title: 'ClickPipes for Kafka'
+integration_tier: 'core'
---
diff --git a/docs/integrations/data-ingestion/clickpipes/kinesis.md b/docs/integrations/data-ingestion/clickpipes/kinesis.md
index 052497fd04f..5fd3f37420d 100644
--- a/docs/integrations/data-ingestion/clickpipes/kinesis.md
+++ b/docs/integrations/data-ingestion/clickpipes/kinesis.md
@@ -4,6 +4,10 @@ description: 'Seamlessly connect your Amazon Kinesis data sources to ClickHouse
slug: /integrations/clickpipes/kinesis
title: 'Integrating Amazon Kinesis with ClickHouse Cloud'
doc_type: 'guide'
+integration_type: ['clickpipes']
+integration_logo: '/static/images/integrations/logos/amazon_kinesis_logo.svg'
+integration_title: 'ClickPipes for Amazon Kinesis'
+integration_tier: 'core'
---
import cp_service from '@site/static/images/integrations/data-ingestion/clickpipes/cp_service.png';
diff --git a/docs/integrations/data-ingestion/clickpipes/mongodb/index.md b/docs/integrations/data-ingestion/clickpipes/mongodb/index.md
index 9624b2833f5..8b6d82ad524 100644
--- a/docs/integrations/data-ingestion/clickpipes/mongodb/index.md
+++ b/docs/integrations/data-ingestion/clickpipes/mongodb/index.md
@@ -4,6 +4,10 @@ description: 'Describes how to seamlessly connect your MongoDB to ClickHouse Clo
slug: /integrations/clickpipes/mongodb
title: 'Ingesting data from MongoDB to ClickHouse (using CDC)'
doc_type: 'guide'
+integration_type: ['clickpipes']
+integration_logo: '/static/images/integrations/logos/mongodb.svg'
+integration_title: 'ClickPipes for MongoDB'
+integration_tier: 'core'
---
import BetaBadge from '@theme/badges/BetaBadge';
diff --git a/docs/integrations/data-ingestion/clickpipes/postgres/index.md b/docs/integrations/data-ingestion/clickpipes/postgres/index.md
index 8c4af9ed75b..e1bf8f69f64 100644
--- a/docs/integrations/data-ingestion/clickpipes/postgres/index.md
+++ b/docs/integrations/data-ingestion/clickpipes/postgres/index.md
@@ -4,6 +4,10 @@ description: 'Seamlessly connect your Postgres to ClickHouse Cloud.'
slug: /integrations/clickpipes/postgres
title: 'Ingesting Data from Postgres to ClickHouse (using CDC)'
doc_type: 'guide'
+integration_type: ['clickpipes']
+integration_logo: '/static/images/integrations/logos/postgresql.svg'
+integration_title: 'ClickPipes for PostgreSQL'
+integration_tier: 'core'
---
import BetaBadge from '@theme/badges/BetaBadge';
diff --git a/docs/integrations/data-ingestion/dbms/postgresql/connecting-to-postgresql.md b/docs/integrations/data-ingestion/dbms/postgresql/connecting-to-postgresql.md
index c89664195bb..511f0df8b1c 100644
--- a/docs/integrations/data-ingestion/dbms/postgresql/connecting-to-postgresql.md
+++ b/docs/integrations/data-ingestion/dbms/postgresql/connecting-to-postgresql.md
@@ -5,6 +5,10 @@ keywords: ['clickhouse', 'postgres', 'postgresql', 'connect', 'integrate', 'tabl
description: 'Page describing the various ways to connect PostgreSQL to ClickHouse'
show_related_blogs: true
doc_type: 'guide'
+integration_type: ['data ingestion']
+integration_logo: '/static/images/integrations/logos/postgresql.svg'
+integration_title: 'PostgreSQL'
+integration_tier: 'core'
---
import CloudNotSupportedBadge from '@theme/badges/CloudNotSupportedBadge';
diff --git a/docs/integrations/data-ingestion/emqx/index.md b/docs/integrations/data-ingestion/emqx/index.md
index 288fa1f4b3d..b284bf95bae 100644
--- a/docs/integrations/data-ingestion/emqx/index.md
+++ b/docs/integrations/data-ingestion/emqx/index.md
@@ -5,6 +5,10 @@ slug: /integrations/emqx
description: 'Introduction to EMQX with ClickHouse'
title: 'Integrating EMQX with ClickHouse'
doc_type: 'guide'
+integration_title: 'EMQX'
+integration_type: ['data ingestion']
+integration_logo: '/static/images/integrations/logos/emqx.svg'
+integration_tier: 'partner'
---
import emqx_cloud_artitecture from '@site/static/images/integrations/data-ingestion/emqx/emqx-cloud-artitecture.png';
diff --git a/docs/integrations/data-ingestion/etl-tools/airbyte-and-clickhouse.md b/docs/integrations/data-ingestion/etl-tools/airbyte-and-clickhouse.md
index 1a6c82db2e4..444e2ce7d1d 100644
--- a/docs/integrations/data-ingestion/etl-tools/airbyte-and-clickhouse.md
+++ b/docs/integrations/data-ingestion/etl-tools/airbyte-and-clickhouse.md
@@ -6,6 +6,10 @@ slug: /integrations/airbyte
description: 'Stream data into ClickHouse using Airbyte data pipelines'
title: 'Connect Airbyte to ClickHouse'
doc_type: 'guide'
+integration_title: 'Airbyte'
+integration_type: ['data ingestion']
+integration_logo: '/static/images/integrations/logos/airbyte-logo.png'
+integration_tier: 'partner'
---
import Image from '@theme/IdealImage';
@@ -18,11 +22,11 @@ import airbyte06 from '@site/static/images/integrations/data-ingestion/etl-tools
import airbyte07 from '@site/static/images/integrations/data-ingestion/etl-tools/airbyte_07.png';
import airbyte08 from '@site/static/images/integrations/data-ingestion/etl-tools/airbyte_08.png';
import airbyte09 from '@site/static/images/integrations/data-ingestion/etl-tools/airbyte_09.png';
-import CommunityMaintainedBadge from '@theme/badges/CommunityMaintained';
+import PartnerBadge from '@theme/badges/PartnerBadge';
# Connect Airbyte to ClickHouse
-<CommunityMaintainedBadge/>
+<PartnerBadge/>
:::note
Please note that the Airbyte source and destination for ClickHouse are currently in Alpha status and not suitable for moving large datasets (> 10 million rows)
diff --git a/docs/integrations/data-ingestion/etl-tools/apache-beam.md b/docs/integrations/data-ingestion/etl-tools/apache-beam.md
index 5139529cb01..fdb232f69b8 100644
--- a/docs/integrations/data-ingestion/etl-tools/apache-beam.md
+++ b/docs/integrations/data-ingestion/etl-tools/apache-beam.md
@@ -4,6 +4,10 @@ slug: /integrations/apache-beam
description: 'Users can ingest data into ClickHouse using Apache Beam'
title: 'Integrating Apache Beam and ClickHouse'
doc_type: 'guide'
+integration_title: 'Apache Beam'
+integration_type: ['data ingestion']
+integration_logo: '/static/images/integrations/logos/logo_beam.png'
+integration_tier: 'core'
---
import ClickHouseSupportedBadge from '@theme/badges/ClickHouseSupported';
diff --git a/docs/integrations/data-ingestion/etl-tools/bladepipe-and-clickhouse.md b/docs/integrations/data-ingestion/etl-tools/bladepipe-and-clickhouse.md
index d552e50561b..423fcc43ec8 100644
--- a/docs/integrations/data-ingestion/etl-tools/bladepipe-and-clickhouse.md
+++ b/docs/integrations/data-ingestion/etl-tools/bladepipe-and-clickhouse.md
@@ -6,6 +6,10 @@ slug: /integrations/bladepipe
description: 'Stream data into ClickHouse using BladePipe data pipelines'
title: 'Connect BladePipe to ClickHouse'
doc_type: 'guide'
+integration_title: 'BladePipe'
+integration_type: ['data ingestion']
+integration_logo: '/static/images/integrations/logos/bladepipe.svg'
+integration_tier: 'partner'
---
import Image from '@theme/IdealImage';
@@ -18,11 +22,11 @@ import bp_ck_6 from '@site/static/images/integrations/data-ingestion/etl-tools/b
import bp_ck_7 from '@site/static/images/integrations/data-ingestion/etl-tools/bp_ck_7.png';
import bp_ck_8 from '@site/static/images/integrations/data-ingestion/etl-tools/bp_ck_8.png';
import bp_ck_9 from '@site/static/images/integrations/data-ingestion/etl-tools/bp_ck_9.png';
-import CommunityMaintainedBadge from '@theme/badges/CommunityMaintained';
+import PartnerBadge from '@theme/badges/PartnerBadge';
# Connect BladePipe to ClickHouse
-<CommunityMaintainedBadge/>
+BladePipe is a real-time end-to-end data integration tool with sub-second latency, boosting seamless data flow across platforms.
diff --git a/docs/integrations/data-ingestion/etl-tools/dbt/index.md b/docs/integrations/data-ingestion/etl-tools/dbt/index.md
index 9d9ac698341..87725d41663 100644
--- a/docs/integrations/data-ingestion/etl-tools/dbt/index.md
+++ b/docs/integrations/data-ingestion/etl-tools/dbt/index.md
@@ -5,6 +5,10 @@ sidebar_position: 1
description: 'Users can transform and model their data in ClickHouse using dbt'
title: 'Integrating dbt and ClickHouse'
doc_type: 'guide'
+integration_type: ['data integration']
+integration_logo: '/static/images/integrations/logos/dbt.svg'
+integration_title: 'dbt'
+integration_tier: 'core'
---
import TOCInline from '@theme/TOCInline';
diff --git a/docs/integrations/data-ingestion/etl-tools/dlt-and-clickhouse.md b/docs/integrations/data-ingestion/etl-tools/dlt-and-clickhouse.md
index 566c36bd5ef..a1dbb17f414 100644
--- a/docs/integrations/data-ingestion/etl-tools/dlt-and-clickhouse.md
+++ b/docs/integrations/data-ingestion/etl-tools/dlt-and-clickhouse.md
@@ -5,13 +5,17 @@ description: 'Load data into Clickhouse using dlt integration'
title: 'Connect dlt to ClickHouse'
slug: /integrations/data-ingestion/etl-tools/dlt-and-clickhouse
doc_type: 'guide'
+integration_title: 'dlt'
+integration_type: ['data integration']
+integration_logo: '/static/images/integrations/logos/dlthub_logo.svg'
+integration_tier: 'partner'
---
-import CommunityMaintainedBadge from '@theme/badges/CommunityMaintained';
+import PartnerBadge from '@theme/badges/PartnerBadge';
# Connect dlt to ClickHouse
-<CommunityMaintainedBadge/>
+dlt is an open-source library that you can add to your Python scripts to load data from various and often messy data sources into well-structured, live datasets.
diff --git a/docs/integrations/data-ingestion/etl-tools/fivetran/index.md b/docs/integrations/data-ingestion/etl-tools/fivetran/index.md
index 400ea3a9fd1..e6baf47a4bf 100644
--- a/docs/integrations/data-ingestion/etl-tools/fivetran/index.md
+++ b/docs/integrations/data-ingestion/etl-tools/fivetran/index.md
@@ -5,6 +5,10 @@ sidebar_position: 2
description: 'Users can transform and model their data in ClickHouse using dbt'
title: 'Fivetran and ClickHouse Cloud'
doc_type: 'guide'
+integration_type: ['data ingestion']
+integration_logo: '/static/images/integrations/logos/fivetran.svg'
+integration_title: 'Fivetran'
+integration_tier: 'core'
---
import ClickHouseSupportedBadge from '@theme/badges/ClickHouseSupported';
diff --git a/docs/integrations/data-ingestion/etl-tools/nifi-and-clickhouse.md b/docs/integrations/data-ingestion/etl-tools/nifi-and-clickhouse.md
index 6678c164539..25fd60acb34 100644
--- a/docs/integrations/data-ingestion/etl-tools/nifi-and-clickhouse.md
+++ b/docs/integrations/data-ingestion/etl-tools/nifi-and-clickhouse.md
@@ -6,6 +6,10 @@ slug: /integrations/nifi
description: 'Stream data into ClickHouse using NiFi data pipelines'
title: 'Connect Apache NiFi to ClickHouse'
doc_type: 'guide'
+integration_title: 'Apache NiFi'
+integration_type: ['data ingestion']
+integration_logo: '/static/images/integrations/logos/logo_nifi.png'
+integration_tier: 'community'
---
import ConnectionDetails from '@site/docs/_snippets/_gather_your_details_http.mdx';
diff --git a/docs/integrations/data-ingestion/etl-tools/vector-to-clickhouse.md b/docs/integrations/data-ingestion/etl-tools/vector-to-clickhouse.md
index dc5c36aef08..655e9a3d8b2 100644
--- a/docs/integrations/data-ingestion/etl-tools/vector-to-clickhouse.md
+++ b/docs/integrations/data-ingestion/etl-tools/vector-to-clickhouse.md
@@ -6,16 +6,20 @@ description: 'How to tail a log file into ClickHouse using Vector'
title: 'Integrating Vector with ClickHouse'
show_related_blogs: true
doc_type: 'guide'
+integration_title: 'Vector'
+integration_type: ['data ingestion']
+integration_logo: '/static/images/integrations/logos/vector.png'
+integration_tier: 'partner'
---
import Image from '@theme/IdealImage';
import vector01 from '@site/static/images/integrations/data-ingestion/etl-tools/vector_01.png';
import vector02 from '@site/static/images/integrations/data-ingestion/etl-tools/vector_02.png';
-import CommunityMaintainedBadge from '@theme/badges/CommunityMaintained';
+import PartnerBadge from '@theme/badges/PartnerBadge';
# Integrating Vector with ClickHouse
-<CommunityMaintainedBadge/>
+<PartnerBadge/>
Being able to analyze your logs in real time is critical for production applications. Have you ever wondered if ClickHouse is good at storing and analyzing log data? Just check out Uber's experience with converting their logging infrastructure from ELK to ClickHouse.
diff --git a/docs/integrations/data-ingestion/gcs/index.md b/docs/integrations/data-ingestion/gcs/index.md
index ee640dc9a6e..0660ff3e73a 100644
--- a/docs/integrations/data-ingestion/gcs/index.md
+++ b/docs/integrations/data-ingestion/gcs/index.md
@@ -5,6 +5,10 @@ slug: /integrations/gcs
description: 'Google Cloud Storage (GCS) Backed MergeTree'
title: 'Integrate Google Cloud Storage with ClickHouse'
doc_type: 'guide'
+integration_type: ['data ingestion']
+integration_logo: '/static/images/integrations/logos/gcs.svg'
+integration_title: 'Google Cloud Storage (GCS)'
+integration_tier: 'core'
---
import BucketDetails from '@site/docs/_snippets/_GCS_authentication_and_bucket.md';
diff --git a/docs/integrations/data-ingestion/google-dataflow/dataflow.md b/docs/integrations/data-ingestion/google-dataflow/dataflow.md
index d0560a44a85..65694b7b61d 100644
--- a/docs/integrations/data-ingestion/google-dataflow/dataflow.md
+++ b/docs/integrations/data-ingestion/google-dataflow/dataflow.md
@@ -5,6 +5,10 @@ sidebar_position: 1
description: 'Users can ingest data into ClickHouse using Google Dataflow'
title: 'Integrating Google Dataflow with ClickHouse'
doc_type: 'guide'
+integration_title: 'Dataflow'
+integration_type: ['data ingestion']
+integration_logo: '/static/images/integrations/logos/dataflow_logo.png'
+integration_tier: 'core'
---
import ClickHouseSupportedBadge from '@theme/badges/ClickHouseSupported';
diff --git a/docs/integrations/data-ingestion/kafka/confluent/custom-connector.md b/docs/integrations/data-ingestion/kafka/confluent/custom-connector.md
index 9225c8e16e9..848125f877d 100644
--- a/docs/integrations/data-ingestion/kafka/confluent/custom-connector.md
+++ b/docs/integrations/data-ingestion/kafka/confluent/custom-connector.md
@@ -5,6 +5,10 @@ slug: /integrations/kafka/cloud/confluent/custom-connector
description: 'Using ClickHouse Connector Sink with Kafka Connect and ClickHouse'
title: 'Integrating Confluent Cloud with ClickHouse'
doc_type: 'guide'
+integration_type: ['data ingestion']
+integration_logo: '/static/images/integrations/logos/confluent.svg'
+integration_title: 'Kafka connector - Confluent'
+integration_tier: 'core'
---
import ConnectionDetails from '@site/docs/_snippets/_gather_your_details_http.mdx';
diff --git a/docs/integrations/data-ingestion/kafka/index.md b/docs/integrations/data-ingestion/kafka/index.md
index f460d47ef88..f65a0812806 100644
--- a/docs/integrations/data-ingestion/kafka/index.md
+++ b/docs/integrations/data-ingestion/kafka/index.md
@@ -5,6 +5,10 @@ slug: /integrations/kafka
description: 'Introduction to Kafka with ClickHouse'
title: 'Integrating Kafka with ClickHouse'
doc_type: 'guide'
+integration_type: ['data ingestion']
+integration_logo: '/static/images/integrations/logos/kafka.svg'
+integration_title: 'Kafka'
+integration_tier: 'core'
---
# Integrating Kafka with ClickHouse
diff --git a/docs/integrations/data-ingestion/kafka/msk/index.md b/docs/integrations/data-ingestion/kafka/msk/index.md
index 825e974a213..e0685c3d11d 100644
--- a/docs/integrations/data-ingestion/kafka/msk/index.md
+++ b/docs/integrations/data-ingestion/kafka/msk/index.md
@@ -6,6 +6,8 @@ description: 'The official Kafka connector from ClickHouse with Amazon MSK'
keywords: ['integration', 'kafka', 'amazon msk', 'sink', 'connector']
title: 'Integrating Amazon MSK with ClickHouse'
doc_type: 'guide'
+integration_type: ['data ingestion']
+integration_tier: 'core'
---
import ConnectionDetails from '@site/docs/_snippets/_gather_your_details_http.mdx';
diff --git a/docs/integrations/data-ingestion/s3-minio.md b/docs/integrations/data-ingestion/s3-minio.md
index ee1e05bfe89..113606297aa 100644
--- a/docs/integrations/data-ingestion/s3-minio.md
+++ b/docs/integrations/data-ingestion/s3-minio.md
@@ -5,6 +5,10 @@ slug: /integrations/minio
description: 'Page describing how to use MinIO with ClickHouse'
title: 'Using MinIO'
doc_type: 'guide'
+integration_type: ['data ingestion']
+integration_logo: '/static/images/integrations/logos/minio.png'
+integration_title: 'MinIO'
+integration_tier: 'core'
---
# Using MinIO
diff --git a/docs/integrations/data-ingestion/s3/index.md b/docs/integrations/data-ingestion/s3/index.md
index e48e84408e6..bb960a1fd64 100644
--- a/docs/integrations/data-ingestion/s3/index.md
+++ b/docs/integrations/data-ingestion/s3/index.md
@@ -5,6 +5,10 @@ sidebar_label: 'Integrating S3 with ClickHouse'
title: 'Integrating S3 with ClickHouse'
description: 'Page describing how to integrate S3 with ClickHouse'
doc_type: 'guide'
+integration_type: ['data ingestion']
+integration_logo: '/static/images/integrations/logos/amazon_s3_logo.svg'
+integration_title: 'Amazon S3'
+integration_tier: 'core'
---
import BucketDetails from '@site/docs/_snippets/_S3_authentication_and_bucket.md';
diff --git a/docs/integrations/data-visualization/astrato-and-clickhouse.md b/docs/integrations/data-visualization/astrato-and-clickhouse.md
index 2900a48daad..497be1b2cae 100644
--- a/docs/integrations/data-visualization/astrato-and-clickhouse.md
+++ b/docs/integrations/data-visualization/astrato-and-clickhouse.md
@@ -6,6 +6,10 @@ keywords: ['clickhouse', 'Power BI', 'connect', 'integrate', 'ui', 'data apps',
description: 'Astrato brings true Self-Service BI to Enterprises & Data Businesses by putting analytics in the hands of every user, enabling them to build their own dashboards, reports and data apps, enabling the answering of data questions without IT help. Astrato accelerates adoption, speeds up decision-making, and unifies analytics, embedded analytics, data input, and data apps in one platform. Astrato unites action and analytics in one, introduce live write-back, interact with ML models, accelerate your analytics with AI – go beyond dashboarding, thanks to pushdown SQL support in Astrato.'
title: 'Connecting Astrato to ClickHouse'
doc_type: 'guide'
+integration_title: 'Astrato'
+integration_type: ['data visualization']
+integration_logo: '/static/images/integrations/logos/astrato_logo.svg'
+integration_tier: 'community'
---
import astrato_1_dataconnection from '@site/static/images/integrations/data-visualization/astrato_1_dataconnection.png';
diff --git a/docs/integrations/data-visualization/chartbrew-and-clickhouse.md b/docs/integrations/data-visualization/chartbrew-and-clickhouse.md
index 27cc5e8bfc6..f68d1745646 100644
--- a/docs/integrations/data-visualization/chartbrew-and-clickhouse.md
+++ b/docs/integrations/data-visualization/chartbrew-and-clickhouse.md
@@ -6,6 +6,10 @@ slug: /integrations/chartbrew-and-clickhouse
keywords: ['ClickHouse', 'Chartbrew', 'connect', 'integrate', 'visualization']
description: 'Connect Chartbrew to ClickHouse to create real-time dashboards and client reports.'
doc_type: 'guide'
+integration_title: 'Chartbrew'
+integration_type: ['data visualization']
+integration_logo: '/static/images/integrations/logos/logo_chartbrew.png'
+integration_tier: 'community'
---
import chartbrew_01 from '@site/static/images/integrations/data-visualization/chartbrew_01.png';
diff --git a/docs/integrations/data-visualization/deepnote.md b/docs/integrations/data-visualization/deepnote.md
index 936c384ea62..76f3aba97a0 100644
--- a/docs/integrations/data-visualization/deepnote.md
+++ b/docs/integrations/data-visualization/deepnote.md
@@ -6,6 +6,10 @@ keywords: ['clickhouse', 'Deepnote', 'connect', 'integrate', 'notebook']
description: 'Efficiently query very large datasets, analyzing and modeling in the comfort of a familiar notebook environment.'
title: 'Connect ClickHouse to Deepnote'
doc_type: 'guide'
+integration_title: 'Deepnote'
+integration_type: ['data visualization']
+integration_logo: '/static/images/integrations/logos/deepnote.svg'
+integration_tier: 'community'
---
import deepnote_01 from '@site/static/images/integrations/data-visualization/deepnote_01.png';
diff --git a/docs/integrations/data-visualization/draxlr-and-clickhouse.md b/docs/integrations/data-visualization/draxlr-and-clickhouse.md
index b010bd07703..5121372087d 100644
--- a/docs/integrations/data-visualization/draxlr-and-clickhouse.md
+++ b/docs/integrations/data-visualization/draxlr-and-clickhouse.md
@@ -6,6 +6,10 @@ keywords: ['clickhouse', 'Draxlr', 'connect', 'integrate', 'ui']
description: 'Draxlr is a Business intelligence tool with data visualization and analytics.'
title: 'Connecting Draxlr to ClickHouse'
doc_type: 'guide'
+integration_title: 'Draxlr'
+integration_type: ['data visualization']
+integration_logo: '/static/images/integrations/logos/draxlr.svg'
+integration_tier: 'community'
---
import ConnectionDetails from '@site/docs/_snippets/_gather_your_details_http.mdx';
diff --git a/docs/integrations/data-visualization/explo-and-clickhouse.md b/docs/integrations/data-visualization/explo-and-clickhouse.md
index 071a3b3a275..b1bd1539d6c 100644
--- a/docs/integrations/data-visualization/explo-and-clickhouse.md
+++ b/docs/integrations/data-visualization/explo-and-clickhouse.md
@@ -6,6 +6,10 @@ keywords: ['clickhouse', 'Explo', 'connect', 'integrate', 'ui']
description: 'Explo is an easy-to-use, open source UI tool for asking questions about your data.'
title: 'Connecting Explo to ClickHouse'
doc_type: 'guide'
+integration_title: 'Explo'
+integration_type: ['data visualization']
+integration_logo: '/static/images/integrations/logos/explo.png'
+integration_tier: 'community'
---
import Image from '@theme/IdealImage';
diff --git a/docs/integrations/data-visualization/fabi-and-clickhouse.md b/docs/integrations/data-visualization/fabi-and-clickhouse.md
index 878b66a3208..958a0c78737 100644
--- a/docs/integrations/data-visualization/fabi-and-clickhouse.md
+++ b/docs/integrations/data-visualization/fabi-and-clickhouse.md
@@ -5,6 +5,10 @@ keywords: ['clickhouse', 'Fabi.ai', 'connect', 'integrate', 'notebook', 'ui', 'a
description: 'Fabi.ai is an all-in-one collaborative data analysis platform. You can leverage SQL, Python, AI, and no-code to build dashboards and data workflows faster than ever before.'
title: 'Connect ClickHouse to Fabi.ai'
doc_type: 'guide'
+integration_title: 'Fabi.ai'
+integration_type: ['data visualization']
+integration_logo: '/static/images/integrations/logos/fabi.png'
+integration_tier: 'community'
---
import fabi_01 from '@site/static/images/integrations/data-visualization/fabi_01.png';
diff --git a/docs/integrations/data-visualization/grafana/index.md b/docs/integrations/data-visualization/grafana/index.md
index 1f4ee182e47..2e9ed418d2e 100644
--- a/docs/integrations/data-visualization/grafana/index.md
+++ b/docs/integrations/data-visualization/grafana/index.md
@@ -6,6 +6,10 @@ description: 'Introduction to using ClickHouse with Grafana'
title: 'ClickHouse data source plugin for Grafana'
show_related_blogs: true
doc_type: 'guide'
+integration_title: 'Grafana'
+integration_type: ['data visualization']
+integration_logo: '/static/images/integrations/logos/grafana.svg'
+integration_tier: 'core'
---
import ConnectionDetails from '@site/docs/_snippets/_gather_your_details_native.md';
diff --git a/docs/integrations/data-visualization/looker-and-clickhouse.md b/docs/integrations/data-visualization/looker-and-clickhouse.md
index cc3be8c1055..4d85b63083b 100644
--- a/docs/integrations/data-visualization/looker-and-clickhouse.md
+++ b/docs/integrations/data-visualization/looker-and-clickhouse.md
@@ -5,6 +5,10 @@ keywords: ['clickhouse', 'looker', 'connect', 'integrate', 'ui']
description: 'Looker is an enterprise platform for BI, data applications, and embedded analytics that helps you explore and share insights in real time.'
title: 'Looker'
doc_type: 'guide'
+integration_type: ['data visualization']
+integration_logo: '/static/images/integrations/logos/looker.svg'
+integration_title: 'Looker'
+integration_tier: 'core'
---
import Image from '@theme/IdealImage';
diff --git a/docs/integrations/data-visualization/looker-studio-and-clickhouse.md b/docs/integrations/data-visualization/looker-studio-and-clickhouse.md
index 07590cf70d6..47f8793d33f 100644
--- a/docs/integrations/data-visualization/looker-studio-and-clickhouse.md
+++ b/docs/integrations/data-visualization/looker-studio-and-clickhouse.md
@@ -5,6 +5,10 @@ keywords: ['clickhouse', 'looker', 'studio', 'connect', 'mysql', 'integrate', 'u
description: 'Looker Studio, formerly Google Data Studio, is an online tool for converting data into customizable informative reports and dashboards.'
title: 'Looker Studio'
doc_type: 'guide'
+integration_type: ['data visualization']
+integration_logo: '/static/images/integrations/logos/looker_studio.svg'
+integration_title: 'Looker Studio'
+integration_tier: 'core'
---
import Image from '@theme/IdealImage';
diff --git a/docs/integrations/data-visualization/luzmo-and-clickhouse.md b/docs/integrations/data-visualization/luzmo-and-clickhouse.md
index 93b5b8fd4ce..b89dbba9411 100644
--- a/docs/integrations/data-visualization/luzmo-and-clickhouse.md
+++ b/docs/integrations/data-visualization/luzmo-and-clickhouse.md
@@ -6,6 +6,10 @@ description: 'Luzmo is an embedded analytics platform with a native ClickHouse i
title: 'Integrating Luzmo with ClickHouse'
sidebar: 'integrations'
doc_type: 'guide'
+integration_title: 'Luzmo'
+integration_type: ['data visualization']
+integration_logo: '/static/images/integrations/logos/luzmo.png'
+integration_tier: 'community'
---
import ConnectionDetails from '@site/docs/_snippets/_gather_your_details_http.mdx';
diff --git a/docs/integrations/data-visualization/metabase-and-clickhouse.md b/docs/integrations/data-visualization/metabase-and-clickhouse.md
index 93cde69c5d3..826a9cc5e07 100644
--- a/docs/integrations/data-visualization/metabase-and-clickhouse.md
+++ b/docs/integrations/data-visualization/metabase-and-clickhouse.md
@@ -7,6 +7,10 @@ description: 'Metabase is an easy-to-use, open source UI tool for asking questio
title: 'Connecting Metabase to ClickHouse'
show_related_blogs: true
doc_type: 'guide'
+integration_type: ['data visualization']
+integration_logo: '/static/images/integrations/logos/logo_metabase.png'
+integration_tier: 'community'
+integration_title: 'Metabase'
---
import Image from '@theme/IdealImage';
diff --git a/docs/integrations/data-visualization/mitzu-and-clickhouse.md b/docs/integrations/data-visualization/mitzu-and-clickhouse.md
index 97e83b62594..9d399fa1441 100644
--- a/docs/integrations/data-visualization/mitzu-and-clickhouse.md
+++ b/docs/integrations/data-visualization/mitzu-and-clickhouse.md
@@ -5,6 +5,10 @@ keywords: ['clickhouse', 'Mitzu', 'connect', 'integrate', 'ui']
description: 'Mitzu is a no-code warehouse-native product analytics application.'
title: 'Connecting Mitzu to ClickHouse'
doc_type: 'guide'
+integration_tier: 'community'
+integration_type: ['data visualization']
+integration_logo: '/static/images/integrations/logos/logo_mitzu.png'
+integration_title: 'Mitzu'
---
import ConnectionDetails from '@site/docs/_snippets/_gather_your_details_http.mdx';
diff --git a/docs/integrations/data-visualization/powerbi-and-clickhouse.md b/docs/integrations/data-visualization/powerbi-and-clickhouse.md
index 82a5f578a32..c17452b4173 100644
--- a/docs/integrations/data-visualization/powerbi-and-clickhouse.md
+++ b/docs/integrations/data-visualization/powerbi-and-clickhouse.md
@@ -5,6 +5,10 @@ keywords: ['clickhouse', 'Power BI', 'connect', 'integrate', 'ui']
description: 'Microsoft Power BI is an interactive data visualization software product developed by Microsoft with a primary focus on business intelligence.'
title: 'Power BI'
doc_type: 'guide'
+integration_type: ['data visualization']
+integration_logo: '/static/images/integrations/logos/powerbi.png'
+integration_title: 'Power BI'
+integration_tier: 'core'
---
import ConnectionDetails from '@site/docs/_snippets/_gather_your_details_http.mdx';
diff --git a/docs/integrations/data-visualization/quicksight-and-clickhouse.md b/docs/integrations/data-visualization/quicksight-and-clickhouse.md
index 47cbf7806ff..9367663f12f 100644
--- a/docs/integrations/data-visualization/quicksight-and-clickhouse.md
+++ b/docs/integrations/data-visualization/quicksight-and-clickhouse.md
@@ -5,6 +5,10 @@ keywords: ['clickhouse', 'aws', 'amazon', 'QuickSight', 'mysql', 'connect', 'int
description: 'Amazon QuickSight powers data-driven organizations with unified business intelligence (BI).'
title: 'QuickSight'
doc_type: 'guide'
+integration_type: ['data visualization']
+integration_logo: '/static/images/integrations/logos/quicksight.svg'
+integration_title: 'QuickSight'
+integration_tier: 'core'
---
import MySQLOnPremiseSetup from '@site/docs/_snippets/_clickhouse_mysql_on_premise_setup.mdx';
diff --git a/docs/integrations/data-visualization/rocketbi-and-clickhouse.md b/docs/integrations/data-visualization/rocketbi-and-clickhouse.md
index cf43ecea971..0b5bd5ff88b 100644
--- a/docs/integrations/data-visualization/rocketbi-and-clickhouse.md
+++ b/docs/integrations/data-visualization/rocketbi-and-clickhouse.md
@@ -6,6 +6,10 @@ keywords: ['clickhouse', 'RocketBI', 'connect', 'integrate', 'ui']
description: 'RocketBI is a self-service business intelligence platform that helps you quickly analyze data, build drag-n-drop visualizations and collaborate with colleagues right on your web browser.'
title: 'GOAL: BUILD YOUR 1ST DASHBOARD'
doc_type: 'guide'
+integration_title: 'Rocket.BI'
+integration_type: ['data visualization']
+integration_logo: '/static/images/integrations/logos/rocketbi-logo.svg'
+integration_tier: 'community'
---
import ConnectionDetails from '@site/docs/_snippets/_gather_your_details_http.mdx';
@@ -30,7 +34,7 @@ import rocketbi_17 from '@site/static/images/integrations/data-visualization/roc
import rocketbi_18 from '@site/static/images/integrations/data-visualization/rocketbi_18.png';
import CommunityMaintainedBadge from '@theme/badges/CommunityMaintained';
-# Goal: build your first dashboard
+# Goal: build your first dashboard with Rocket.BI
diff --git a/docs/integrations/data-visualization/superset-and-clickhouse.md b/docs/integrations/data-visualization/superset-and-clickhouse.md
index e842b9214da..57869eddc29 100644
--- a/docs/integrations/data-visualization/superset-and-clickhouse.md
+++ b/docs/integrations/data-visualization/superset-and-clickhouse.md
@@ -7,6 +7,10 @@ description: 'Apache Superset is an open-source data exploration and visualizati
title: 'Connect Superset to ClickHouse'
show_related_blogs: true
doc_type: 'guide'
+integration_type: ['data visualization']
+integration_logo: '/static/images/integrations/logos/superset.svg'
+integration_title: 'Superset'
+integration_tier: 'core'
---
import ConnectionDetails from '@site/docs/_snippets/_gather_your_details_http.mdx';
diff --git a/docs/integrations/data-visualization/tableau/tableau-and-clickhouse.md b/docs/integrations/data-visualization/tableau/tableau-and-clickhouse.md
index 422fd4fc7e6..0652b3cd083 100644
--- a/docs/integrations/data-visualization/tableau/tableau-and-clickhouse.md
+++ b/docs/integrations/data-visualization/tableau/tableau-and-clickhouse.md
@@ -6,6 +6,10 @@ keywords: ['clickhouse', 'tableau', 'connect', 'integrate', 'ui']
description: 'Tableau can use ClickHouse databases and tables as a data source.'
title: 'Connecting Tableau to ClickHouse'
doc_type: 'guide'
+integration_type: ['data visualization']
+integration_logo: '/static/images/integrations/logos/logo_tableau.png'
+integration_title: 'Tableau'
+integration_tier: 'core'
---
import TOCInline from '@theme/TOCInline';
diff --git a/docs/integrations/data-visualization/tableau/tableau-online-and-clickhouse.md b/docs/integrations/data-visualization/tableau/tableau-online-and-clickhouse.md
index 7d3dcdd5636..547a8b89fb5 100644
--- a/docs/integrations/data-visualization/tableau/tableau-online-and-clickhouse.md
+++ b/docs/integrations/data-visualization/tableau/tableau-online-and-clickhouse.md
@@ -6,6 +6,10 @@ keywords: ['clickhouse', 'tableau', 'online', 'mysql', 'connect', 'integrate', '
description: 'Tableau Online streamlines the power of data to make people faster and more confident decision makers from anywhere.'
title: 'Tableau Online'
doc_type: 'guide'
+integration_type: ['data visualization']
+integration_logo: '/static/images/integrations/logos/logo_tableau.png'
+integration_title: 'Tableau Online'
+integration_tier: 'core'
---
import MySQLCloudSetup from '@site/docs/_snippets/_clickhouse_mysql_cloud_setup.mdx';
diff --git a/docs/integrations/index.mdx b/docs/integrations/index.mdx
index 951bea08c33..0d6c7e106b7 100644
--- a/docs/integrations/index.mdx
+++ b/docs/integrations/index.mdx
@@ -3,405 +3,20 @@ slug: /integrations
title: 'Integrations'
keywords: ['integrations', 'integrate', 'integrate with']
description: 'Integrations with ClickHouse'
-hide_table_of_contents: true
doc_type: 'landing-page'
+sidebarCollapsed: true
+hide_table_of_contents: true
---
-import acceldatapng from '@site/static/images/integrations/logos/acceldata_logo.png';
-import Amazonmsksvg from '@site/static/images/integrations/logos/amazon_msk.svg';
-import Astratosvg from '@site/static/images/integrations/logos/astrato_logo.svg';
-import apachestreamparkpng from '@site/static/images/integrations/logos/apache-streampark.png';
-import Azureeventhubssvg from '@site/static/images/integrations/logos/azure_event_hubs.svg';
-import BladePipesvg from '@site/static/images/integrations/logos/bladepipe.svg';
-import blinkopspng from '@site/static/images/integrations/logos/blinkops_logo.png';
-import CloudCanalsvg from '@site/static/images/integrations/logos/cloudcanal.svg';
-import Chdbsvg from '@site/static/images/integrations/logos/chdb.svg';
-import Clickhousesvg from '@site/static/images/integrations/logos/clickhouse.svg';
-import Clickhousemonitoringdashboardsvg from '@site/static/images/integrations/logos/clickhouse-monitoring-dashboard.svg';
-import Cloudquerysvg from '@site/static/images/integrations/logos/cloudquery_logo.svg';
-import Confluentsvg from '@site/static/images/integrations/logos/confluent.svg';
-import Csharpsvg from '@site/static/images/integrations/logos/csharp.svg';
-import Cubejssvg from '@site/static/images/integrations/logos/cubejs.svg';
-import Datagripsvg from '@site/static/images/integrations/logos/data_grip.svg';
-import Datalenssvg from '@site/static/images/integrations/logos/datalens.svg';
-import Dbeaversvg from '@site/static/images/integrations/logos/dbeaver_logo.svg';
-import Dbtsvg from '@site/static/images/integrations/logos/dbt.svg';
-import Deepnotesvg from '@site/static/images/integrations/logos/deepnote.svg';
-import Dlthubsvg from '@site/static/images/integrations/logos/dlthub_logo.svg';
-import Draxlrsvg from '@site/static/images/integrations/logos/draxlr.svg';
-import Emqxsvg from '@site/static/images/integrations/logos/emqx.svg';
-import Explosvg from '@site/static/images/integrations/logos/explo.svg';
-import Fivetransvg from '@site/static/images/integrations/logos/fivetran.svg';
-import Gcssvg from '@site/static/images/integrations/logos/gcs.svg';
-import Golangsvg from '@site/static/images/integrations/logos/golang.svg';
-import Grafanasvg from '@site/static/images/integrations/logos/grafana.svg';
-import Hdfssvg from '@site/static/images/integrations/logos/hadoop.svg';
-import Hivesvg from '@site/static/images/integrations/logos/hive.svg';
-import Javasvg from '@site/static/images/integrations/logos/java.svg';
-import Jitsusvg from '@site/static/images/integrations/logos/jitsu.svg';
-import Kafkasvg from '@site/static/images/integrations/logos/kafka.svg';
-import Kinesissvg from '@site/static/images/integrations/logos/amazon_kinesis_logo.svg';
-import Kestrasvg from '@site/static/images/integrations/logos/kestra.svg';
-import Lookersvg from '@site/static/images/integrations/logos/looker.svg';
-import Lookerstudiosvg from '@site/static/images/integrations/logos/looker_studio.svg';
-import Mongodbsvg from '@site/static/images/integrations/logos/mongodb.svg';
-import Mysqlsvg from '@site/static/images/integrations/logos/mysql.svg';
-import Natssvg from '@site/static/images/integrations/logos/nats.svg';
-import Nodesvg from '@site/static/images/integrations/logos/node_js.svg';
-import Omnisvg from '@site/static/images/integrations/logos/omni.svg';
-import Observablesvg from '@site/static/images/integrations/logos/observable.svg';
-import opsramppng from '@site/static/images/integrations/logos/ops_ramp_logo.png';
-import Popsinksvg from '@site/static/images/integrations/logos/popsink.svg';
-import Postgresqlsvg from '@site/static/images/integrations/logos/postgresql.svg';
-import Prequelsvg from '@site/static/images/integrations/logos/prequel.svg';
-import Pythonsvg from '@site/static/images/integrations/logos/notext-python.svg';
-import Qrynsvg from '@site/static/images/integrations/logos/qryn.svg';
-import Quesmasvg from '@site/static/images/integrations/logos/quesma.svg';
-import Quicksightsvg from '@site/static/images/integrations/logos/quicksight.svg';
-import Rabbitmqsvg from '@site/static/images/integrations/logos/rabbitmq.svg';
-import Redissvg from '@site/static/images/integrations/logos/redis.svg';
-import restackpng from '@site/static/images/integrations/logos/restack_logo.png';
-import Retoolsvg from '@site/static/images/integrations/logos/retool.svg';
-import Rillsvg from '@site/static/images/integrations/logos/rill.svg';
-import Risingwavesvg from '@site/static/images/integrations/logos/risingwave.svg';
-import RocketbiSVG from '@site/static/images/integrations/logos/rocketbi-logo.svg';
-import Rocksdbsvg from '@site/static/images/integrations/logos/rocksdb.svg';
-import Rudderstacksvg from '@site/static/images/integrations/logos/rudderstack.svg';
-import S3svg from '@site/static/images/integrations/logos/amazon_s3_logo.svg';
-import sematextpng from '@site/static/images/integrations/logos/sematext_logo.png';
-import skywalkingjpeg from '@site/static/images/integrations/logos/skywalking_logo.jpeg';
-import snappyflowpng from '@site/static/images/integrations/logos/snappy_flow_logo.png';
-import Sparksvg from '@site/static/images/integrations/logos/apache_spark_logo.svg';
-import sodapng from '@site/static/images/integrations/logos/soda_logo.png';
-import Sqlitesvg from '@site/static/images/integrations/logos/sqlite.svg';
-import Supersetsvg from '@site/static/images/integrations/logos/superset.svg';
-import Tablumsvg from '@site/static/images/integrations/logos/tablum.svg';
-import teleport from '@site/static/images/integrations/logos/teleport_logo.png';
-import Trickstercachesvg from '@site/static/images/integrations/logos/trickster-logo.svg';
-import Upstashsvg from '@site/static/images/integrations/logos/upstash.svg';
-import Yepcodesvg from '@site/static/images/integrations/logos/yepcode.svg';
-import Warpstreamsvg from '@site/static/images/integrations/logos/warpstream.svg';
-import Bytewaxsvg from '@site/static/images/integrations/logos/bytewax.svg';
-import glue_logo from '@site/static/images/integrations/logos/glue_logo.png';
-import azure_synapse_logo from '@site/static/images/integrations/logos/azure-synapse.png';
-import azure_data_factory_logo from '@site/static/images/integrations/logos/azure-data-factory.png';
-import logo_cpp from '@site/static/images/integrations/logos/logo_cpp.png';
-import cassandra from '@site/static/images/integrations/logos/cassandra.png';
-import deltalake from '@site/static/images/integrations/logos/deltalake.png';
-import hudi from '@site/static/images/integrations/logos/hudi.png';
-import iceberg from '@site/static/images/integrations/logos/iceberg.png';
-import metabase from '@site/static/images/integrations/logos/logo_metabase.png';
-import minio from '@site/static/images/integrations/logos/minio.png';
-import odbc from '@site/static/images/integrations/logos/odbc.png';
-import logo_otel from '@site/static/images/integrations/logos/logo_otel.png';
-import powerbi from '@site/static/images/integrations/logos/powerbi.png';
-import redpanda from '@site/static/images/integrations/logos/logo_redpanda.png';
-import rust from '@site/static/images/integrations/logos/logo_rust.png';
-import tableau from '@site/static/images/integrations/logos/logo_tableau.png';
-import airbyte_logo from '@site/static/images/integrations/logos/airbyte-logo.png';
-import acceldata_logo from '@site/static/images/integrations/logos/acceldata_logo.png';
-import atlas_logo from '@site/static/images/integrations/logos/atlas-logo.png';
-import automq_logo from '@site/static/images/integrations/logos/automq.png';
-import blinkops_logo from '@site/static/images/integrations/logos/blinkops_logo.png';
-import calyptia_logo from '@site/static/images/integrations/logos/logo_calyptia.png';
-import dataddo_logo from '@site/static/images/integrations/logos/logo_dataddo.png';
-import dbvisualizer_logo from '@site/static/images/integrations/logos/logo_dbvisualizer.png';
-import decodable_logo from '@site/static/images/integrations/logos/logo_decodable.png';
-import explo_logo from '@site/static/images/integrations/logos/explo.png';
-import fabi_logo from '@site/static/images/integrations/logos/fabi.png';
-import gigasheet_logo from '@site/static/images/integrations/logos/gigasheet.png';
-import glassflow_logo from '@site/static/images/integrations/logos/glassflow.png';
-import goldsky_logo from '@site/static/images/integrations/logos/goldsky.png';
-import growthbook_logo from '@site/static/images/integrations/logos/logo_growthbook.png';
-import hex_logo from '@site/static/images/integrations/logos/logo-hex.png';
-import hightouch_logo from '@site/static/images/integrations/logos/logo_hightouch.png';
-import holistics_logo from '@site/static/images/integrations/logos/logo_holistics.png';
-import housewatch_logo from '@site/static/images/integrations/logos/housewatch.png';
-import mindsdb_logo from '@site/static/images/integrations/logos/logo_mindsdb.png';
-import mitzu_logo from '@site/static/images/integrations/logos/logo_mitzu.png';
-import mode_logo from '@site/static/images/integrations/logos/logo_mode.png';
-import redash_logo from '@site/static/images/integrations/logos/logo_redash.png';
-import restack_logo from '@site/static/images/integrations/logos/restack_logo.png';
-import sematext_logo from '@site/static/images/integrations/logos/sematext_logo.png';
-import streamingfast_logo from '@site/static/images/integrations/logos/streamingfast.png';
-import supabase_logo from '@site/static/images/integrations/logos/logo_supabase.png';
-import teleport_logo from '@site/static/images/integrations/logos/teleport_logo.png';
-import tooljet_logo from '@site/static/images/integrations/logos/tooljet.png';
-import vector_logo from '@site/static/images/integrations/logos/vector.png';
-import zing_logo from '@site/static/images/integrations/logos/zing-logo.png';
-import mage_logo from '@site/static/images/integrations/logos/mage.jpg';
-import metaplane_logo from '@site/static/images/integrations/logos/logo_metaplane.png';
-import openBlocks_logo from '@site/static/images/integrations/logos/logo_openBlocks.png';
-import ramp_logo from '@site/static/images/integrations/logos/ops_ramp_logo.png';
-import runreveal_logo from '@site/static/images/integrations/logos/runreveal.png';
-import sisense_logo from '@site/static/images/integrations/logos/logo_sisense.png';
-import signoz_logo from '@site/static/images/integrations/logos/signoz-logo.png';
-import snappy_flow_logo from '@site/static/images/integrations/logos/snappy_flow_logo.png';
-import soda_logo from '@site/static/images/integrations/logos/soda_logo.png';
-import splunk_logo from '@site/static/images/integrations/logos/splunk_logo.png';
-import streamkap_logo from '@site/static/images/integrations/logos/streamkap-logo.png';
-import airflow_logo from '@site/static/images/integrations/logos/logo_airflow.png';
-import beam_logo from '@site/static/images/integrations/logos/logo_beam.png';
-import inlong_logo from '@site/static/images/integrations/logos/logo_inlong.png';
-import nifi_logo from '@site/static/images/integrations/logos/logo_nifi.png';
-import seatunnel_logo from '@site/static/images/integrations/logos/logo_seatunnel.png';
-import skywalking_logo from '@site/static/images/integrations/logos/skywalking_logo.jpeg';
-import streampark_logo from '@site/static/images/integrations/logos/apache-streampark.png';
-import bytebase_logo from '@site/static/images/integrations/logos/logo_bytebase.png';
-import clickhouse_cl_logo from '@site/static/images/integrations/logos/clickhouse-cl.png';
-import dataflow_logo from '@site/static/images/integrations/logos/dataflow_logo.png';
-import dbnet_logo from '@site/static/images/integrations/logos/dbnet_logo.png';
-import datalens_logo from '@site/static/images/integrations/logos/datalens.png';
-import dataease_logo from '@site/static/images/integrations/logos/dataease.png';
-import datahub_logo from '@site/static/images/integrations/logos/logo_datahub.png';
-import deepflow_logo from '@site/static/images/integrations/logos/logo_deepflow.png';
-import easypanel_logo from '@site/static/images/integrations/logos/logo-easypanel.png';
-import flink_logo from '@site/static/images/integrations/logos/logo_flink.png';
-import goose_logo from '@site/static/images/integrations/logos/goose_logo.png';
-import ibis_logo from '@site/static/images/integrations/logos/logo_ibis.png';
-import jaeger_logo from '@site/static/images/integrations/logos/logo_jaeger.png';
-import jupyter_logo from '@site/static/images/integrations/logos/jupyter.png';
-import adaptive_logo from '@site/static/images/integrations/logos/adaptive_logo.png';
-import mprove_logo from '@site/static/images/integrations/logos/logo_mprove.png';
-import php_logo from '@site/static/images/integrations/logos/logo_php.png';
-import pinax_logo from '@site/static/images/integrations/logos/pinax-logo.png';
-import pulse_logo from '@site/static/images/integrations/logos/pulse.png';
-import qstudio_logo from '@site/static/images/integrations/logos/qstudio.png';
-import qryn_logo from '@site/static/images/integrations/logos/logo_qryn.png';
-import rsyslog_logo from '@site/static/images/integrations/logos/rsyslog.png';
-import ruby_logo from '@site/static/images/integrations/logos/logo_ruby.png';
-import r_logo from '@site/static/images/integrations/logos/logo_r.png';
-import scala_logo from '@site/static/images/integrations/logos/logo_scala.png';
-import schemaspy_logo from '@site/static/images/integrations/logos/schemaspy_logo.png';
-import visual_studio_logo from '@site/static/images/integrations/logos/logo_vs.png';
-import vulcansql_logo from '@site/static/images/integrations/logos/logo-vulcansql.png';
-import great_expectations_logo from '@site/static/images/integrations/logos/great-expectations.webp';
-import Hashboardsvg from '@site/static/images/integrations/logos/hashboard.svg';
-import luzmo_logo from '@site/static/images/integrations/logos/luzmo.png';
-import vs_logo from '@site/static/images/integrations/logos/logo_vs.png';
-import Moosesvg from '@site/static/images/integrations/logos/moose_logo.svg';
-import chartbrew_logo from '@site/static/images/integrations/logos/logo_chartbrew.png';
-import marimo_logo from '@site/static/images/integrations/logos/logo_marimo.png';
-import Image from '@theme/IdealImage';
-
-ClickHouse integrations are organized by their support level:
-
-- **Core integrations:** built or maintained by ClickHouse, they are supported by ClickHouse and live in the ClickHouse GitHub organization
-- **Partner integrations:** built or maintained, and supported by, third-party software vendors
-- **Community integrations:** built or maintained and supported by community members. No direct support is available besides the public GitHub repositories and community Slack channels
-
-Each integration is further categorized into **Language client**, **Data ingestion**, **Data visualization** and **SQL client** categories.
-
-:::note
-We are actively compiling this list of ClickHouse integrations below, so it's not exhaustive. Feel free to
-[contribute](https://github.com/ClickHouse/clickhouse-docs#contributing) any relevant ClickHouse integration to the list.
-:::
-
-## Core integrations
-
-
-
-|Name|Logo|Category|Description|Resources|
-|------|----|----------------|------------------|-------------|
-|Amazon Kinesis| |Data ingestion|Integration with Amazon Kinesis.|[Documentation](/integrations/clickpipes/kinesis/)|
-|Amazon MSK| |Data ingestion|Integration with Amazon Managed Streaming for Apache Kafka (MSK).|[Documentation](/integrations/kafka/cloud/amazon-msk/)|
-|Amazon S3||Data ingestion|Import from, export to, and transform S3 data in flight with ClickHouse built-in S3 functions.|[Documentation](/integrations/data-ingestion/s3/index.md)|
-|Amazon Glue||Data ingestion|Query ClickHouse over Spark using our official Glue connector|[Documentation](/integrations/glue)|
-|Apache Spark||Data ingestion|Spark ClickHouse Connector is a high performance connector built on top of Spark DataSource V2.|[GitHub](https://github.com/housepower/spark-clickhouse-connector), [Documentation](/integrations/data-ingestion/apache-spark/index.md)|
-|Azure Event Hubs||Data ingestion|A data streaming platform that supports Apache Kafka's native protocol|[Website](https://azure.microsoft.com/en-gb/products/event-hubs)|
-|Azure Synapse||Data ingestion|A cloud-based analytics service for big data and data warehousing.|[Documentation](/integrations/azure-synapse)|
-|Azure Data Factory||Data ingestion|A cloud-based data integration service that enables you to create, schedule, and orchestrate data workflows at scale.|[Documentation](/integrations/azure-data-factory)|
-|C++||Language client|C++ client for ClickHouse|[GitHub](https://github.com/ClickHouse/clickhouse-cpp)|
-|C#||Language client|ADO.NET client implementation for ClickHouse|[Documentation](/integrations/csharp)|
-|Cassandra||Data ingestion|Allows ClickHouse to use [Cassandra](https://cassandra.apache.org/) as a dictionary source.|[Documentation](/sql-reference/dictionaries/index.md#cassandra)|
-|CHDB||AI/ML|An embedded OLAP SQL Engine|[GitHub](https://github.com/chdb-io/chdb#/), [Documentation](https://doc.chdb.io/)|
-|ClickHouse Client||SQL client|ClickHouse Client is the native command-line client for ClickHouse.|[Documentation](/interfaces/cli.md)|
-|Confluent||Data ingestion|Integration with Apache Kafka on Confluent platform.|[Documentation](/integrations/kafka/cloud/confluent/custom-connector)|
-|dbt||Data integration|Use dbt (data build tool) to transform data in ClickHouse by simply writing select statements. dbt puts the T in ELT.|[Documentation](/integrations/data-ingestion/etl-tools/dbt/index.md)|
-|DeltaLake||Data ingestion| provides a read-only integration with existing [Delta Lake](https://github.com/delta-io/delta) tables in Amazon S3.|[Documentation](/engines/table-engines/integrations/deltalake)|
-|EmbeddedRocksDB||Data integration|Allows integrating ClickHouse with [rocksdb](http://rocksdb.org/).|[Documentation](/engines/table-engines/integrations/embedded-rocksdb)|
-|Fivetran||Data ingestion|[ClickHouse Cloud](https://clickhouse.com/cloud) destination for the [Fivetran data movement platform](https://www.fivetran.com/).|[Documentation](/integrations/data-ingestion/etl-tools/fivetran/index.md)|
-|Google Cloud Storage||Data ingestion|Import from, export to, and transform GCS data in flight with ClickHouse built-in `S3` functions.|[Documentation](/integrations/data-ingestion/s3/index.md)|
-|Golang||Language client|The Go client uses the native interface for a performant, low-overhead means of connecting to ClickHouse.|[Documentation](/integrations/language-clients/go/index.md)|
-|HDFS||Data ingestion|Provides integration with the [Apache Hadoop](https://en.wikipedia.org/wiki/Apache_Hadoop) ecosystem by allowing to manage data on [HDFS](https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-hdfs/HdfsDesign.html) via ClickHouse.|[Documentation](/engines/table-engines/integrations/hdfs)|
-|Hive||Data ingestionn|The Hive engine allows you to perform `SELECT` queries on HDFS Hive table.|[Documentation](/engines/table-engines/integrations/hive)|
-|Hudi||Data ingestion| provides a read-only integration with existing Apache [Hudi](https://hudi.apache.org/) tables in Amazon S3.|[Documentation](/engines/table-engines/integrations/hudi)|
-|Iceberg||Data ingestion|Provides a read-only integration with existing Apache [Iceberg](https://iceberg.apache.org/) tables in Amazon S3.|[Documentation](/engines/table-engines/integrations/iceberg)|
-|Java, JDBC||Language client|The Java client and JDBC driver.|[Documentation](/integrations/language-clients/java/index.md)|
-|Kafka||Data ingestion|Integration with Apache Kafka, the open-source distributed event streaming platform.|[Documentation](/integrations/kafka)|
-|Looker Studio||Data visualization|Looker Studio is a free tool that turns your data into informative, easy to read, easy to share, and fully customizable dashboards and reports.|[Documentation](/integrations/lookerstudio)|
-|Looker||Data visualization|Looker is an enterprise platform for BI, data applications, and embedded analytics that helps you explore and share insights in real time.|[Documentation](/integrations/looker)|
-|Metabase||Data visualization|Metabase is an easy-to-use, open source UI tool for asking questions about your data.|[Documentation](/integrations/metabase)|
-|MinIO||Data ingestion|MinIO is a High Performance Object Storage released under GNU Affero General Public License v3.0. It is API compatible with the Amazon S3 cloud storage service|[Documentation](/integrations/minio)|
-|MongoDB||Data ingestion|The MongoDB engine is a read-only table engine that allows reading data (`SELECT` queries) from a remote MongoDB collection.|[Documentation](/engines/table-engines/integrations/mongodb)|
-|MySQL||Data ingestion|The MySQL engine allows you to perform `SELECT` and `INSERT` queries on data that is stored on a remote MySQL server.|[Documentation](/engines/table-engines/integrations/mysql)|
-|NATS||Data ingestion|Allows integrating ClickHouse with [NATS](https://nats.io/).|[Documentation](/engines/table-engines/integrations/nats)|
-|Node.JS||Language client|The official JS client for connecting to ClickHouse.|[Documentation](/integrations/language-clients/js.md)|
-|ODBC||Data integration|Allows ClickHouse to connect to external databases via [ODBC](https://en.wikipedia.org/wiki/Open_Database_Connectivity) table engine.|[Documentation](/engines/table-engines/integrations/odbc)|
-|OpenTelemetry||Data ingestion|Exporter that supports sending logs, metrics, trace OpenTelemetry data to ClickHouse|[GitHub](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/exporter/clickhouseexporter)|
-|PostgreSQL||Data ingestion|Snapshot and real-time CDC data replication from PostgreSQL databases to ClickHouse Cloud.|[Documentation](/integrations/postgresql)|
-|PowerBI||Data visualization|Microsoft Power BI is an interactive data visualization software product developed by Microsoft with a primary focus on business intelligence.|[Documentation](/integrations/powerbi)|
-|Python||Language client|A suite of Python packages for connecting Python to ClickHouse.|[Documentation](/integrations/language-clients/python/index.md)|
-|QuickSight||Data visualization|Amazon QuickSight powers data-driven organizations with unified business intelligence (BI).|[Documentation](/integrations/quicksight)|
-|RabbitMQ||Data ingestion|Allows ClickHouse to connect to [RabbitMQ](https://www.rabbitmq.com/).|[Documentation](/engines/table-engines/integrations/rabbitmq)|
-|Redis||Data ingestion|Allows ClickHouse to use [Redis](https://redis.io/) as a dictionary source.|[Documentation](/sql-reference/dictionaries/index.md#redis)|
-|Redpanda||Data ingestion|Redpanda is the streaming data platform for developers. It's API-compatible with Apache Kafka, but 10x faster, much easier to use, and more cost effective|[Blog](https://redpanda.com/blog/real-time-olap-database-clickhouse-redpanda)|
-|REST Catalog||Data ingestion|Integration with REST Catalog specification for Iceberg tables, supporting multiple catalog providers including Tabular.io.|[Documentation](/use-cases/data-lake/rest-catalog)|
-|Lakekeeper||Data ingestion|Integration with Lakekeeper, an open-source REST catalog implementation for Apache Iceberg with multi-tenant support.|[Documentation](/use-cases/data-lake/lakekeeper-catalog)|
-|Nessie||Data ingestion|Integration with Nessie, an open-source transactional catalog for data lakes with Git-like data version control.|[Documentation](/use-cases/data-lake/nessie-catalog)|
-|Rust||Language client|A typed client for ClickHouse|[Documentation](/integrations/language-clients/rust.md)|
-|SQLite||Data ingestion|Allows importing and exporting data to SQLite and supports queries against SQLite tables directly from ClickHouse.|[Documentation](/engines/table-engines/integrations/sqlite)|
-|Superset||Data visualization|Explore and visualize your ClickHouse data with Apache Superset.|[Documentation](/integrations/data-visualization/superset-and-clickhouse.md)|
-|Tableau||Data visualization|Interactive data visualization software focused on business intelligence|[Documentation](/integrations/tableau)|
-|Tableau Online||Data visualization|Tableau Online streamlines the power of data to make people faster and more confident decision makers from anywhere|[Documentation](/integrations/tableau-online)|
-
-
-
-
-## Partner integrations
-
-
-
-|Name|Logo|Category|Description|Resources|
-|------|----|----------------|------------------|-------------|
-|Airbyte||Data ingestion|Use Airbyte to create ELT data pipelines with more than 140 connectors to load and sync your data into ClickHouse.|[Documentation](/integrations/data-ingestion/etl-tools/airbyte-and-clickhouse.md)|
-|AccelData||Data management|ADOC allows users to monitor and ensure the dependability and integrity of their visualized data, facilitating real-time data processing and analytics.|[Documentation](https://docs.acceldata.io/documentation/clickhouse)|
-|Atlas||Schema management|Manage your ClickHouse schema as code. |[Documentation](https://atlasgo.io/guides/clickhouse?utm_source=clickhouse&utm_term=docs)|
-|Astrato||Data visualization|Astrato brings true Self-Service BI to Enterprises & Data Businesses by putting analytics in the hands of every user, enabling them to build dashboards, reports & data apps without IT.|[Documentation](/integrations/astrato)|
-|AutoMQ||Data ingestion|Cloud-native Kafka and RocketMQ alternatives that decouple durability to S3 and EBS|[Website](https://www.automq.com/)|
-|BladePipe||Data ingestion|A real-time end-to-end data integration tool with sub-second latency.|[Documentation](/integrations/data-ingestion/etl-tools/bladepipe-and-clickhouse.md)|
-|BlinkOps||Security automation|Create automations to manage data and user permissions.|[Documentation](https://docs.blinkops.com/docs/integrations/clickhouse)|
-|Bytewax||Data ingestion|Open source Python stream processor for transforming and ingesting data to ClickHouse|[Documentation](https://bytewax.io/blog/building-a-click-house-sink-for-bytewax)|
-|Calyptia (Fluent Bit)||Data ingestion|CNCF graduated open-source project for the collection, processing, and delivery of logs, metrics, and traces|[Blog](https://clickhouse.com/blog/kubernetes-logs-to-clickhouse-fluent-bit)|
-|Chartbrew||Data visualization|Chartbrew is a data visualization platform that allows users to create dashboards and monitor data in real time.|[Documentation](/integrations/chartbrew-and-clickhouse), [Website](https://chartbrew.com/integrations/clickhouse), [Blog](https://chartbrew.com/blog/visualizing-clickhouse-data-with-chartbrew-a-step-by-step-guide/)|
-|CloudQuery||Data ingestion|Open source high-performance ELT framework.|[Documentation](https://www.cloudquery.io/docs/plugins/destinations/clickhouse/overview)|
-|Cube.js||Data visualization|Cube is the Semantic Layer for building data apps.|[Website](https://cube.dev/for/clickhouse-dashboard)|
-|CloudCanal||Data ingestion|A real-time data replication tool for seamless and easy data flow.|[Website](https://www.clougence.com/)|
-|DBeaver||SQL client|Free multi-platform database administration tool. Connects to ClickHouse through the JDBC driver.|[Documentation](/integrations/sql-clients/dbeaver.md)|
-|DataGrip||SQL client|DataGrip is a powerful database IDE with dedicated support for ClickHouse.|[Documentation](/integrations/sql-clients/datagrip.md)|
-|Dataddo||Data integration|Data integration platform|[Website](https://www.dataddo.com/storage/clickhouse)|
-|DbVisualizer||SQL client|DbVisualizer is a database tool with extended support for ClickHouse.|[Documentation](/integrations/sql-clients/dbvisualizer.md)|
-|Decodable||Data ingestion|Powerful Stream Processing Built On Apache Flink|[Website](https://www.decodable.co/connectors/clickhouse)|
-|Deepnote||Data visualization|Deepnote is a collaborative Jupyter-compatible data notebook built for teams to discover and share insights.|[Documentation](/integrations/data-visualization/deepnote.md)|
-|DLT||Data integration|An open source Python library that makes data loading easy.|[Documentation](/integrations/data-ingestion/etl-tools/dlt-and-clickhouse)|
-|Draxlr||Data visualization|Draxlr is a Business intelligence tool with data visualization and analytics.|[Documentation](/integrations/data-visualization/draxlr-and-clickhouse.md)|
-|EMQX||Data ingestion|EMQX is an open source MQTT broker with a high-performance real-time message processing engine, powering event streaming for IoT devices at massive scale.|[Documentation](/integrations/emqx)|
-|Explo| | Data visualization | Explo is a customer-facing analytics tool for any platform. | [Documentation](/integrations/explo) |
-|Fabi.ai| | Data visualization | Fabi.ai is an all-in-one collaborative data analysis platform. You can leverage SQL, Python, AI, and no-code to build dashboards and data workflows faster than ever before. | [Documentation](/integrations/data-visualization/fabi-and-clickhouse.md) |
-|Gigasheet| | Data visualization | A cloud big data analytics spreadsheet that enables business users to instantly analyze and explore ClickHouse data. | [Website](https://gigasheet.com/enterprise) |
-|GlassFlow| | Data ingestion | Real-time event streaming and data transformation in Python for ClickHouse. | [Documentation](https://docs.glassflow.dev/integrations/managed-connectors/sinks/clickhouse) |
-|Goldsky| | Data Integration | High-performance Web3 data indexing via Subgraphs and real-time data replication pipelines. | [Documentation](https://docs.goldsky.com/introduction) |
-|Grafana||Data visualization|With Grafana you can create, explore and share all of your data through dashboards.|[Documentation](/integrations/data-visualization/grafana/index.md)|
-|Great Expectations| | Data management | An open-source data management tool, with a paid cloud offering. | [Website](https://greatexpectations.io/) |
-|GrowthBook| | Data visualization | Warehouse native experimentation platform (feature flagging and A/B testing). | [Documentation](https://docs.growthbook.io/warehouses/clickhouse) |
-|HEX| | Data visualization | Hex is a modern, collaborative platform with notebooks, data apps, SQL, Python, no-code, R, and so much more. | [Documentation](https://learn.hex.tech/docs/connect-to-data/data-connections/overview) |
-|Hashboard||Data visualization|[Hashboard](https://hashboard.com) is a business intelligence platform that enables self-service data exploration and metric tracking.|[Documentation](https://docs.hashboard.com/docs/database-connections/clickhouse)|
-|HighTouch||Data integration|Sync your data directly from your warehouse to 140+ destinations|[Website](https://hightouch.com/docs/sources/clickhouse)|
-|Holistics||Data visualization|Business Intelligence for ClickHouse database|[Website](https://www.holistics.io/integrations/clickhouse/)|
-|HouseWatch||Data management| Open source tool for monitoring and managing ClickHouse clusters. |[GitHub](https://github.com/PostHog/HouseWatch)|
-|IBM Instana| |Data management|Instana can auto-discover and monitor ClickHouse server processes|[Documentation](https://www.ibm.com/docs/en/instana-observability/current?topic=technologies-monitoring-clickhouse)|
-|Jitsu||Data analytics|An open-source event collection platform.|[Documentation](https://docs.jitsu.com/destinations/warehouse/clickhouse)|
-|LangChain|🦜️🔗|SDK|LangChain is a framework for developing applications powered by language models|[Documentation](https://python.langchain.com/docs/integrations/vectorstores/clickhouse/)|
-|Luzmo||Data visualization|Luzmo is an embedded analytics platform with a native ClickHouse integration, purpose-built for Software and SaaS applications.|[Documentation](/integrations/data-visualization/luzmo-and-clickhouse.md)|
-|Mage||Data Ingestion|Open-source data pipeline tool for transforming and integrating data|[Documentation](https://docs.mage.ai/integrations/databases/ClickHouse)|
-|Metaplane||Data management|Data observability for every data team|[Website](https://www.metaplane.dev/integrations)|
-|MindsDB||AI/ML|The platform for customizing AI from enterprise data|[Website](https://mindsdb.com/clickhouse-machine-learning)|
-|Mitzu||Data visualization|Mitzu is a no-code warehouse-native product analytics application. Find funnel, retention, user segmentation insights without copying your data.|[Documentation](/integrations/mitzu)|
-|Mode Analytics||Data visualization|Business Intelligence built around data teams|[Website](https://mode.com/)|
-|Moose OLAP||Language client|Moose OLAP offers TypeScript and Python abstractions and ORM-like functionality built natively for ClickHouse|[Documentation](/interfaces/third-party/moose-olap), [Website](https://www.fiveonefour.com/moose)|
-|Omni||Data visualization|Business intelligence that speaks your language. Explore, visualize, and model data your way with Omni. From spreadsheets to SQL—in a single platform.| [Website](https://omni.co/)|
-|Openblocks||SQL client|Openblocks is a low code platform for building UIs|[Documentation](https://blog.openblocks.dev/blog/openblocks-x-clickhouse)|
-|OpsRamp (HP)| |Data management| Provides observability metrics for ClickHouse|[Documentation](https://docs.opsramp.com/integrations/database-no-sql/automonitor-clickhouse-monitoring/)|
-|Popsink||Data integration|Build real-time Change Data Capture (CDC) pipelines to ClickHouse.|[Documentation](https://docs.popsink.com/connectors/target/clickhouse/)|
-|Prequel||Data sharing|Connect your ClickHouse instance to Prequel to share data to or sync data from your users and partners.|[Documentation](https://docs.prequel.co/docs/sources-clickhouse-generic)|
-|Quesma||Data integration|Use Kibana and OpenSearch Dashboards with data in ClickHouse.|[Website](https://quesma.com/quesma-for-elk)|
-|Redash||Data visualization|Connect and query your data sources, build dashboards to visualize data and share|[Website](https://redash.io/help/data-sources/querying/supported-data-sources)|
-|Restack Data Hub||Data governance|Users can achieve a more comprehensive data governance and observability framework with Restack Data Hub.|[Documentation](https://www.restack.io/docs/datahub-knowledge-datahub-clickhouse-integration)|
-|Restack OpenMetadata||Data quality|Restack OpenMetadata supports metadata extraction, query usage tracking, data profiling, and data quality checks.|[Documentation](https://www.restack.io/docs/openmetadata-knowledge-openmetadata-clickhouse-integration)|
-|Retool||No code|Create your application with drag-and-drop interface. |[Documentation](/integrations/retool)|
-|Rill||Data visualization|Rill is an Operational BI tool purpose-built for slicing & dicing data with OLAP engines.|[Documentation](https://docs.rilldata.com/reference/olap-engines/clickhouse)|
-|RisingWave||Data ingestion| SQL stream processing with a Postgres-like experience. 10x faster and more cost-efficient than Apache Flink. |[Documentation](https://docs.risingwave.com/docs/current/sink-to-clickhouse/)|
-|RudderStack||Data ingestion|RudderStack makes it easy to collect and send customer data to the tools and teams that need it|[Documentation](https://www.rudderstack.com/docs/destinations/warehouse-destinations/clickhouse/)|
-|RunReveal||Data ingestion|Ingest and normalize audit logs from any SaaS application into ClickHouse. Create alerts and detections from scheduled queries.|[Website](https://runreveal.com)|
-|Sematext||Data management|Observability monitoring for ClickHouse databases.|[Documentation](https://sematext.com/docs/integration/clickhouse/)|
-|SiSense||Data visualization|Embed analytics into any application or workflow|[Website](https://www.sisense.com/data-connectors/)|
-|SigNoz||Data visualization|Open Source Observability Platform|[Documentation](https://www.signoz.io/docs/architecture/)|
-|Snappy Flow||Data management|Collects ClickHouse database metrics via plugin.|[Documentation](https://docs.snappyflow.io/docs/Integrations/clickhouse/instance)|
-|Soda||Data quality|Soda integration makes it easy for organizations to detect, resolve, and prevent data quality issues by running data quality checks on data before it is loaded into the database.|[Website](https://www.soda.io/integrations/clickhouse)|
-|Splunk||Data integration|Splunk modular input to import to Splunk the ClickHouse Cloud Audit logs.|[Website](https://splunkbase.splunk.com/app/7709), [Documentation](/integrations/tools/data-integration/splunk/index.md)|
-|StreamingFast||Data ingestion| Blockchain-agnostic, parallelized and streaming-first data engine. |[Website](https://www.streamingfast.io/)|
-|Streamkap||Data ingestion|Setup real-time CDC (Change Data Capture) streaming to ClickHouse with high throughput in minutes.|[Documentation](https://docs.streamkap.com/docs/clickhouse)|
-|Supabase||Data ingestion|Open source Firebase alternative|[GitHub](https://github.com/supabase/wrappers/tree/main/wrappers/src/fdw/clickhouse_fdw),[Blog](https://clickhouse.com/blog/migrating-data-between-clickhouse-postgres)|
-|Teleport||Secure connection|Teleport Database Service authenticates to ClickHouse using x509 certificates, which are available for the ClickHouse HTTP and Native (TCP) interfaces.|[Documentation](https://goteleport.com/docs/enroll-resources/database-access/enroll-self-hosted-databases/clickhouse-self-hosted/)|
-|TABLUM.IO||SQL client|TABLUM.IO ingests data from a variety of sources, normalizes and cleans inconsistencies, and gives you access to it via SQL.|[Documentation](/integrations/sql-clients/tablum.md)|
-|Tooljet||Data Visualization|ToolJet is an open-source low-code framework to build and deploy custom internal tools.|[Documentation](https://docs.tooljet.com/docs/data-sources/clickhouse/)|
-|Upstash||Data Ingestion|A data platform offering serverless Kafka and other solutions|[Website](https://upstash.com/)|
-|Vector||Data ingestion|A lightweight, ultra-fast tool for building observability pipelines with built-in compatibility with ClickHouse.|[Documentation](/integrations/vector/)|
-|WarpStream||Data Ingestion|A Kafka compatible data streaming platform built directly on top of object storage|[Website](https://www.warpstream.com/)|
-|YepCode||Data integration|YepCode is the integration & automation tool that loves source code.|[Documentation](https://yepcode.io/docs/integrations/clickhouse/)|
-|Zing Data||Data visualization|Simple social business intelligence for ClickHouse, made for iOS, Android and the web.|[Documentation](https://docs.getzingdata.com/docs/)|
-
-
-
-
+import { IntegrationGrid } from '@site/src/components/IntegrationGrid';
-## Community integrations
+# Integrations
-
+Connect ClickHouse with your favorite tools and services to build powerful data pipelines and analytics workflows.
-|Name|Logo|Category|Description|Resources|
-|------|----|----------------|------------------|-------------|
-|Apache Airflow||Data ingestion|Open-source workflow management platform for data engineering pipelines|[GitHub](https://github.com/bryzgaloff/airflow-clickhouse-plugin)|
-|Apache Beam||Data ingestion|Open source, unified model and set of language-specific SDKs for defining and executing data processing workflows. Compatible with Google Dataflow.|[Documentation](/integrations/apache-beam), [Examples](https://github.com/ClickHouse/clickhouse-beam-connector/)|
-|Apache InLong||Data ingestion|One-stop integration framework for massive data|[Documentation](https://inlong.apache.org/docs/data_node/load_node/clickhouse)|
-|Apache NiFi||Data ingestion|Automates the flow of data between software systems|[Documentation](/integrations/nifi)|
-|Apache SeaTunnel||Data ingestion|SeaTunnel is a very easy-to-use ultra-high-performance distributed data integration platform|[Website](https://seatunnel.apache.org/docs/2.3.0/connector-v2/sink/Clickhouse)|
-|Apache SkyWalking||Data management|Open-source APM system that provides monitoring, tracing and diagnosing capabilities for distributed systems in Cloud Native architectures.|[Blog](https://skywalking.apache.org/blog/2024-03-12-monitoring-clickhouse-through-skywalking/)|
-|Apache StreamPark||Data ingestion|A stream processing application development framework and stream processing operation platform. |[Website](https://streampark.apache.org/docs/intro)|
-|Bytebase||Data management|Open-source database DevOps tool, it's the GitLab for managing databases throughout the application development lifecycle|[Documentation](https://www.bytebase.com/docs/introduction/supported-databases)|
-|CHProxy| |Data management|Chproxy is an HTTP proxy and load balancer for the ClickHouse database|[GitHub](https://github.com/ContentSquare/chproxy)|
-|Chat-DBT| |AI Integration|Create ClickHouse queries using Chat GPT.|[GitHub](https://github.com/plmercereau/chat-dbt)|
-|ClickHouse Monitoring Dashboard||Dashboard|A simple monitoring dashboard for ClickHouse|[GitHub](https://github.com/duyet/clickhouse-monitoring)|
-|Common Lisp||Language client|Common Lisp ClickHouse Client Library|[GitHub](https://github.com/juliojimenez/clickhouse-cl)|
-|Dataflow||Data ingestion|Google Dataflow is a serverless service for running batch and streaming data pipelines using Apache Beam.|[Documentation](/integrations/google-dataflow/dataflow)|
-|DBNet||Software IDE|Web-based SQL IDE using Go as a back-end, and the browser as the front-end.|[GitHub](https://github.com/dbnet-io/dbnet)|
-|DataLens||Data visualization|An open-source data analytics and visualization tool.|[Website](https://datalens.tech/), [Documentation](https://datalens.tech/docs/en/)|
-|Dataease||Data visualization|Open source data visualization analysis tool to help users analyze data and gain insight into business trends.|[Website](https://dataease.io/)|
-|Datahub||Data management|Open Source Data Catalog that enables data discovery, data observability and federated governance|[Documentation](https://datahubproject.io/docs/generated/ingestion/sources/clickhouse/)|
-|Dbmate| |Data management|Database migration tool that will keep your database schema in sync across multiple developers and servers|[GitHub](https://github.com/amacneil/dbmate#clickhouse)|
-|DeepFlow||Data ingestion|Application Observability using eBPF|[Website](https://deepflow.io)|
-|Easypanel||Deployment method|It's a modern server control panel. You can use it to deploy ClickHouse on your own server.|[Website](https://easypanel.io), [Documentation](/integrations/tools/data-integration/easypanel/index.md)|
-|Explo||Data visualization|Explo helps companies build real-time analytics dashboard by providing flexible components.|[Website](https://www.explo.co/integrations/clickhouse)|
-|Flink||Data ingestion|Flink sink for ClickHouse database, powered by Async Http Client|[GitHub](https://github.com/itinycheng/flink-connector-clickhouse)|
-|Goose||Data migration|A database migration tool that supports SQL migrations and Go functions.|[GitHub](https://github.com/pressly/goose), [Documentation](https://pressly.github.io/goose/)|
-|Ibis||Language client|The flexibility of Python analytics with the scale and performance of modern SQL|[Website](https://ibis-project.org/backends/ClickHouse/)|
-|Jaeger||Data ingestion|Jaeger gRPC storage plugin implementation for storing traces in ClickHouse|[GitHub](https://github.com/jaegertracing/jaeger-clickhouse)|
-|JupySQL||SQL client|The native SQL client for Jupyter notebooks.|[Documentation](/integrations/jupysql)|
-|Kestra||Data orchestration|Open source data orchestration and scheduling platform|[Website](https://kestra.io/plugins/plugin-jdbc-clickhouse/)|
-|Logchain||Security|Data security and privileged access management|[Website](https://github.com/adaptive-scale/logchain)|
-|Meltano||Data ingestion|Meltano is an open-source, full-stack data integration platform|[Documentation](https://hub.meltano.com/extractors/tap-clickhouse)|
-|Mprove||Data visualization|Self-service Business Intelligence with Version Control|[Website](https://mprove.io/)|
-|Netobserv||Data management|An OpenShift and Kubernetes operator for network observability.|[Blog](https://cloud.redhat.com/blog/deploying-network-observability-without-loki-an-example-with-clickhouse)|
-|Observable||Data visualization|Observable is a platform where you can collaboratively explore, analyze, visualize, and communicate with data on the web.|[Website](https://observablehq.com/@stas-sl/clickhouse-playground)|
-|PHP||Language client|This extension provides the ClickHouse integration for the Yii framework 2.0|[GitHub](https://github.com/smi2/phpClickHouse)|
-|Pgwarehouse||Data ingestion|Simple tool to quickly replicate Postgres tables into ClickHouse|[GitHub](https://github.com/scottpersinger/pgwarehouse)|
-|Pinax||Blockchain analytics|Indexing, analytics, and search tools for blockchains.|[Blog](https://blog.pinax.network/substreams/simplify-real-time-blockchain-analytics-with-clickhouse/)|
-|Pulse||Data management|A developer platform for internal data UIs.|[Website](https://www.timestored.com/pulse/)|
-|QStudio||GUI|A simple to use GUI for interacting with ClickHouse databases.|[Website](https://www.timestored.com/qstudio/database/clickhouse)|
-|Qryn||Data Ingestion, Management, Visualization | qryn is a polyglot observability stack built on top of ClickHouse, transparently compatible with Loki, Prometheus, Tempo, OpenTelemetry and many other formats and standard APIs without requiring custom clients, code or plugins|[Documentation](https://qryn.dev), [GitHub](https://github.com/metrico), [Website](https://qryn.cloud)|
-|RSyslog||Data Ingestion|This module provides native support for logging to ClickHouse.|[Documentation](https://www.rsyslog.com/doc/master/configuration/modules/omclickhouse.html)|
-|Rocket.BI||Data visualization|RocketBI is a self-service business intelligence platform that helps you quickly analyze data, build drag-n-drop visualizations and collaborate with colleagues right on your web browser.|[GitHub](https://github.com/datainsider-co/rocket-bi), [Documentation](/integrations/data-visualization/rocketbi-and-clickhouse.md)|
-|Ruby||Language client|A modern Ruby database driver for ClickHouse|[GitHub](https://github.com/shlima/click_house)|
-|R||Language client|R package is a DBI interface for the ClickHouse database|[GitHub](https://github.com/IMSMWU/RClickHouse)|
-|SQLPad||SQL client|SQLPad is a web app for writing and running SQL queries and visualizing the results|[Documentation](https://getsqlpad.com/en/connections/#clickhouse)|
-|Scala||Language client|ClickHouse Scala Client that uses Akka Http|[GitHub](https://github.com/crobox/clickhouse-scala-client)|
-|SchemaSpy||Data visualization|SchemaSpy supports ClickHouse schema visualization|[GitHub](https://github.com/schemaspy/schemaspy)|
-|TricksterCache||Data visualization| Open Source HTTP Reverse Proxy Cache and Time Series Dashboard Accelerator |[Website](https://trickstercache.org/)|
-|Visual Studio Client||Language client|Visual studio lightweight client|[Marketplace](https://marketplace.visualstudio.com/items?itemName=fanruten.clickhouse-light)|
-|VulcanSQL||Data API Framework|It's a Data API Framework for data applications that helps data folks create and share data APIs faster. It turns your SQL templates into data APIs. No backend skills required.|[Website](https://vulcansql.com/), [Documentation](https://vulcansql.com/docs/connect/clickhouse)|
-|marimo||SQL Client|An open-source reactive notebook for Python with SQL built-in. Create data visualizations, shareable apps or executable scripts.|[Website](https://marimo.io/), [Documentation](https://docs.marimo.io/guides/working_with_data/sql/?h=sql#clickhouse-support)|
-
+<IntegrationGrid />
-:::info Notice
-Third-party logos and trademarks belong to their respective owners and are shown only to indicate available integrations. No endorsement is implied.
-:::
+:::note Notice
+Third-party logos and trademarks belong to their respective owners and are shown only to indicate available integrations.
+No endorsement is implied.
+:::
\ No newline at end of file
diff --git a/docs/integrations/language-clients/csharp.md b/docs/integrations/language-clients/csharp.md
index cc7fd3274ea..736d653903c 100644
--- a/docs/integrations/language-clients/csharp.md
+++ b/docs/integrations/language-clients/csharp.md
@@ -6,6 +6,10 @@ slug: /integrations/csharp
description: 'The official C# client for connecting to ClickHouse.'
title: 'ClickHouse C# Driver'
doc_type: 'guide'
+integration_type: ['language client']
+integration_logo: '/static/images/integrations/logos/csharp.svg'
+integration_title: 'C#'
+integration_tier: 'core'
---
# ClickHouse C# Client
diff --git a/docs/integrations/language-clients/go/index.md b/docs/integrations/language-clients/go/index.md
index 624dbb35650..63c3894c5f9 100644
--- a/docs/integrations/language-clients/go/index.md
+++ b/docs/integrations/language-clients/go/index.md
@@ -6,6 +6,10 @@ slug: /integrations/go
description: 'The Go clients for ClickHouse allows users to connect to ClickHouse using either the Go standard database/sql interface or an optimized native interface.'
title: 'ClickHouse Go'
doc_type: 'reference'
+integration_type: ['language client']
+integration_logo: '/static/images/integrations/logos/golang.svg'
+integration_title: 'Golang'
+integration_tier: 'core'
---
import ConnectionDetails from '@site/docs/_snippets/_gather_your_details_native.md';
diff --git a/docs/integrations/language-clients/java/index.md b/docs/integrations/language-clients/java/index.md
index b816ab32529..5f8a8661c4d 100644
--- a/docs/integrations/language-clients/java/index.md
+++ b/docs/integrations/language-clients/java/index.md
@@ -4,6 +4,10 @@ keywords: ['clickhouse', 'java', 'jdbc', 'client', 'integrate', 'r2dbc']
description: 'Options for connecting to ClickHouse from Java'
slug: /integrations/java
doc_type: 'reference'
+integration_type: ['language client']
+integration_logo: '/static/images/integrations/logos/java.svg'
+integration_title: 'Java, JDBC'
+integration_tier: 'core'
---
import Tabs from '@theme/Tabs';
diff --git a/docs/integrations/language-clients/js.md b/docs/integrations/language-clients/js.md
index aa5ba02650c..8496b046b48 100644
--- a/docs/integrations/language-clients/js.md
+++ b/docs/integrations/language-clients/js.md
@@ -6,6 +6,10 @@ slug: /integrations/javascript
description: 'The official JS client for connecting to ClickHouse.'
title: 'ClickHouse JS'
doc_type: 'reference'
+integration_type: ['language client']
+integration_logo: '/static/images/integrations/logos/node_js.svg'
+integration_title: 'Node.JS'
+integration_tier: 'core'
---
import ConnectionDetails from '@site/docs/_snippets/_gather_your_details_http.mdx';
diff --git a/docs/integrations/language-clients/moose-olap.md b/docs/integrations/language-clients/moose-olap.md
index 6d6458e0ca9..984b1704b6b 100644
--- a/docs/integrations/language-clients/moose-olap.md
+++ b/docs/integrations/language-clients/moose-olap.md
@@ -6,6 +6,10 @@ slug: /interfaces/third-party/moose-olap
title: 'Developing on ClickHouse with Moose OLAP'
keywords: ['Moose']
doc_type: 'guide'
+integration_title: 'Moose OLAP'
+integration_type: ['language client']
+integration_logo: '/static/images/integrations/logos/moose_logo.svg'
+integration_tier: 'community'
---
import CommunityMaintainedBadge from '@theme/badges/CommunityMaintained';
diff --git a/docs/integrations/language-clients/python/index.md b/docs/integrations/language-clients/python/index.md
index 28314d73cb9..43964d636f5 100644
--- a/docs/integrations/language-clients/python/index.md
+++ b/docs/integrations/language-clients/python/index.md
@@ -6,6 +6,10 @@ slug: /integrations/python
description: 'The ClickHouse Connect project suite for connecting Python to ClickHouse'
title: 'Python Integration with ClickHouse Connect'
doc_type: 'guide'
+integration_type: ['language client']
+integration_logo: '/static/images/integrations/logos/notext-python.svg'
+integration_title: 'Python'
+integration_tier: 'core'
---
import ConnectionDetails from '@site/docs/_snippets/_gather_your_details_http.mdx';
diff --git a/docs/integrations/language-clients/rust.md b/docs/integrations/language-clients/rust.md
index 09592eab3f2..3fdd0892f1c 100644
--- a/docs/integrations/language-clients/rust.md
+++ b/docs/integrations/language-clients/rust.md
@@ -6,6 +6,10 @@ slug: /integrations/rust
description: 'The official Rust client for connecting to ClickHouse.'
title: 'ClickHouse Rust Client'
doc_type: 'reference'
+integration_type: ['language client']
+integration_logo: '/static/images/integrations/logos/logo_rust.png'
+integration_title: 'Rust'
+integration_tier: 'core'
---
# ClickHouse Rust client
diff --git a/docs/integrations/sql-clients/datagrip.md b/docs/integrations/sql-clients/datagrip.md
index 6cfe1bb4039..5fcfd573526 100644
--- a/docs/integrations/sql-clients/datagrip.md
+++ b/docs/integrations/sql-clients/datagrip.md
@@ -4,6 +4,10 @@ slug: /integrations/datagrip
description: 'DataGrip is a database IDE that supports ClickHouse out of the box.'
title: 'Connecting DataGrip to ClickHouse'
doc_type: 'guide'
+integration_title: 'DataGrip'
+integration_type: ['sql client']
+integration_logo: '/static/images/integrations/logos/data_grip.svg'
+integration_tier: 'community'
---
import Image from '@theme/IdealImage';
diff --git a/docs/integrations/sql-clients/dbeaver.md b/docs/integrations/sql-clients/dbeaver.md
index e8cbe7337ef..07a9b10363b 100644
--- a/docs/integrations/sql-clients/dbeaver.md
+++ b/docs/integrations/sql-clients/dbeaver.md
@@ -4,6 +4,10 @@ sidebar_label: 'DBeaver'
description: 'DBeaver is a multi-platform database tool.'
title: 'Connect DBeaver to ClickHouse'
doc_type: 'guide'
+integration_title: 'DBeaver'
+integration_type: ['sql client']
+integration_logo: '/static/images/integrations/logos/dbeaver_logo.svg'
+integration_tier: 'core'
---
import Image from '@theme/IdealImage';
diff --git a/docs/integrations/sql-clients/dbvisualizer.md b/docs/integrations/sql-clients/dbvisualizer.md
index 35478e8b0b5..4359e84ce3e 100644
--- a/docs/integrations/sql-clients/dbvisualizer.md
+++ b/docs/integrations/sql-clients/dbvisualizer.md
@@ -4,6 +4,10 @@ slug: /integrations/dbvisualizer
description: 'DbVisualizer is a database tool with extended support for ClickHouse.'
title: 'Connecting DbVisualizer to ClickHouse'
doc_type: 'guide'
+integration_title: 'DbVisualizer'
+integration_type: ['sql client']
+integration_logo: '/static/images/integrations/logos/logo_dbvisualizer.png'
+integration_tier: 'community'
---
import ConnectionDetails from '@site/docs/_snippets/_gather_your_details_http.mdx';
diff --git a/docs/integrations/sql-clients/jupysql.md b/docs/integrations/sql-clients/jupysql.md
index b1663e76575..28f7f46764e 100644
--- a/docs/integrations/sql-clients/jupysql.md
+++ b/docs/integrations/sql-clients/jupysql.md
@@ -4,6 +4,10 @@ sidebar_label: 'Jupyter notebooks'
description: 'JupySQL is a multi-platform database tool for Jupyter.'
title: 'Using JupySQL with ClickHouse'
doc_type: 'guide'
+integration_title: 'JupySQL'
+integration_type: ['sql client']
+integration_logo: '/static/images/integrations/logos/jupyter.png'
+integration_tier: 'community'
---
import Image from '@theme/IdealImage';
diff --git a/docs/integrations/sql-clients/tablum.md b/docs/integrations/sql-clients/tablum.md
index 3dcd100d2dc..10b743d1267 100644
--- a/docs/integrations/sql-clients/tablum.md
+++ b/docs/integrations/sql-clients/tablum.md
@@ -4,6 +4,10 @@ slug: /integrations/tablumio
description: 'TABLUM.IO is a data management SaaS that supports ClickHouse out of the box.'
title: 'Connecting TABLUM.IO to ClickHouse'
doc_type: 'guide'
+integration_title: 'TABLUM.IO'
+integration_type: ['sql client']
+integration_logo: '/static/images/integrations/logos/tablum.svg'
+integration_tier: 'community'
---
import Image from '@theme/IdealImage';
diff --git a/docs/integrations/tools/data-integration/easypanel/index.md b/docs/integrations/tools/data-integration/easypanel/index.md
index f0ac1717b67..fdbbd81e949 100644
--- a/docs/integrations/tools/data-integration/easypanel/index.md
+++ b/docs/integrations/tools/data-integration/easypanel/index.md
@@ -5,6 +5,10 @@ keywords: ['clickhouse', 'Easypanel', 'deployment', 'integrate', 'install']
description: 'You can use it to deploy ClickHouse on your own server.'
title: 'Deploying ClickHouse on Easypanel'
doc_type: 'guide'
+integration_title: 'Easypanel'
+integration_type: ['data integration']
+integration_logo: '/static/images/integrations/logos/logo-easypanel.png'
+integration_tier: 'community'
---
import CommunityMaintainedBadge from '@theme/badges/CommunityMaintained';
diff --git a/docs/integrations/tools/data-integration/retool/index.md b/docs/integrations/tools/data-integration/retool/index.md
index a33567b4111..c9f519c90c6 100644
--- a/docs/integrations/tools/data-integration/retool/index.md
+++ b/docs/integrations/tools/data-integration/retool/index.md
@@ -5,6 +5,10 @@ keywords: ['clickhouse', 'retool', 'connect', 'integrate', 'ui', 'admin', 'panel
description: 'Quickly build web and mobile apps with rich user interfaces, automate complex tasks, and integrate AI—all powered by your data.'
title: 'Connecting Retool to ClickHouse'
doc_type: 'guide'
+integration_title: 'Retool'
+integration_type: ['data integration']
+integration_logo: '/static/images/integrations/logos/retool.svg'
+integration_tier: 'partner'
---
import ConnectionDetails from '@site/docs/_snippets/_gather_your_details_http.mdx';
@@ -14,11 +18,11 @@ import retool_02 from '@site/static/images/integrations/tools/data-integration/r
import retool_03 from '@site/static/images/integrations/tools/data-integration/retool/retool_03.png';
import retool_04 from '@site/static/images/integrations/tools/data-integration/retool/retool_04.png';
import retool_05 from '@site/static/images/integrations/tools/data-integration/retool/retool_05.png';
-import CommunityMaintainedBadge from '@theme/badges/CommunityMaintained';
+import PartnerBadge from '@theme/badges/PartnerBadge';
# Connecting Retool to ClickHouse
-<CommunityMaintainedBadge/>
+<PartnerBadge/>
## 1. Gather your connection details {#1-gather-your-connection-details}
diff --git a/docs/integrations/tools/data-integration/splunk/index.md b/docs/integrations/tools/data-integration/splunk/index.md
index f57228f0fa7..7222cbd3b5e 100644
--- a/docs/integrations/tools/data-integration/splunk/index.md
+++ b/docs/integrations/tools/data-integration/splunk/index.md
@@ -5,6 +5,10 @@ keywords: ['clickhouse', 'Splunk', 'audit', 'cloud']
description: 'Store ClickHouse Cloud audit logs into Splunk.'
title: 'Storing ClickHouse Cloud Audit logs into Splunk'
doc_type: 'guide'
+integration_title: 'Splunk'
+integration_type: ['data integration']
+integration_logo: '/static/images/integrations/logos/splunk_logo.png'
+integration_tier: 'partner'
---
import Image from '@theme/IdealImage';
@@ -20,11 +24,11 @@ import splunk_009 from '@site/static/images/integrations/tools/data-integration/
import splunk_010 from '@site/static/images/integrations/tools/data-integration/splunk/splunk_010.png';
import splunk_011 from '@site/static/images/integrations/tools/data-integration/splunk/splunk_011.png';
import splunk_012 from '@site/static/images/integrations/tools/data-integration/splunk/splunk_012.png';
-import ClickHouseSupportedBadge from '@theme/badges/ClickHouseSupported';
+import PartnerBadge from '@theme/badges/PartnerBadge';
# Storing ClickHouse Cloud Audit logs into Splunk
-<ClickHouseSupportedBadge/>
+<PartnerBadge/>
[Splunk](https://www.splunk.com/) is a data analytics and monitoring platform.
diff --git a/docusaurus.config.en.js b/docusaurus.config.en.js
index 125244df775..db877279a5b 100644
--- a/docusaurus.config.en.js
+++ b/docusaurus.config.en.js
@@ -11,6 +11,7 @@ const codeImportPlugin = require('./plugins/code-import-plugin');
const { customParseFrontMatter } = require('./plugins/frontmatter-validation/customParseFrontMatter');
const checkFloatingPages = require('./plugins/checkFloatingPages');
const frontmatterValidator = require('./plugins/frontmatter-validation/frontmatterValidatorPlugin');
+const integrationExtractor = require('./plugins/integration-extractor-plugin');
import pluginLlmsTxt from './plugins/llms-txt-plugin.ts'
import prismLight from "./src/utils/prismLight";
import prismDark from "./src/utils/prismDark";
@@ -356,6 +357,10 @@ const config = {
pluginLlmsTxt,
{}
],
+ [
+ integrationExtractor,
+ {}
+ ],
[
'./plugins/tailwind-config.js',
{}
diff --git a/plugins/integration-extractor-plugin.js b/plugins/integration-extractor-plugin.js
new file mode 100644
index 00000000000..8228fac2c5e
--- /dev/null
+++ b/plugins/integration-extractor-plugin.js
@@ -0,0 +1,107 @@
+const path = require('path');
+const fs = require('fs');
+const matter = require('gray-matter');
+
+/**
+ * Recursively find all .md and .mdx files in a directory
+ */
+function findMarkdownFiles(dir) {
+ const files = [];
+
+ function scanDirectory(currentDir) {
+ try {
+ const entries = fs.readdirSync(currentDir, { withFileTypes: true });
+
+ for (const entry of entries) {
+ const fullPath = path.join(currentDir, entry.name);
+
+ if (entry.isDirectory()) {
+ scanDirectory(fullPath);
+ } else if (entry.isFile() && /\.(md|mdx)$/.test(entry.name)) {
+ files.push(fullPath);
+ }
+ }
+ } catch (err) {
+ // Skip directories that can't be read
+ }
+ }
+
+ scanDirectory(dir);
+ return files;
+}
+
+/**
+ * Plugin to extract integration data and generate static JSON file
+ */
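+// Each entry in the generated static/integrations.json has roughly this shape
+// (illustrative values only; real values come from each page's frontmatter,
+// with any leading '/static' stripped from the logo path):
+//   {
+//     "slug": "/integrations/example",
+//     "integration_logo": "/images/integrations/logos/example.svg",
+//     "integration_type": ["data ingestion"],
+//     "integration_title": "Example",
+//     "integration_tier": "core"
+//   }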
+function integrationExtractorPlugin(context, options) {
+  // Track how many integrations were extracted so postBuild can report the
+  // count without re-scanning the docs tree.
+  let integrationCount = 0;
+
+  return {
+ name: 'integration-extractor-plugin',
+
+ async loadContent() {
+ const integrationsDir = path.join(context.siteDir, 'docs/integrations');
+
+ // Find all markdown files in the docs/integrations directory
+ const integrationFiles = findMarkdownFiles(integrationsDir);
+ const integrations = [];
+
+ integrationFiles.forEach(fullPath => {
+ try {
+ const fileContent = fs.readFileSync(fullPath, 'utf8');
+ const { data: frontmatter } = matter(fileContent);
+
+ // Only include files that have both integration_type and integration_logo
+ if (frontmatter.integration_type && frontmatter.integration_logo) {
+ // Fix logo path for Docusaurus static serving
+ let logoPath = frontmatter.integration_logo;
+ if (logoPath.startsWith('/static/')) {
+ logoPath = logoPath.replace('/static/', '/');
+ }
+
+ integrations.push({
+ slug: frontmatter.slug,
+ integration_logo: logoPath,
+ integration_type: Array.isArray(frontmatter.integration_type)
+ ? frontmatter.integration_type
+ : [frontmatter.integration_type],
+ integration_title: frontmatter.integration_title,
+ integration_tier: frontmatter.integration_tier
+ });
+ }
+ } catch (err) {
+ console.warn(`Warning: Could not process file ${fullPath}:`, err.message);
+ }
+ });
+
+ // Sort integrations alphabetically by integration_title, fallback to slug
+ integrations.sort((a, b) => {
+ const titleA = a.integration_title || a.slug;
+ const titleB = b.integration_title || b.slug;
+ return titleA.localeCompare(titleB);
+ });
+
+      integrationCount = integrations.length;
+      console.log(`✅ Integration extractor: Found ${integrationCount} total integrations`);
+ return integrations;
+ },
+
+ async contentLoaded({ content, actions }) {
+ // Write JSON file to static directory for easy importing
+ const staticDir = path.join(context.siteDir, 'static');
+ const jsonPath = path.join(staticDir, 'integrations.json');
+
+ // Ensure static directory exists
+ if (!fs.existsSync(staticDir)) {
+ fs.mkdirSync(staticDir, { recursive: true });
+ }
+
+ fs.writeFileSync(jsonPath, JSON.stringify(content, null, 2));
+ console.log(`✅ Integration extractor: Generated static/integrations.json`);
+ },
+
+    async postBuild({ outDir }) {
+      // Log how many integrations were processed during the build, using the
+      // count captured in loadContent() instead of re-scanning the docs tree.
+      console.log(`✅ Integration extractor: Processed ${integrationCount} integrations`);
+    }
+ };
+}
+
+module.exports = integrationExtractorPlugin;
\ No newline at end of file
diff --git a/src/components/CUICard/CUICard.tsx b/src/components/CUICard/CUICard.tsx
new file mode 100644
index 00000000000..a83e383f1de
--- /dev/null
+++ b/src/components/CUICard/CUICard.tsx
@@ -0,0 +1,56 @@
+import React, { HTMLAttributes } from 'react'
+import styles from './styles.module.scss'
+
+type CardComponent = React.FunctionComponent<HTMLAttributes<HTMLDivElement>> & {
+  Header: React.FunctionComponent<HTMLAttributes<HTMLDivElement>>
+  Body: React.FunctionComponent<HTMLAttributes<HTMLDivElement>>
+  Footer: React.FunctionComponent<HTMLAttributes<HTMLDivElement>>
+}
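+// Compound-component pattern: judging from the CardComponent type above, CUICard
+// is a div-style wrapper that also exposes Header, Body and Footer subcomponents,
+// so pages can compose e.g. <CUICard><CUICard.Header>...</CUICard.Header></CUICard>.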
+
+const CUICard: CardComponent = ({
+ children,
+ className,
+ ...props
+}): JSX.Element => (
+
+
+ {/* Integration Sections */}
+ {selectedFilter === 'All' ? (
+ // Show sections grouped by type
+ Array.from(groupedIntegrations.entries())
+ .sort(([a], [b]) => {
+ // Use the same sort order as the filter buttons
+ const sortOrder = [
+ 'Language client',
+ 'ClickPipes',
+ 'Data ingestion',
+ 'Data visualization',
+ 'AI/ML',
+ 'Data integration',
+ 'Data management',
+ 'Security governance',
+ 'SQL client'
+ ];
+
+ const indexA = sortOrder.indexOf(a);
+ const indexB = sortOrder.indexOf(b);
+
+ // If both items are in the sort order, sort by their position
+ if (indexA !== -1 && indexB !== -1) {
+ return indexA - indexB;
+ }
+
+ // If only one item is in the sort order, prioritize it
+ if (indexA !== -1) return -1;
+ if (indexB !== -1) return 1;
+
+ // If neither item is in the sort order, fall back to alphabetical
+ return a.localeCompare(b);
+ })
+ .map(([type, typeIntegrations]) => (
+
+