diff --git a/docs/cloud/get-started/cloud-quick-start.md b/docs/cloud/get-started/cloud-quick-start.mdx
similarity index 98%
rename from docs/cloud/get-started/cloud-quick-start.md
rename to docs/cloud/get-started/cloud-quick-start.mdx
index c984024ea81..9dcb1416485 100644
--- a/docs/cloud/get-started/cloud-quick-start.md
+++ b/docs/cloud/get-started/cloud-quick-start.mdx
@@ -27,7 +27,9 @@ import SQLConsoleDetail from '@site/docs/_snippets/_launch_sql_console.md';
The quickest and easiest way to get up and running with ClickHouse is to create a new
service in [ClickHouse Cloud](https://console.clickhouse.cloud).
-## 1. Create a ClickHouse service {#1-create-a-clickhouse-service}
+
+
+## Create a ClickHouse service {#1-create-a-clickhouse-service}
To create a free ClickHouse service in [ClickHouse Cloud](https://console.clickhouse.cloud), you just need to sign up by completing the following steps:
@@ -67,22 +69,20 @@ Users can customize the service resources if required, specifying a minimum and
Congratulations! Your ClickHouse Cloud service is up and running and onboarding is complete. Keep reading for details on how to start ingesting and querying your data.
-## 2. Connect to ClickHouse {#2-connect-to-clickhouse}
+## Connect to ClickHouse {#2-connect-to-clickhouse}
There are 2 ways to connect to ClickHouse:
- Connect using our web-based SQL console
- Connect with your app
-
+
### Connect using SQL console {#connect-using-sql-console}
For getting started quickly, ClickHouse provides a web-based SQL console to which you will be redirected on completing onboarding.
-
Create a query tab and enter a simple query to verify that your connection is working:
-
```sql
SHOW databases
```
@@ -104,7 +104,7 @@ Press the connect button from the navigation menu. A modal will open offering th
If you can't see your language client, you may want to check our list of [Integrations](/integrations).
-## 3. Add data {#3-add-data}
+## Add data {#3-add-data}
ClickHouse is better with data! There are multiple ways to add data and most of them are available on the Data Sources page, which can be accessed in the navigation menu.
@@ -322,6 +322,8 @@ Suppose we have the following text in a CSV file named `data.csv`:
+
+
## What's Next? {#whats-next}
- The [Tutorial](/tutorial.md) has you insert 2 million rows into a table and write some analytical queries
diff --git a/docs/quick-start.mdx b/docs/quick-start.mdx
index f5da552e1db..4cc7551a3d7 100644
--- a/docs/quick-start.mdx
+++ b/docs/quick-start.mdx
@@ -11,19 +11,20 @@ description: 'ClickHouse Quick Start guide'
import Tabs from '@theme/Tabs';
import TabItem from '@theme/TabItem';
import CodeBlock from '@theme/CodeBlock';
+import {VerticalStepper} from '@clickhouse/click-ui/bundled';
-## Overview
+**Welcome to ClickHouse!**
-Get set up quickly with ClickHouse. Download an appropriate binary for your OS,
-learn to run ClickHouse server, and create a table, insert data into it, and
-query your table using ClickHouse client.
+In this quick-start tutorial, we'll get you set up in 8
+easy steps. You'll download an appropriate binary for your OS,
+learn to run ClickHouse server, and use the ClickHouse client to create a table,
+then insert data into it and run a query to select that data.
-### Prerequisites
+Let's get started!
-You'll need curl or another command-line HTTP client to fetch the ClickHouse
-binary.
+
-## Download the binary
+## Download ClickHouse {#download-the-binary}
ClickHouse runs natively on Linux, FreeBSD and macOS, and runs on Windows via
the [WSL](https://learn.microsoft.com/en-us/windows/wsl/about). The simplest way to download ClickHouse locally is to run the
@@ -157,214 +158,214 @@ below, or you can check out our [Integrations](/integrations) page for a long li
technologies that integrate with ClickHouse.
-
-
-Use the [`s3` table function](/sql-reference/table-functions/s3.md) to
-read files from S3. It's a table function - meaning that the result is a table
-that can be:
-
-1. used as the source of a `SELECT` query (allowing you to run ad-hoc queries and
- leave your data in S3), or...
-2. insert the resulting table into a `MergeTree` table (when you are ready to
- move your data into ClickHouse)
-
-An ad-hoc query looks like:
-
-```sql
-SELECT
- passenger_count,
- avg(toFloat32(total_amount))
-FROM s3(
- 'https://datasets-documentation.s3.eu-west-3.amazonaws.com/nyc-taxi/trips_0.gz',
- 'TabSeparatedWithNames'
-)
-GROUP BY passenger_count
-ORDER BY passenger_count;
-```
-
-Moving the data into a ClickHouse table looks like the following, where
-`nyc_taxi` is a `MergeTree` table:
-
-```sql
-INSERT INTO nyc_taxi
- SELECT * FROM s3(
- 'https://datasets-documentation.s3.eu-west-3.amazonaws.com/nyc-taxi/trips_0.gz',
- 'TabSeparatedWithNames'
-)
-SETTINGS input_format_allow_errors_num=25000;
-```
-
-View our [collection of AWS S3 documentation pages](/integrations/data-ingestion/s3/index.md) for lots more details and examples of using S3 with ClickHouse.
-
-
-
-
-The [`s3` table function](/sql-reference/table-functions/s3.md) used for
-reading data in AWS S3 also works on files in Google Cloud Storage.
-
-For example:
-
-```sql
-SELECT
- *
-FROM s3(
- 'https://storage.googleapis.com/my-bucket/trips.parquet',
- 'MY_GCS_HMAC_KEY',
- 'MY_GCS_HMAC_SECRET_KEY',
- 'Parquet'
-)
-LIMIT 1000
-```
-
-Find more details on the [`s3` table function page](/sql-reference/table-functions/s3.md).
-
-
-
-
-The [`url` table function](/sql-reference/table-functions/url) reads
-files accessible from the web:
-
-```sql
---By default, ClickHouse prevents redirects to protect from SSRF attacks.
---The URL below requires a redirect, so we must set max_http_get_redirects > 0.
-SET max_http_get_redirects=10;
-
-SELECT *
-FROM url(
- 'http://prod2.publicdata.landregistry.gov.uk.s3-website-eu-west-1.amazonaws.com/pp-complete.csv',
- 'CSV'
- );
-```
-
-Find more details on the [`url` table function page](/sql-reference/table-functions/url).
-
-
-
-
-Use the [`file` table engine](/sql-reference/table-functions/file) to
-read a local file. For simplicity, copy the file to the `user_files` directory
-(which is found in the directory where you downloaded the ClickHouse binary).
-
-```sql
-DESCRIBE TABLE file('comments.tsv')
-
-Query id: 8ca9b2f9-65a2-4982-954a-890de710a336
-
-┌─name──────┬─type────────────────────┬─default_type─┬─default_expression─┬─comment─┬─codec_expression─┬─ttl_expression─┐
-│ id │ Nullable(Int64) │ │ │ │ │ │
-│ type │ Nullable(String) │ │ │ │ │ │
-│ author │ Nullable(String) │ │ │ │ │ │
-│ timestamp │ Nullable(DateTime64(9)) │ │ │ │ │ │
-│ comment │ Nullable(String) │ │ │ │ │ │
-│ children │ Array(Nullable(Int64)) │ │ │ │ │ │
-└───────────┴─────────────────────────┴──────────────┴────────────────────┴─────────┴──────────────────┴────────────────┘
-```
-
-Notice ClickHouse infers the names and data types of your columns by analyzing a
-large batch of rows. If ClickHouse can not determine the storage type from the
-filename, you can specify it as the second argument:
-
-```sql
-SELECT count()
-FROM file(
- 'comments.tsv',
- 'TabSeparatedWithNames'
-)
-```
-
-View the [`file` table function](/sql-reference/table-functions/file)
-docs page for more details.
-
-
-
-
-Use the [`postgresql` table function](/sql-reference/table-functions/postgresql)
-to read data from a table in PostgreSQL:
-
-```sql
-SELECT *
-FROM
- postgresql(
- 'localhost:5432',
- 'my_database',
- 'my_table',
- 'postgresql_user',
- 'password')
-;
-```
-
-View the [`postgresql` table function](/sql-reference/table-functions/postgresql)
-docs page for more details.
-
-
-
-
-Use the [`mysql` table function](/sql-reference/table-functions/mysql)
-to read data from a table in MySQL:
-
-```sql
-SELECT *
-FROM
- mysql(
- 'localhost:3306',
- 'my_database',
- 'my_table',
- 'postgresql_user',
- 'password')
-;
-```
-
-View the [`mysql` table function](/sql-reference/table-functions/mysql)
-docs page for more details.
-
-
-
-
-ClickHouse can read data from any ODBC or JDBC data source:
-
-```sql
-SELECT *
-FROM
- odbc(
- 'DSN=mysqlconn',
- 'my_database',
- 'my_table'
- );
-```
-
-View the [`odbc` table function](/sql-reference/table-functions/odbc)
-and the [`jdbc` table function](/sql-reference/table-functions/jdbc) docs
-pages for more details.
-
-
-
-
-Message queues can stream data into ClickHouse using the corresponding table
-engine, including:
-
-- **Kafka**: integrate with Kafka using the [`Kafka` table engine](/engines/table-engines/integrations/kafka)
-- **Amazon MSK**: integrate with [Amazon Managed Streaming for Apache Kafka (MSK)](/integrations/kafka/cloud/amazon-msk/)
-- **RabbitMQ**: integrate with RabbitMQ using the [`RabbitMQ` table engine](/engines/table-engines/integrations/rabbitmq)
-
-
-
-
-ClickHouse has table functions to read data from the following sources:
-
-- **Hadoop**: integrate with Apache Hadoop using the [`hdfs` table function](/sql-reference/table-functions/hdfs)
-- **Hudi**: read from existing Apache Hudi tables in S3 using the [`hudi` table function](/sql-reference/table-functions/hudi)
-- **Iceberg**: read from existing Apache Iceberg tables in S3 using the [`iceberg` table function](/sql-reference/table-functions/iceberg)
-- **DeltaLake**: read from existing Delta Lake tables in S3 using the [`deltaLake` table function](/sql-reference/table-functions/deltalake)
-
-
-
-
-Check out our [long list of ClickHouse integrations](/integrations) to find how to connect your existing frameworks and data sources to ClickHouse.
-
-
+
+
+ Use the [`s3` table function](/sql-reference/table-functions/s3.md) to
+ read files from S3. It's a table function - meaning that the result is a table
+ that can be:
+
+ 1. used as the source of a `SELECT` query (allowing you to run ad-hoc queries and
+ leave your data in S3), or...
+ 2. insert the resulting table into a `MergeTree` table (when you are ready to
+ move your data into ClickHouse)
+
+ An ad-hoc query looks like:
+
+ ```sql
+ SELECT
+ passenger_count,
+ avg(toFloat32(total_amount))
+ FROM s3(
+ 'https://datasets-documentation.s3.eu-west-3.amazonaws.com/nyc-taxi/trips_0.gz',
+ 'TabSeparatedWithNames'
+ )
+ GROUP BY passenger_count
+ ORDER BY passenger_count;
+ ```
+
+ Moving the data into a ClickHouse table looks like the following, where
+ `nyc_taxi` is a `MergeTree` table:
+
+ ```sql
+ INSERT INTO nyc_taxi
+ SELECT * FROM s3(
+ 'https://datasets-documentation.s3.eu-west-3.amazonaws.com/nyc-taxi/trips_0.gz',
+ 'TabSeparatedWithNames'
+ )
+ SETTINGS input_format_allow_errors_num=25000;
+ ```
+
+ View our [collection of AWS S3 documentation pages](/integrations/data-ingestion/s3/index.md) for lots more details and examples of using S3 with ClickHouse.
+
+
+
+
+ The [`s3` table function](/sql-reference/table-functions/s3.md) used for
+ reading data in AWS S3 also works on files in Google Cloud Storage.
+
+ For example:
+
+ ```sql
+ SELECT
+ *
+ FROM s3(
+ 'https://storage.googleapis.com/my-bucket/trips.parquet',
+ 'MY_GCS_HMAC_KEY',
+ 'MY_GCS_HMAC_SECRET_KEY',
+ 'Parquet'
+ )
+ LIMIT 1000
+ ```
+
+ Find more details on the [`s3` table function page](/sql-reference/table-functions/s3.md).
+
+
+
+
+ The [`url` table function](/sql-reference/table-functions/url) reads
+ files accessible from the web:
+
+ ```sql
+ --By default, ClickHouse prevents redirects to protect from SSRF attacks.
+ --The URL below requires a redirect, so we must set max_http_get_redirects > 0.
+ SET max_http_get_redirects=10;
+
+ SELECT *
+ FROM url(
+ 'http://prod2.publicdata.landregistry.gov.uk.s3-website-eu-west-1.amazonaws.com/pp-complete.csv',
+ 'CSV'
+ );
+ ```
+
+ Find more details on the [`url` table function page](/sql-reference/table-functions/url).
+
+
+
+
+ Use the [`file` table engine](/sql-reference/table-functions/file) to
+ read a local file. For simplicity, copy the file to the `user_files` directory
+ (which is found in the directory where you downloaded the ClickHouse binary).
+
+ ```sql
+ DESCRIBE TABLE file('comments.tsv')
+
+ Query id: 8ca9b2f9-65a2-4982-954a-890de710a336
+
+ ┌─name──────┬─type────────────────────┐
+ │ id │ Nullable(Int64) │
+ │ type │ Nullable(String) │
+ │ author │ Nullable(String) │
+ │ timestamp │ Nullable(DateTime64(9)) │
+ │ comment │ Nullable(String) │
+ │ children │ Array(Nullable(Int64)) │
+ └───────────┴─────────────────────────┘
+ ```
+
+ Notice ClickHouse infers the names and data types of your columns by analyzing a
+ large batch of rows. If ClickHouse cannot determine the file format from the
+ filename, you can specify it as the second argument:
+
+ ```sql
+ SELECT count()
+ FROM file(
+ 'comments.tsv',
+ 'TabSeparatedWithNames'
+ )
+ ```
+
+ View the [`file` table function](/sql-reference/table-functions/file)
+ docs page for more details.
+
+
+
+
+ Use the [`postgresql` table function](/sql-reference/table-functions/postgresql)
+ to read data from a table in PostgreSQL:
+
+ ```sql
+ SELECT *
+ FROM
+ postgresql(
+ 'localhost:5432',
+ 'my_database',
+ 'my_table',
+ 'postgresql_user',
+ 'password')
+ ;
+ ```
+
+ View the [`postgresql` table function](/sql-reference/table-functions/postgresql)
+ docs page for more details.
+
+
+
+
+ Use the [`mysql` table function](/sql-reference/table-functions/mysql)
+ to read data from a table in MySQL:
+
+ ```sql
+ SELECT *
+ FROM
+ mysql(
+ 'localhost:3306',
+ 'my_database',
+ 'my_table',
+     'mysql_user',
+ 'password')
+ ;
+ ```
+
+ View the [`mysql` table function](/sql-reference/table-functions/mysql)
+ docs page for more details.
+
+
+
+
+ ClickHouse can read data from any ODBC or JDBC data source:
+
+ ```sql
+ SELECT *
+ FROM
+ odbc(
+ 'DSN=mysqlconn',
+ 'my_database',
+ 'my_table'
+ );
+ ```
+
+ View the [`odbc` table function](/sql-reference/table-functions/odbc)
+ and the [`jdbc` table function](/sql-reference/table-functions/jdbc) docs
+ pages for more details.
+
+
+
+
+ Message queues can stream data into ClickHouse using the corresponding table
+ engine, including:
+
+ - **Kafka**: integrate with Kafka using the [`Kafka` table engine](/engines/table-engines/integrations/kafka)
+ - **Amazon MSK**: integrate with [Amazon Managed Streaming for Apache Kafka (MSK)](/integrations/kafka/cloud/amazon-msk/)
+ - **RabbitMQ**: integrate with RabbitMQ using the [`RabbitMQ` table engine](/engines/table-engines/integrations/rabbitmq)
+
+
+
+
+ ClickHouse has table functions to read data from the following sources:
+
+ - **Hadoop**: integrate with Apache Hadoop using the [`hdfs` table function](/sql-reference/table-functions/hdfs)
+ - **Hudi**: read from existing Apache Hudi tables in S3 using the [`hudi` table function](/sql-reference/table-functions/hudi)
+ - **Iceberg**: read from existing Apache Iceberg tables in S3 using the [`iceberg` table function](/sql-reference/table-functions/iceberg)
+ - **DeltaLake**: read from existing Delta Lake tables in S3 using the [`deltaLake` table function](/sql-reference/table-functions/deltalake)
+
+
+
+
+ Check out our [long list of ClickHouse integrations](/integrations) to find how to connect your existing frameworks and data sources to ClickHouse.
+
+
-## Next steps
+## Explore
- Check out our [Core Concepts](/managing-data/core-concepts) section to learn some of the fundamentals of how ClickHouse works under the hood.
- Check out the [Advanced Tutorial](tutorial.md) which takes a much deeper dive into the key concepts and capabilities of ClickHouse.
@@ -373,3 +374,5 @@ Check out our [long list of ClickHouse integrations](/integrations) to find how
- If your data is coming from an external source, view our [collection of integration guides](/integrations/) for connecting to message queues, databases, pipelines and more.
- If you are using a UI/BI visualization tool, view the [user guides for connecting a UI to ClickHouse](/integrations/data-visualization/).
- The user guide on [primary keys](/guides/best-practices/sparse-primary-indexes.md) is everything you need to know about primary keys and how to define them.
+
+
diff --git a/docs/tutorial.md b/docs/tutorial.md
index fb60ef43e52..98641bc290c 100644
--- a/docs/tutorial.md
+++ b/docs/tutorial.md
@@ -17,6 +17,8 @@ Learn how to ingest and query data in ClickHouse using a New York City taxi exam
You need access to a running ClickHouse service to complete this tutorial. For instructions, see the [Quick Start](./quick-start.mdx) guide.
+
+
## Create a new table {#create-a-new-table}
The New York City taxi dataset contains details about millions of taxi rides, with columns including tip amount, tolls, payment type, and more. Create a table to store this data.
@@ -508,6 +510,7 @@ Write some queries that join the `taxi_zone_dictionary` with your `trips` table.
Generally, we avoid using `SELECT *` often in ClickHouse. You should only retrieve the columns you actually need. However, this query is slower for the purposes of the example.
:::
+
## Next steps {#next-steps}
@@ -517,3 +520,4 @@ Learn more about ClickHouse with the following documentation:
- [Integrate an external data source](/integrations/index.mdx): Review data source integration options, including files, Kafka, PostgreSQL, data pipelines, and many others.
- [Visualize data in ClickHouse](./integrations/data-visualization/index.md): Connect your favorite UI/BI tool to ClickHouse.
- [SQL Reference](./sql-reference/index.md): Browse the SQL functions available in ClickHouse for transforming, processing and analyzing data.
+
diff --git a/docusaurus.config.en.js b/docusaurus.config.en.js
index 37d526ec4bf..54426a5c704 100644
--- a/docusaurus.config.en.js
+++ b/docusaurus.config.en.js
@@ -3,10 +3,13 @@ import math from "remark-math";
import katex from "rehype-katex";
import chHeader from "./plugins/header.js";
import fixLinks from "./src/hooks/fixLinks.js";
+const path = require('path');
+const remarkCustomBlocks = require('./plugins/remark-custom-blocks');
+
+// Import custom plugins
const { customParseFrontMatter } = require('./plugins/frontmatter-validation/customParseFrontMatter');
const checkFloatingPages = require('./plugins/checkFloatingPages');
const frontmatterValidator = require('./plugins/frontmatter-validation/frontmatterValidatorPlugin');
-const path = require('path');
import pluginLlmsTxt from './plugins/llms-txt-plugin.ts'
// Helper function to skip over index.md files.
@@ -156,7 +159,7 @@ const config = {
showLastUpdateTime: false,
sidebarCollapsed: true,
routeBasePath: "/",
- remarkPlugins: [math],
+ remarkPlugins: [math, remarkCustomBlocks],
beforeDefaultRemarkPlugins: [fixLinks],
rehypePlugins: [katex],
},
@@ -360,7 +363,10 @@ const config = {
pluginLlmsTxt,
{}
],
- ['./plugins/tailwind-config.js', {}],
+ [
+ './plugins/tailwind-config.js',
+ {}
+ ]
],
customFields: {
blogSidebarLink: "/docs/knowledgebase", // Used for KB article page
diff --git a/package.json b/package.json
index 2d4198e9a86..fb76d3ceb97 100644
--- a/package.json
+++ b/package.json
@@ -70,7 +70,8 @@
"remark-math": "^6.0.0",
"sass": "^1.86.1",
"search-insights": "^2.17.3",
- "short-uuid": "^5.2.0"
+ "short-uuid": "^5.2.0",
+ "unist-util-visit": "^5.0.0"
},
"devDependencies": {
"@argos-ci/cli": "^2.5.5",
diff --git a/plugins/remark-custom-blocks.js b/plugins/remark-custom-blocks.js
new file mode 100644
index 00000000000..cdaad6bc908
--- /dev/null
+++ b/plugins/remark-custom-blocks.js
@@ -0,0 +1,130 @@
+const { visit } = require('unist-util-visit');
+
+// --- Helper Functions ---
+const extractText = (nodes) => {
+ let text = '';
+ if (!nodes) return text;
+ for (const node of nodes) {
+ if (node.type === 'text') {
+ text += node.value;
+ } else if (node.children && Array.isArray(node.children)) {
+ text += extractText(node.children);
+ }
+ }
+ return text.trim();
+};
+
+// --- Main Plugin Function ---
+const plugin = (options) => {
+ const transformer = (tree, file) => {
+
+ // Target JSX elements in the AST
+ visit(tree, 'mdxJsxFlowElement', (node, index, parent) => {
+ // Look specifically for the tag used in the markdown file
+ if (node.name === 'VerticalStepper') {
+ try {
+ // --- 1. Parse Attributes ---
+ const jsxAttributes = node.attributes || [];
+ let type = "numbered"; // Default type
+ let isExpanded = true;
+
+ // Extract attributes
+ jsxAttributes.forEach(attr => {
+ if (attr.type === 'mdxJsxAttribute') {
+ if (attr.name === 'type' && typeof attr.value === 'string') {
+ type = attr.value;
+ }
+ }
+ });
+
+ // --- 2. Process Children to Build Steps Data ---
+ const stepsData = [];
+ let currentStepContent = [];
+ let currentStepLabel = null;
+ let currentStepId = null;
+ let currentAnchorId = null;
+
+ const finalizeStep = () => {
+ if (currentStepLabel) {
+ stepsData.push({
+ id: currentStepId, // step-X ID
+ label: currentStepLabel, // Plain text label
+ anchorId: currentAnchorId,
+ content: [...currentStepContent],
+ });
+ }
+ currentStepContent = [];
+ currentStepLabel = null; // Reset label
+ };
+
+ if (node.children && node.children.length > 0) {
+ node.children.forEach((child) => {
+ if (child.type === 'heading' && child.depth === 2) {
+ finalizeStep(); // Finalize the previous step first
+ currentStepLabel = extractText(child.children);
+ currentAnchorId = child.data.hProperties.id;
+ currentStepId = `step-${stepsData.length + 1}`; // Generate step-X ID
+ currentStepContent.push(child); // We need the header otherwise onBrokenAnchors fails
+ } else if (currentStepLabel) {
+ // Only collect content nodes *after* a heading has defined a step
+ currentStepContent.push(child);
+ }
+ });
+ }
+ finalizeStep(); // Finalize the last step found
+
+ // --- 3. Transform Parent Node ---
+      // Transform the node to match src/theme/MDXComponents.js
+ node.name = 'Stepper';
+ node.children = []; // Clear original children
+
+ // Set attributes
+ node.attributes = [
+ { type: 'mdxJsxAttribute', name: 'type', value: type },
+ ];
+ if (isExpanded) {
+ node.attributes.push({
+ type: 'mdxJsxAttribute',
+ name: 'expanded', // Pass 'expanded' prop to React component
+ value: 'true'
+ });
+ }
+
+ // --- 4. Generate Child Nodes ---
+ stepsData.forEach(step => {
+ // Basic attributes for Step
+ const stepAttributes = [
+ { type: 'mdxJsxAttribute', name: 'id', value: step.id }, // step-X
+ { type: 'mdxJsxAttribute', name: 'label', value: step.label }, // Plain text
+ ];
+
+ // Add forceExpanded attribute if parent was expanded
+ // (Matches React prop name used before anchor logic)
+ if (isExpanded) {
+ stepAttributes.push({
+ type: 'mdxJsxAttribute',
+ name: 'forceExpanded',
+ value: 'true'
+ });
+ }
+
+ // Push the Step node
+ node.children.push({
+ type: 'mdxJsxFlowElement',
+ name: 'Step', // Output Step tag
+ attributes: stepAttributes,
+ children: [...step.content], // Pass content nodes as children
+ });
+ });
+ } catch (error) {
+ const filePath = file?.path || 'unknown file';
+ // Added error logging
+ console.error(`Error processing in ${filePath}:`, error);
+ }
+ }
+ });
+ };
+ return transformer;
+};
+
+module.exports = plugin;
diff --git a/src/components/Stepper/Stepper.tsx b/src/components/Stepper/Stepper.tsx
new file mode 100644
index 00000000000..245c78e86f6
--- /dev/null
+++ b/src/components/Stepper/Stepper.tsx
@@ -0,0 +1,129 @@
+import React from 'react';
+import { VerticalStepper as OriginalVerticalStepper } from '@clickhouse/click-ui/bundled';
+
+// --- Step Component ---
+interface StepProps {
+ children?: React.ReactNode;
+ id?: string; // step-X ID
+ label?: React.ReactNode;
+ forceExpanded?: string; // From parent 'expanded' state
+ isFirstStep?: boolean; // Prop calculated by parent
+ [key: string]: any;
+}
+
+const Step = ({
+ children,
+ id,
+ label,
+ forceExpanded,
+ isFirstStep = false,
+ ...restProps
+ }: StepProps) => {
+
+ // Determine 'active' status based on props passed from parent
+ const shouldBeActive = isFirstStep || forceExpanded === 'true';
+ const status: 'active' | 'complete' | 'incomplete' = shouldBeActive ? 'active' : 'incomplete';
+
+ // Let underlying component handle expansion based on status='active'
+ const collapsed = true;
+
+ // Swap out the Click-UI Stepper label for the H2 header
+ React.useEffect(() => {
+ try {
+ const button = document.querySelectorAll(`button[id^=${id}]`)[0];
+ const divChildren = Array.from(button.children).filter(el => el.tagName === 'DIV');
+ const label = divChildren[1];
+ const content = button.nextElementSibling;
+ const header = content.querySelectorAll('h2')[0]
+ header.style.margin = '0';
+ button.append(header)
+ label.remove()
+ } catch (e) {
+ console.log('Error occurred in Stepper.tsx while swapping H2 for Click-UI label')
+ }
+ }, [id]);
+
+ // Filter out props specific to this wrapper logic
+ const {
+ forceExpanded: _,
+ isFirstStep: __,
+ ...domSafeProps // Pass the rest to the underlying component
+ } = restProps;
+
+ return (
+
+ {children}
+
+ );
+};
+
+// --- Main VerticalStepper Component ---
+interface StepperProps {
+ children?: React.ReactNode;
+ type?: 'numbered' | 'bulleted';
+ className?: string;
+ expanded?: string; // Corresponds to allExpanded in MDX
+ [key: string]: any;
+}
+
+// Using VerticalStepper name based on MDXComponents.js
+const VStepper = ({
+ children,
+ type = 'numbered',
+ className,
+ expanded, // 'true' if allExpanded was set
+ ...props
+ }: StepperProps) => {
+
+ // Determine if all steps should be expanded from the start
+ const isExpandedMode = expanded === 'true';
+
+ // Get children and filter out non-elements
+ const childSteps = React.Children.toArray(children)
+ .filter(child => React.isValidElement(child));
+
+ // Extract step-X IDs (used for keys)
+ const stepIds = childSteps.map((child, index) => {
+ const childElement = child as React.ReactElement;
+ return childElement.props.id || `step-${index + 1}`;
+ });
+
+ // Prepare children, passing down calculated state
+ const enhancedChildren = childSteps.map((child, index) => {
+ const childElement = child as React.ReactElement;
+ const stepId = childElement.props.id || `step-${index + 1}`;
+ const isFirstStep = index === 0; // Is this the first step?
+
+ return React.cloneElement(childElement, {
+ key: stepId,
+ id: stepId,
+ isFirstStep, // Pass down flag for first step logic
+ forceExpanded: isExpandedMode ? 'true' : undefined // Pass down expanded mode
+ });
+ });
+
+ // Filter out custom props before passing to underlying component
+ const { expanded: _, ...domProps } = props;
+
+ return (
+
+ {enhancedChildren}
+
+ );
+};
+
+// Attach the Step component
+VStepper.Step = Step;
+
+// Export the main component
+export default VStepper;
diff --git a/src/css/default.scss b/src/css/default.scss
index b2342d46493..f97073a9f4e 100644
--- a/src/css/default.scss
+++ b/src/css/default.scss
@@ -1,7 +1,7 @@
/* You can override the default Infima variables here. */
:root {
- --default-font: 14px;
- --default-line-height: 21px;
+ --default-font: 16px;
+ --default-line-height: 28px;
/* colors for right side table of contents */
--ifm-toc-link-color: var(--click-color-text-muted);
diff --git a/src/theme/MDXComponents.js b/src/theme/MDXComponents.js
new file mode 100644
index 00000000000..626feb75980
--- /dev/null
+++ b/src/theme/MDXComponents.js
@@ -0,0 +1,18 @@
+// src/theme/MDXComponents.js
+import React from 'react';
+import MDXComponents from '@theme-original/MDXComponents';
+
+// Import the custom Stepper component
+// Make sure the path matches your project structure
+import VStepper from '@site/src/components/Stepper/Stepper';
+
+// Define the enhanced components
+const enhancedComponents = {
+ ...MDXComponents,
+
+ // Map to the components expected from the remark plugin
+ Stepper: VStepper,
+ Step: VStepper.Step,
+};
+
+export default enhancedComponents;
\ No newline at end of file