diff --git a/menu/changelogs.json b/menu/changelogs.json
index c7c95cd1fd..452f221d2e 100644
--- a/menu/changelogs.json
+++ b/menu/changelogs.json
@@ -150,8 +150,8 @@
"label": "Data Warehouse for ClickHouse®"
},
{
- "category": "data-lab",
- "label": "Data Lab for Apache Spark™"
+ "category": "apache-spark",
+ "label": "Clusters for Apache Spark™"
},
{
"category": "nats",
diff --git a/menu/filters.json b/menu/filters.json
index 933cf8172a..856d4f9dae 100644
--- a/menu/filters.json
+++ b/menu/filters.json
@@ -115,8 +115,8 @@
{
"items": [
{
- "category": "data-lab",
- "label": "Data Lab for Apache Spark™"
+ "category": "apache-spark",
+ "label": "Clusters for Apache Spark™"
},
{
"category": "nats",
diff --git a/menu/navigation.ts b/menu/navigation.ts
index c127afda26..a0117fbf02 100644
--- a/menu/navigation.ts
+++ b/menu/navigation.ts
@@ -10,7 +10,7 @@ import { clustersForKafkaMenu } from "../pages/clusters-for-kafka/menu"
import { cockpitMenu } from "../pages/cockpit/menu"
import { containerRegistryMenu } from "../pages/container-registry/menu"
import { cpanelHostingMenu } from "../pages/cpanel-hosting/menu"
-import { dataLabMenu } from "../pages/data-lab/menu"
+import { dataLabMenu } from "../pages/apache-spark/menu"
import { dataOrchestratorMenu } from "../pages/data-orchestrator/menu"
import { dataWarehouseMenu } from "../pages/data-warehouse/menu"
import { dediboxMenu } from "../pages/dedibox/menu"
diff --git a/pages/data-lab/concepts.mdx b/pages/apache-spark/concepts.mdx
similarity index 100%
rename from pages/data-lab/concepts.mdx
rename to pages/apache-spark/concepts.mdx
diff --git a/pages/data-lab/faq.mdx b/pages/apache-spark/faq.mdx
similarity index 95%
rename from pages/data-lab/faq.mdx
rename to pages/apache-spark/faq.mdx
index 2114abba72..a7ef5a416a 100644
--- a/pages/data-lab/faq.mdx
+++ b/pages/apache-spark/faq.mdx
@@ -59,7 +59,7 @@ Yes, you can run your cluster on either CPUs or GPUs. Scaleway leverages Nvidia'
Yes, you can connect a different notebook via Private Networks.
-Refer to the [dedicated documentation](/data-lab/how-to/use-private-networks/) for comprehensive information on how to connect to an Apache Spark™ cluster over Private Networks.
+Refer to the [dedicated documentation](/apache-spark/how-to/use-private-networks/) for comprehensive information on how to connect to an Apache Spark™ cluster over Private Networks.
## Usage and management
diff --git a/pages/data-lab/how-to/access-notebook.mdx b/pages/apache-spark/how-to/access-notebook.mdx
similarity index 88%
rename from pages/data-lab/how-to/access-notebook.mdx
rename to pages/apache-spark/how-to/access-notebook.mdx
index 8b90e1c891..7f5881268c 100644
--- a/pages/data-lab/how-to/access-notebook.mdx
+++ b/pages/apache-spark/how-to/access-notebook.mdx
@@ -11,13 +11,13 @@ import Requirements from '@macros/iam/requirements.mdx'
This page explains how to access and use the notebook environment of your Apache Spark™ cluster using the Scaleway console.
-You can also use your Apache Spark™ cluster using a separate notebook (JupyterLab, Zeppelin, etc.) running on a Scaleway Instance within the same Private Network as your cluster. Refer to the [dedicated documentation](/data-lab/how-to/use-private-networks/) for more information.
+You can also use your Apache Spark™ cluster using a separate notebook (JupyterLab, Zeppelin, etc.) running on a Scaleway Instance within the same Private Network as your cluster. Refer to the [dedicated documentation](/apache-spark/how-to/use-private-networks/) for more information.
- A Scaleway account logged into the [console](https://console.scaleway.com)
- [Owner](/iam/concepts/#owner) status or [IAM permissions](/iam/concepts/#permission) allowing you to perform actions in the intended Organization
-- Created an [Apache Spark™ cluster](/data-lab/how-to/create-data-lab/) with a notebook
+- Created an [Apache Spark™ cluster](/apache-spark/how-to/create-spark-cluster/) with a notebook
- Created an [IAM API key](/iam/how-to/create-api-keys/)
## How to access the notebook of your cluster
diff --git a/pages/data-lab/how-to/access-spark-ui.mdx b/pages/apache-spark/how-to/access-spark-ui.mdx
similarity index 94%
rename from pages/data-lab/how-to/access-spark-ui.mdx
rename to pages/apache-spark/how-to/access-spark-ui.mdx
index 4d6b2e05cf..7cb17b1346 100644
--- a/pages/data-lab/how-to/access-spark-ui.mdx
+++ b/pages/apache-spark/how-to/access-spark-ui.mdx
@@ -15,7 +15,7 @@ This page explains how to access the Apache Spark™ UI of your Apache Spark™
- A Scaleway account logged into the [console](https://console.scaleway.com)
- [Owner](/iam/concepts/#owner) status or [IAM permissions](/iam/concepts/#permission) allowing you to perform actions in the intended Organization
-- Created an [Apache Spark™ cluster](/data-lab/how-to/create-data-lab/)
+- Created an [Apache Spark™ cluster](/apache-spark/how-to/create-spark-cluster/)
- Created an [IAM API key](/iam/how-to/create-api-keys/)
1. Click **Apache Spark™** under **Data & Analytics** on the side menu. The **Clusters for Apache Spark™** page displays.
diff --git a/pages/data-lab/how-to/create-data-lab.mdx b/pages/apache-spark/how-to/create-spark-cluster.mdx
similarity index 94%
rename from pages/data-lab/how-to/create-data-lab.mdx
rename to pages/apache-spark/how-to/create-spark-cluster.mdx
index 2c7f6bdf3f..43e6c5c162 100644
--- a/pages/data-lab/how-to/create-data-lab.mdx
+++ b/pages/apache-spark/how-to/create-spark-cluster.mdx
@@ -29,7 +29,7 @@ Clusters for Apache Spark™ is a product designed to assist data scientists and
6. Enter the desired number of worker nodes.
-7. Add a [persistent volume](/data-lab/concepts/#persistent-volume) if required, then enter a volume size according to your needs.
+7. Add a [persistent volume](/apache-spark/concepts/#persistent-volume) if required, then enter a volume size according to your needs.
Persistent volume usage depends on your workload, and only the actual usage will be billed, within the limit defined. A minimum of 1 GB is required to run the notebook.
diff --git a/pages/data-lab/how-to/index.mdx b/pages/apache-spark/how-to/index.mdx
similarity index 100%
rename from pages/data-lab/how-to/index.mdx
rename to pages/apache-spark/how-to/index.mdx
diff --git a/pages/data-lab/how-to/manage-delete-data-lab.mdx b/pages/apache-spark/how-to/manage-delete-spark-cluster.mdx
similarity index 87%
rename from pages/data-lab/how-to/manage-delete-data-lab.mdx
rename to pages/apache-spark/how-to/manage-delete-spark-cluster.mdx
index 251f997fc9..9bee550101 100644
--- a/pages/data-lab/how-to/manage-delete-data-lab.mdx
+++ b/pages/apache-spark/how-to/manage-delete-spark-cluster.mdx
@@ -14,7 +14,7 @@ This page explains how to manage and delete your Apache Spark™ cluster.
- A Scaleway account logged into the [console](https://console.scaleway.com)
- [Owner](/iam/concepts/#owner) status or [IAM permissions](/iam/concepts/#permission) allowing you to perform actions in the intended Organization
-- Created an [Apache Spark™ cluster](/data-lab/how-to/create-data-lab/)
+- Created an [Apache Spark™ cluster](/apache-spark/how-to/create-spark-cluster/)
## How to manage an Apache Spark™ cluster
@@ -23,8 +23,8 @@ This page explains how to manage and delete your Apache Spark™ cluster.
2. Click the name of the cluster you want to manage. The **Overview** tab of the cluster displays. From this page, you can:
- Consult the configuration of your cluster.
- View the network information of your cluster.
- - [Access the Apache Spark™ UI](/data-lab/how-to/access-spark-ui/) of your cluster.
- - [Access the notebook environment](/data-lab/how-to/access-notebook/) of your cluster.
+ - [Access the Apache Spark™ UI](/apache-spark/how-to/access-spark-ui/) of your cluster.
+ - [Access the notebook environment](/apache-spark/how-to/access-notebook/) of your cluster.
3. Click the **Settings** tab.
@@ -37,7 +37,7 @@ This page explains how to manage and delete your Apache Spark™ cluster.
- [Delete your cluster](#how-to-delete-an-apache-sparktm-cluster).
-Once you have created a cluster, you cannot edit the node type. You must [create a new cluster](/data-lab/how-to/create-data-lab/) instead.
+Once you have created a cluster, you cannot edit the node type. You must [create a new cluster](/apache-spark/how-to/create-spark-cluster/) instead.
## How to delete an Apache Spark™ cluster
diff --git a/pages/data-lab/how-to/use-private-networks.mdx b/pages/apache-spark/how-to/use-private-networks.mdx
similarity index 95%
rename from pages/data-lab/how-to/use-private-networks.mdx
rename to pages/apache-spark/how-to/use-private-networks.mdx
index 75bc8c10bf..544b522d1e 100644
--- a/pages/data-lab/how-to/use-private-networks.mdx
+++ b/pages/apache-spark/how-to/use-private-networks.mdx
@@ -11,7 +11,7 @@ import Requirements from '@macros/iam/requirements.mdx'
[Private Networks](/vpc/concepts/#private-networks) allow your Clusters for Apache Spark™ cluster to communicate in an isolated and secure network without needing to be connected to the public internet.
-At the moment, Apache Spark™ clusters can only be attached to a Private Network [during their creation](/data-lab/how-to/create-data-lab/), and cannot be detached and reattached to another Private Network afterward.
+At the moment, Apache Spark™ clusters can only be attached to a Private Network [during their creation](/apache-spark/how-to/create-spark-cluster/), and cannot be detached and reattached to another Private Network afterward.
For full information about Scaleway Private Networks and VPC, see our [dedicated documentation](/vpc/) and [best practices guide](/vpc/reference-content/getting-most-private-networks/).
@@ -220,6 +220,6 @@ Your notebook hosted on an Instance is ready to be used over Private Networks.
- `` can be found in the **Private Networks** tab of your Instance. Make sure to only copy the IP, and not the `/22` part.
-8. [Access the Apache Spark™ UI](/data-lab/how-to/access-spark-ui/) of your cluster. The list of completed applications displays. From here, you can inspect the jobs previously started using `spark-submit`.
+8. [Access the Apache Spark™ UI](/apache-spark/how-to/access-spark-ui/) of your cluster. The list of completed applications displays. From here, you can inspect the jobs previously started using `spark-submit`.
You successfully run workloads on your cluster from an Instance over a Private Network.
\ No newline at end of file
diff --git a/pages/data-lab/index.mdx b/pages/apache-spark/index.mdx
similarity index 87%
rename from pages/data-lab/index.mdx
rename to pages/apache-spark/index.mdx
index c921dd76f4..f91f2d4656 100644
--- a/pages/data-lab/index.mdx
+++ b/pages/apache-spark/index.mdx
@@ -7,7 +7,7 @@ description: Dive into Scaleway Clusters for Apache Spark™ with our quickstart
productName="Clusters for Apache Spark™"
productLogo="dataLab"
description="Clusters for Apache Spark™ is designed to assist data scientists and data engineers perform calculations on a remotely managed Apache Spark infrastructure."
- url="/data-lab/quickstart"
+ url="/apache-spark/quickstart"
label="Clusters for Apache Spark™ Quickstart"
/>
@@ -19,27 +19,27 @@ description: Dive into Scaleway Clusters for Apache Spark™ with our quickstart
icon="rocket"
description="Learn how to create, use, manage, and delete an Apache Spark™ cluster in a few steps."
label="View Quickstart"
- url="/data-lab/quickstart/"
+ url="/apache-spark/quickstart/"
/>
## Changelog
diff --git a/pages/data-lab/menu.ts b/pages/apache-spark/menu.ts
similarity index 86%
rename from pages/data-lab/menu.ts
rename to pages/apache-spark/menu.ts
index f6b3e3cfde..b8b51b94e8 100644
--- a/pages/data-lab/menu.ts
+++ b/pages/apache-spark/menu.ts
@@ -2,7 +2,7 @@ export const dataLabMenu = {
items: [
{
label: 'Overview',
- slug: '../data-lab',
+ slug: '../apache-spark',
},
{
label: 'Concepts',
@@ -20,7 +20,7 @@ export const dataLabMenu = {
items: [
{
label: 'Create a Spark™ cluster',
- slug: 'create-data-lab',
+ slug: 'create-spark-cluster',
},
{
label: 'Access the notebook',
@@ -36,7 +36,7 @@ export const dataLabMenu = {
},
{
label: 'Manage and delete a cluster',
- slug: 'manage-delete-data-lab',
+ slug: 'manage-delete-spark-cluster',
},
],
label: 'How to',
@@ -48,5 +48,5 @@ export const dataLabMenu = {
},
],
label: 'Clusters for Apache Spark™',
- slug: 'data-lab',
+ slug: 'apache-spark',
}
diff --git a/pages/data-lab/quickstart.mdx b/pages/apache-spark/quickstart.mdx
similarity index 96%
rename from pages/data-lab/quickstart.mdx
rename to pages/apache-spark/quickstart.mdx
index b7f658b306..ebd78ec029 100644
--- a/pages/data-lab/quickstart.mdx
+++ b/pages/apache-spark/quickstart.mdx
@@ -46,7 +46,7 @@ This documentation explains how to quickly create an Apache Spark™ cluster, ac
Once the cluster is created, you are directed to its **Overview** page.
-Refer to the [dedicated documentation](/data-lab/how-to/create-data-lab/) for detailed information on how to create a cluster.
+Refer to the [dedicated documentation](/apache-spark/how-to/create-spark-cluster/) for detailed information on how to create a cluster.
## How to connect to your cluster's notebook
diff --git a/pages/data-lab/troubleshooting/cannot-run-data-lab.mdx b/pages/apache-spark/troubleshooting/cannot-run-spark-cluster.mdx
similarity index 81%
rename from pages/data-lab/troubleshooting/cannot-run-data-lab.mdx
rename to pages/apache-spark/troubleshooting/cannot-run-spark-cluster.mdx
index 98f203b85c..c4d9a73b10 100644
--- a/pages/data-lab/troubleshooting/cannot-run-data-lab.mdx
+++ b/pages/apache-spark/troubleshooting/cannot-run-spark-cluster.mdx
@@ -13,7 +13,7 @@ import Requirements from '@macros/iam/requirements.mdx'
- A Scaleway account logged into the [console](https://console.scaleway.com)
- [Owner](/iam/concepts/#owner) status or [IAM permissions](/iam/concepts/#permission) allowing you to perform actions in the intended Organization
-- An [Apache Spark™ cluster](/data-lab/how-to/create-data-lab/)
+- An [Apache Spark™ cluster](/apache-spark/how-to/create-spark-cluster/)
## Timeout errors
@@ -27,7 +27,7 @@ The Apache Spark™ cluster has zero worker nodes provisioned and cannot raise a
### Solution
-[Edit your cluster configuration](/data-lab/how-to/manage-delete-data-lab/) by provisioning at least one worker node to be able to run calculations with it.
+[Edit your cluster configuration](/apache-spark/how-to/manage-delete-spark-cluster/) by provisioning at least one worker node to be able to run calculations with it.
You can provision zero worker nodes again to retain you cluster and notebook configurations while minimizing its cost.
diff --git a/pages/data-lab/troubleshooting/index.mdx b/pages/apache-spark/troubleshooting/index.mdx
similarity index 89%
rename from pages/data-lab/troubleshooting/index.mdx
rename to pages/apache-spark/troubleshooting/index.mdx
index 9dfbdfa6c6..c0797af78e 100644
--- a/pages/data-lab/troubleshooting/index.mdx
+++ b/pages/apache-spark/troubleshooting/index.mdx
@@ -19,7 +19,7 @@ productIcon: DistributedDataLabProductIcon
@@ -27,5 +27,5 @@ productIcon: DistributedDataLabProductIcon
## Clusters for Apache Spark™ troubleshooting pages
- - [Troubleshooting Clusters for Apache Spark™ execution issues](/data-lab/troubleshooting/cannot-run-data-lab)
+ - [Troubleshooting Clusters for Apache Spark™ execution issues](/apache-spark/troubleshooting/cannot-run-spark-cluster/)