@@ -42,6 +42,7 @@ protected enum Service {
BLOCK_STORAGE(8776),
METERING(8087),
TELEMETRY(8087),
SAHARA(8386),
SHARE(8786),
OBJECT_STORAGE(8800),
BARBICAN(9311),
@@ -0,0 +1,95 @@
package org.openstack4j.api.sahara;

import org.openstack4j.model.sahara.Cluster;
import org.openstack4j.model.sahara.NodeGroup;
import org.openstack4j.model.sahara.ServiceConfig;

import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;

import org.openstack4j.api.AbstractTest;
import org.openstack4j.api.Builders;
import org.testng.Assert;
import org.testng.annotations.Test;

import okhttp3.mockwebserver.RecordedRequest;

/**
 * Test cases for Sahara Cluster based Services
 *
 * @author Ekasit Kijsipongse
 */
@Test(suiteName="Sahara/Cluster")
public class ClusterTests extends AbstractTest {

    private static final String JSON_CLUSTER_CREATE_REQUEST = "/sahara/cluster_create_req.json";
    private static final String JSON_CLUSTER_CREATE_RESPONSE = "/sahara/cluster_create_resp.json";

    @Test
    public void createCluster() throws Exception {
        respondWith(JSON_CLUSTER_CREATE_RESPONSE);

        // Create a new cluster from node group template
        NodeGroup workerGroup = Builders.nodeGroup().name("worker")
                .count(2)
                .flavor("ef7f4d7f-267f-4762-abe2-729fe350256c")
                .nodeGroupTemplateId("f78dda4d-17e1-48cd-ab85-ee56261382ef")
                .build();

        NodeGroup masterGroup = Builders.nodeGroup().name("master")
                .count(1)
                .flavor("ef7f4d7f-267f-4762-abe2-729fe350256c")
                .nodeGroupTemplateId("0febf422-98ee-47e6-a1c5-60f90f1f9c96")
                .build();

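        // Per-service configurations; values may be strings or numbers (ServiceConfig stores Object values)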
        ServiceConfig hdfsConf = Builders.serviceConfig()
                .set("dfs.replication",1)
                .build();
        ServiceConfig sparkConf = Builders.serviceConfig()
                .set("spark.executor.memory","1g")
                .set("spark.executor.cores",2)
                .build();

        Cluster cluster = Builders.cluster().name("cluster-test-1")
                .hadoopVersion("1.6.2")
                .pluginName("spark")
                .image("f56cc7c5-9588-49fa-8bcd-5c5d5eda5466")
                .keypairName("tester")
                .managementNetworkId("4c065f9c-ad1b-43c4-ba1e-893d330da079")
                .addNodeGroup(workerGroup)
                .addNodeGroup(masterGroup)
                .addServiceConfig("HDFS",hdfsConf)
                .addServiceConfig("Spark",sparkConf)
                .build();


        cluster = osv2().sahara().clusters().create(cluster);

        // Check that the request is the one we expect
        RecordedRequest request = server.takeRequest();

        String requestBody = request.getBody().readUtf8();
        assertTrue(requestBody.contains("\"spark.executor.memory\" : \"1g\""));
        assertTrue(requestBody.contains("\"spark.executor.cores\" : 2"));


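        // Verify the cluster fields parsed from the mocked response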
        assertEquals("cluster-test-1", cluster.getName());
        assertEquals("1.6.2", cluster.getHadoopVersion());
        assertEquals("spark", cluster.getPluginName());
        assertEquals(2, cluster.getNodeGroups().size());
        assertEquals(2, cluster.getClusterConfigs().size());
        assertEquals(1, cluster.getClusterConfigs().get("HDFS").getConfigs().size());
        assertEquals(2, cluster.getClusterConfigs().get("Spark").getConfigs().size());
        assertEquals(2, cluster.getClusterConfigs().get("Spark").get("spark.executor.cores"));
        assertEquals("1g", cluster.getClusterConfigs().get("Spark").get("spark.executor.memory"));

    }

    @Override
    protected Service service() {
        return Service.SAHARA;
    }



}
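
For orientation only, a minimal sketch (not part of this change) of how the same builders could be driven against a live deployment instead of the mocked server. The class name, Keystone endpoint, credentials, and tenant name below are placeholders, and the catalog must expose Sahara under the data-processing service type, as in the identity fixtures below.

import org.openstack4j.api.Builders;
import org.openstack4j.api.OSClient.OSClientV2;
import org.openstack4j.model.sahara.Cluster;
import org.openstack4j.model.sahara.NodeGroup;
import org.openstack4j.openstack.OSFactory;

public class SaharaClusterExample {
    public static void main(String[] args) {
        // Placeholder endpoint and credentials, adjust to the target cloud.
        OSClientV2 os = OSFactory.builderV2()
                .endpoint("http://127.0.0.1:5000/v2.0")
                .credentials("admin", "secret")
                .tenantName("demo")
                .authenticate();

        // Node group and cluster builders exactly as exercised in the test above.
        NodeGroup worker = Builders.nodeGroup().name("worker")
                .count(2)
                .flavor("ef7f4d7f-267f-4762-abe2-729fe350256c")
                .nodeGroupTemplateId("f78dda4d-17e1-48cd-ab85-ee56261382ef")
                .build();

        Cluster cluster = Builders.cluster().name("cluster-test-1")
                .pluginName("spark")
                .hadoopVersion("1.6.2")
                .image("f56cc7c5-9588-49fa-8bcd-5c5d5eda5466")
                .addNodeGroup(worker)
                .build();

        // Creates the cluster through the Sahara (data-processing) service.
        cluster = os.sahara().clusters().create(cluster);
        System.out.println(cluster.getName());
    }
}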
15 changes: 14 additions & 1 deletion core-test/src/main/resources/identity/v2/access.json
@@ -187,6 +187,19 @@
"endpoints_links": [],
"type": "workflow",
"name": "mistral"
}, {
"endpoints": [
{
"adminURL": "http:\/\/127.0.0.1:8386\/v2",
"region": "RegionOne",
"internalURL": "http:\/\/127.0.0.1:8386\/v2",
"id": "8707e3735d4415c97ae231b4841eb1d",
"publicURL": "http:\/\/127.0.0.1:8386\/v2"
}
],
"endpoints_links": [],
"type": "data-processing",
"name": "sahara"
}
],
"user": {
@@ -209,4 +222,4 @@
"roles": ["3e9245e5660344e39328aa68fc9eb6e0", "2cce246750cf4e0da09ada919702e30c", "71713b8306ad4566996286ed8afda89e"]
}
}
}
}
28 changes: 28 additions & 0 deletions core-test/src/main/resources/identity/v3/authv3_project.json
@@ -676,6 +676,34 @@
"type": "workflow",
"id": "2241f6c6096a11e793ae92361f002671",
"name": "mistral"
},
{
"endpoints": [
{
"region_id": "RegionOne",
"url": "http://127.0.0.1:8386",
"region": "RegionOne",
"interface": "public",
"id": "8707e3735d4415c97ae231b4841eb1a"
},
{
"region_id": "RegionOne",
"url": "http://127.0.0.1:8386",
"region": "RegionOne",
"interface": "admin",
"id": "8707e3735d4415c97ae231b4841eb1b"
},
{
"region_id": "RegionOne",
"url": "http://127.0.0.1:8386",
"region": "RegionOne",
"interface": "internal",
"id": "8707e3735d4415c97ae231b4841eb1c"
}
],
"type": "data-processing",
"id": "8707e3735d4415c97ae231b4841eb1d",
"name": "sahara"
}
],
"extras": {},
29 changes: 29 additions & 0 deletions core-test/src/main/resources/sahara/cluster_create_req.json
@@ -0,0 +1,29 @@
{
"name" : "cluster-test-1",
"cluster_configs" : {
"HDFS" : {
"dfs.replication" : 1
},
"Spark" : {
"spark.executor.memory" : "1g",
"spark.executor.cores" : 2
}
},
"default_image_id" : "f56cc7c5-9588-49fa-8bcd-5c5d5eda5466",
"user_keypair_id" : "tester",
"plugin_name" : "spark",
"neutron_management_network" : "4c065f9c-ad1b-43c4-ba1e-893d330da079",
"node_groups" : [ {
"name" : "worker",
"count" : 2,
"node_group_template_id" : "f78dda4d-17e1-48cd-ab85-ee56261382ef",
"flavor_id" : "ef7f4d7f-267f-4762-abe2-729fe350256c"
}, {
"name" : "master",
"count" : 1,
"node_group_template_id" : "0febf422-98ee-47e6-a1c5-60f90f1f9c96",
"flavor_id" : "ef7f4d7f-267f-4762-abe2-729fe350256c"
} ],
"hadoop_version" : "1.6.2"
}

106 changes: 106 additions & 0 deletions core-test/src/main/resources/sahara/cluster_create_resp.json
@@ -0,0 +1,106 @@
{
"cluster": {
"provision_progress": [],
"user_keypair_id": "tester",
"updated_at": "2017-08-04T08:33:44",
"is_transient": false,
"use_autoconfig": true,
"node_groups": [
{
"volume_local_to_instance": false,
"availability_zone": "nova",
"updated_at": null,
"instances": [],
"node_group_template_id": "f78dda4d-17e1-48cd-ab85-ee56261382ef",
"volumes_per_node": 0,
"id": "36f4ecc7-e098-4fbc-826b-e645a4e0561c",
"security_groups": [
"0a3f7e8d-3769-45ac-ac74-89efaa179404"
],
"shares": [],
"node_configs": {
"HDFS": {}
},
"auto_security_group": false,
"volumes_availability_zone": null,
"volume_mount_prefix": "/volumes/disk",
"floating_ip_pool": "7c6358b6-11da-4da5-9ae6-cb42dd51f483",
"image_id": "f56cc7c5-9588-49fa-8bcd-5c5d5eda5466",
"volumes_size": null,
"is_proxy_gateway": false,
"count": 2,
"name": "worker",
"created_at": "2017-08-04T08:33:43",
"volume_type": null,
"node_processes": [
"datanode",
"slave"
],
"flavor_id": "ef7f4d7f-267f-4762-abe2-729fe350256c",
"use_autoconfig": true
},
{
"volume_local_to_instance": false,
"availability_zone": "nova",
"updated_at": null,
"instances": [],
"node_group_template_id": "0febf422-98ee-47e6-a1c5-60f90f1f9c96",
"volumes_per_node": 0,
"id": "d4a382ca-6c9c-48ef-84c2-c9ad3360cd81",
"security_groups": [
"0a3f7e8d-3769-45ac-ac74-89efaa179404"
],
"shares": [],
"node_configs": {
"HDFS": {}
},
"auto_security_group": false,
"volumes_availability_zone": null,
"volume_mount_prefix": "/volumes/disk",
"floating_ip_pool": "7c6358b6-11da-4da5-9ae6-cb42dd51f483",
"image_id": "f56cc7c5-9588-49fa-8bcd-5c5d5eda5466",
"volumes_size": null,
"is_proxy_gateway": false,
"count": 1,
"name": "master",
"created_at": "2017-08-04T08:33:43",
"volume_type": null,
"node_processes": [
"namenode",
"master"
],
"flavor_id": "ef7f4d7f-267f-4762-abe2-729fe350256c",
"use_autoconfig": true
}
],
"management_public_key": "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDaOGD3eWnT48rv3MlnF5D9z8B0Xw76IjlSt2fijRKUnlB3EAdMsi8QutO2Z+uPJXtZZsxfBvKabCpuSh5C6NWq2cjvEcb+HisXcRD5LDfJ7KUkNJRqJcHMiLMZv3WSfUSn37HXhPW09vz72Td8e0bkr7kbcbO5goc/hMoszP/Q5Yt62I6j6tgPBTr4mZuSXTlhIxg27RIvrHueWUhKL4iT3yCBgk+QU9PNRFDzHPg5ZLCAoQerOkj7yuN0iwWUvXx5McLQiLI5bB3th1Ov65rsLw+Me/WrTLHmbY2AK24gTQ8XtuLbKei84WGMkmZBQ15B27kC0uoos8EeiZcvO9cb Generated-by-Sahara\n",
"id": "c7ce1662-0ee7-4307-a7ed-a77fa1b0e667",
"trust_id": null,
"cluster_configs": {
"HDFS": {
"dfs.replication": 1
},
"Spark": {
"spark.executor.memory": "1g",
"spark.executor.cores": 2
}
},
"default_image_id": "f56cc7c5-9588-49fa-8bcd-5c5d5eda5466",
"domain_name": null,
"shares": null,
"status": "Validating",
"neutron_management_network": "4c065f9c-ad1b-43c4-ba1e-893d330da079",
"description": null,
"plugin_name": "spark",
"anti_affinity": [],
"is_public": false,
"status_description": "",
"hadoop_version": "1.6.2",
"info": {},
"cluster_template_id": null,
"name": "cluster-test-1",
"tenant_id": "31f13106bac54c1992077cca18f02c6e",
"created_at": "2017-08-04T08:33:43",
"is_protected": false
}
}
@@ -17,11 +17,11 @@ public interface ServiceConfig extends ModelEntity, Buildable<ServiceConfigBuild
* @param name the name of the parameter
* @return the value of the parameter
*/
String get(String name);
Object get(String name);

/**
* @return map of all configurations or null
*/
Map<String, String> getConfigs();
Map<String, Object> getConfigs();

}
@@ -21,6 +21,6 @@ public interface ServiceConfigBuilder extends Builder<ServiceConfigBuilder, Serv
* @param value value of the parameter
* @return ServiceConfigBuilder
*/
ServiceConfigBuilder set(String name, String value);
ServiceConfigBuilder set(String name, Object value);

}
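
The widening from String to Object in ServiceConfig and ServiceConfigBuilder is what lets numeric settings such as spark.executor.cores stay numbers and be serialized unquoted, as the request-body assertions in ClusterTests above expect. A small sketch of the resulting behaviour (illustrative class name; the builder and accessor calls are the ones shown in this diff):

import java.util.Map;

import org.openstack4j.api.Builders;
import org.openstack4j.model.sahara.ServiceConfig;

public class ServiceConfigExample {
    public static void main(String[] args) {
        ServiceConfig spark = Builders.serviceConfig()
                .set("spark.executor.memory", "1g")  // String value, serialized as "1g"
                .set("spark.executor.cores", 2)      // Integer value, serialized as 2 (no quotes)
                .build();

        Object cores = spark.get("spark.executor.cores");   // Integer 2, no longer coerced to a String
        Map<String, Object> configs = spark.getConfigs();    // both entries, values typed as Object
        System.out.println(cores + " of " + configs.size() + " settings");
    }
}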
@@ -14,23 +14,23 @@
*/

@JsonIgnoreProperties(ignoreUnknown=true)
public class SaharaServiceConfig extends HashMap<String,String> implements ServiceConfig {
public class SaharaServiceConfig extends HashMap<String,Object> implements ServiceConfig {

public static final long serialVersionUID = 1L;

/**
* {@inheritDoc}
*/
@Override
public String get(String name) {
public Object get(String name) {
return super.get(name);
}

/**
* {@inheritDoc}
*/
@Override
public Map<String,String> getConfigs() {
public Map<String,Object> getConfigs() {
return this;
}

@@ -63,7 +63,7 @@ public static class ConcreteServiceConfigBuilder implements ServiceConfigBuilder
}

@Override
public ServiceConfigBuilder set(String name, String value) {
public ServiceConfigBuilder set(String name, Object value) {
m.put(name,value);
return this;
}