目前仅支持
Yarn-Per-Job
模式,即每个 SQL 的执行独占一个 Yarn 容器。
<dependency>
<groupId>com.isxcode.star</groupId>
<artifactId>star-client</artifactId>
<version>1.2.0</version>
</dependency>
star:
check-servers: true
servers:
default:
host: isxcode
port: 30155
key: acorn-key
package com.isxcode.star.demo.controller;
import com.isxcode.star.api.pojo.StarResponse;
import com.isxcode.star.api.pojo.dto.YarnJobConfig;
import com.isxcode.star.client.template.StarTemplate;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.web.bind.annotation.*;
import java.util.HashMap;
import java.util.Map;
@RestController
@RequestMapping
@RequiredArgsConstructor
@Slf4j
public class ExampleController {

    private final StarTemplate starTemplate;

    /**
     * Submits a SQL statement for execution as a Spark job.
     *
     * @param sql the SQL statement to run
     * @return the submission response, carrying the Yarn application id
     */
    @GetMapping("/execute")
    public StarResponse execute(@RequestParam String sql) {
        // Spark runtime environment and resource settings for this job.
        Map<String, String> sparkConf = new HashMap<>();
        sparkConf.put("spark.executor.memory", "2g");
        sparkConf.put("spark.driver.memory", "1g");
        sparkConf.put("hive.metastore.uris", "thrift://localhost:9083");

        return starTemplate.build().sql(sql).sparkConfig(sparkConf).execute();
    }

    /**
     * Fetches the result data of a previously submitted job.
     *
     * @param applicationId the Yarn application id returned by {@link #execute(String)}
     * @return the response containing column names and row data
     */
    @GetMapping("/getData")
    public StarResponse getData(@RequestParam String applicationId) {
        return starTemplate.build().applicationId(applicationId).getData();
    }
}
{
"code": "200",
"msg": "提交成功",
"data": {
"applicationId": "application_1671005804173_0001"
}
}
{
"code": "200",
"msg": "获取数据成功",
"data": {
"columnNames": [
"username",
"age",
"birth"
],
"dataList": [
[
"ispong",
"18",
"2020-12-12"
]
]
}
}
mvn clean package -DskipTests
docker build -t isxcode/spark-star:test .
docker run -d isxcode/spark-star:test
Thanks to JetBrains for the free Open Source license