티스토리 뷰
728x90
/**
 * Sends an HTTP request with a JSON body to the (same-origin) Zeppelin server,
 * including session cookies so the API call is authenticated.
 *
 * @param {string} method - HTTP method, e.g. "PUT" (update) or "POST" (create).
 * @param {string} url - Target URL, e.g. "/api/interpreter/setting/<name>".
 * @param {string|null} payload - Request body, already JSON-stringified by the caller.
 */
function requestUtils(method, url, payload) {
  const xhr = new XMLHttpRequest();
  xhr.open(method, url, true);
  // Send cookies with the request — the Zeppelin REST API needs the login session.
  xhr.withCredentials = true;
  xhr.setRequestHeader("Content-Type", "application/json");
  xhr.onreadystatechange = function () {
    if (this.readyState !== XMLHttpRequest.DONE) {
      return;
    }
    if (this.status === 200) {
      console.log(this);
      console.log(url, payload);
    } else {
      // The original handler silently dropped failures; report them instead.
      console.error("Request failed:", this.status, method, url);
    }
  };
  xhr.send(payload);
}
// Name of the Zeppelin interpreter setting to create/update, and the
// Hive staging directory it should use.
const NAME = "spark-seunggabi";
const STAGINGDIR = "s3://log/hive-staging/";

// PUT to /api/interpreter/setting/<name> updates an existing setting.
// (To create a new one instead, use POST to /api/interpreter/setting/.)
const METHOD = "PUT";
const URL = "/api/interpreter/setting/" + NAME;

// Full interpreter-setting body expected by the Zeppelin REST API.
// NOTE: descriptions are Zeppelin's own property documentation, kept verbatim.
const PAYLOAD = {
  name: NAME,
  group: "spark",
  option: {
    remote: true,
    port: -1,
    perNote: "shared",
    perUser: "shared",
    isExistingProcess: false,
    setPermission: false,
    owners: [],
    isUserImpersonate: false,
  },
  properties: {
    "spark.master": { name: "spark.master", value: "yarn", type: "string", description: "Spark master uri. local | yarn-client | yarn-cluster | spark master address of standalone mode, ex) spark://master_host:7077" },
    "spark.submit.deployMode": { name: "spark.submit.deployMode", value: "cluster", type: "string", description: "The deploy mode of Spark driver program, either \"client\" or \"cluster\", Which means to launch driver program locally (\"client\") or remotely (\"cluster\") on one of the nodes inside the cluster." },
    "spark.app.name": { name: "spark.app.name", value: NAME, type: "string", description: "The name of spark application." },
    "spark.driver.cores": { name: "spark.driver.cores", value: "5", type: "number", description: "Number of cores to use for the driver process, only in cluster mode." },
    "spark.driver.memory": { name: "spark.driver.memory", value: "10g", type: "string", description: "Amount of memory to use for the driver process, i.e. where SparkContext is initialized, in the same format as JVM memory strings with a size unit suffix (\"k\", \"m\", \"g\" or \"t\") (e.g. 512m, 2g)." },
    "spark.executor.cores": { name: "spark.executor.cores", value: "5", type: "number", description: "The number of cores to use on each executor" },
    "spark.executor.memory": { name: "spark.executor.memory", value: "10g", type: "string", description: "Executor memory per worker instance. ex) 512m, 32g" },
    "zeppelin.spark.useHiveContext": { name: "zeppelin.spark.useHiveContext", value: true, type: "checkbox", description: "Use HiveContext instead of SQLContext if it is true. \nEnable hive for SparkSession." },
    "zeppelin.spark.run.asLoginUser": { name: "zeppelin.spark.run.asLoginUser", value: true, type: "checkbox", description: "Whether run spark job as the zeppelin login user, it is only applied when running spark job in hadoop yarn cluster and shiro is enabled" },
    "zeppelin.spark.printREPLOutput": { name: "zeppelin.spark.printREPLOutput", value: true, type: "checkbox", description: "Print REPL output" },
    "zeppelin.spark.maxResult": { name: "zeppelin.spark.maxResult", value: "100000", type: "number", description: "Max number of result to display." },
    "zeppelin.spark.enableSupportedVersionCheck": { name: "zeppelin.spark.enableSupportedVersionCheck", value: true, type: "checkbox", description: "Whether checking supported spark version. Developer only setting, not for production use" },
    "zeppelin.spark.ui.hidden": { name: "zeppelin.spark.ui.hidden", value: false, type: "checkbox", description: "Whether hide spark ui in zeppelin ui" },
    "spark.webui.yarn.useProxy": { name: "spark.webui.yarn.useProxy", value: false, type: "checkbox", description: "whether use yarn proxy url as spark weburl, e.g. http://localhost:8088/proxy/application_1583396598068_0004" },
    "zeppelin.spark.scala.color": { name: "zeppelin.spark.scala.color", value: true, type: "checkbox", description: "Whether enable color output of spark scala interpreter" },
    "zeppelin.spark.deprecatedMsg.show": { name: "zeppelin.spark.deprecatedMsg.show", value: true, type: "checkbox", description: "Whether show the spark deprecated message, spark 2.2 and before are deprecated. \nZeppelin will display warning message by default" },
    "zeppelin.spark.concurrentSQL": { name: "zeppelin.spark.concurrentSQL", value: true, type: "checkbox", description: "Execute multiple SQL concurrently if set true." },
    "zeppelin.spark.concurrentSQL.max": { name: "zeppelin.spark.concurrentSQL.max", value: "10", type: "number", description: "Max number of SQL concurrently executed" },
    "zeppelin.spark.sql.stacktrace": { name: "zeppelin.spark.sql.stacktrace", value: true, type: "checkbox", description: "Show full exception stacktrace for SQL queries if set to true." },
    "zeppelin.spark.sql.interpolation": { name: "zeppelin.spark.sql.interpolation", value: false, type: "checkbox", description: "Enable ZeppelinContext variable interpolation into spark sql" },
    "PYSPARK_PYTHON": { name: "PYSPARK_PYTHON", value: "python", type: "string", description: "Python binary executable to use for PySpark in both driver and workers (default is python2.7 if available, otherwise python). Property `spark.pyspark.python` take precedence if it is set" },
    "PYSPARK_DRIVER_PYTHON": { name: "PYSPARK_DRIVER_PYTHON", value: "python", type: "string", description: "Python binary executable to use for PySpark in driver only (default is `PYSPARK_PYTHON`). \nProperty `spark.pyspark.driver.python` take precedence if it is set" },
    "zeppelin.pyspark.useIPython": { name: "zeppelin.pyspark.useIPython", value: true, type: "checkbox", description: "Whether use IPython when it is available" },
    "zeppelin.R.knitr": { name: "zeppelin.R.knitr", value: true, type: "checkbox", description: "Whether use knitr or not" },
    "zeppelin.R.cmd": { name: "zeppelin.R.cmd", value: "R", type: "string", description: "R binary executable path" },
    "zeppelin.R.image.width": { name: "zeppelin.R.image.width", value: "100%", type: "number", description: "Image width of R plotting" },
    "zeppelin.R.render.options": { name: "zeppelin.R.render.options", value: "out.format = 'html', comment = NA, echo = FALSE, results = 'asis', message = F, warning = F, fig.retina = 2", type: "textarea", description: "" },
    "zeppelin.R.shiny.portRange": { name: "zeppelin.R.shiny.portRange", value: ":", type: "string", description: "Shiny app would launch a web app at some port, this property is to specify the portRange via format '<start>:<end>', e.g. '5000:5001'. By default it is ':' which means any port" },
    "zeppelin.kotlin.shortenTypes": { name: "zeppelin.kotlin.shortenTypes", value: true, type: "checkbox", description: "Show short types instead of full, e.g. \nList<String> or kotlin.collections.List<kotlin.String>" },
    "spark.dynamicAllocation.initialExecutors": { name: "spark.dynamicAllocation.initialExecutors", value: "1", type: "textarea" },
    "spark.dynamicAllocation.enabled": { name: "spark.dynamicAllocation.enabled", value: "true", type: "textarea" },
    "spark.dynamicAllocation.minExecutors": { name: "spark.dynamicAllocation.minExecutors", value: "1", type: "textarea" },
    "spark.dynamicAllocation.maxExecutors": { name: "spark.dynamicAllocation.maxExecutors", value: "300", type: "textarea" },
    "spark.sql.hive.convertMetastoreOrc": { name: "spark.sql.hive.convertMetastoreOrc", value: "false", type: "textarea" },
    "spark.driver.maxResultSize": { name: "spark.driver.maxResultSize", value: "10g", type: "textarea" },
    "spark.sql.parquet.mergeSchema": { name: "spark.sql.parquet.mergeSchema", value: "true", type: "textarea" },
    "spark.hadoop.hive.exec.stagingdir": { name: "spark.hadoop.hive.exec.stagingdir", value: STAGINGDIR, type: "textarea" },
    "spark.sql.autoBroadcastJoinThreshold": { name: "spark.sql.autoBroadcastJoinThreshold", value: "-1", type: "textarea" },
  },
  dependencies: [],
};

requestUtils(METHOD, URL, JSON.stringify(PAYLOAD));
728x90
'공부' 카테고리의 다른 글
[spark] Pyspark: Serialized task exceeds max allowed. Consider increasing spark.rpc.message.maxSize or using broadcast variables for large values (0) | 2023.02.23 |
---|---|
[python] mecab (형태소 분석기) (0) | 2023.02.23 |
[yarn] cost tool `migration-hadoop-to-emr-tco-simulator` (0) | 2023.02.16 |
[glue] crawler (load s3) (0) | 2023.02.16 |
[yarn] yarn.resourcemanager.am.max-attempts=1 (0) | 2023.02.16 |
댓글
250x250
공지사항
최근에 올라온 글
최근에 달린 댓글
- Total
- Today
- Yesterday
TAG
- 테슬라 레퍼럴 적용 확인
- follower
- 테슬라 리퍼럴 코드
- Bot
- 테슬라 리퍼럴 코드 생성
- 메디파크 내과 전문의 의학박사 김영수
- 레퍼럴
- Kluge
- 테슬라 추천
- 연애학개론
- 어떻게 능력을 보여줄 것인가?
- 테슬라 레퍼럴 코드 확인
- 개리마커스
- wlw
- 김달
- 테슬라 리퍼럴 코드 혜택
- 테슬라 크레딧 사용
- 유투브
- 테슬라
- 인스타그램
- 팔로워 수 세기
- 모델 Y 레퍼럴
- 할인
- 테슬라 레퍼럴
- 클루지
- 책그림
- 모델y
- COUNT
일 | 월 | 화 | 수 | 목 | 금 | 토 |
---|---|---|---|---|---|---|
1 | 2 | 3 | 4 | 5 | ||
6 | 7 | 8 | 9 | 10 | 11 | 12 |
13 | 14 | 15 | 16 | 17 | 18 | 19 |
20 | 21 | 22 | 23 | 24 | 25 | 26 |
27 | 28 | 29 | 30 | 31 |
글 보관함