This repository was archived by the owner on Aug 19, 2020. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 35
Expand file tree
/
Copy path5-run-spark-mesos-dockerworker-ipython.sh
More file actions
executable file
·73 lines (54 loc) · 2.24 KB
/
5-run-spark-mesos-dockerworker-ipython.sh
File metadata and controls
executable file
·73 lines (54 loc) · 2.24 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
#!/bin/bash
# Launch the Spark-on-Mesos dockerworker IPython client container.
# Usage: ./5-run-spark-mesos-dockerworker-ipython.sh username mesos://ip:port
#
# Fail fast on errors and failed pipelines (no -u: sourced helper file
# defines the __* variables this script relies on).
set -eo pipefail

# import shared variables and helper functions
# (defines __image_client_mesos_dockerworker, __host_dir_*, dns_detect, ...)
source variables_and_helpers.sh

# set user (first positional argument)
__spark_user=${1:-}
if [[ -z "$__spark_user" ]]; then
  echo "You must provide a user. Usage:" >&2
  echo "$0 username mesos://ip:port" >&2
  exit 1
fi
# set master
__spark_master=$2
if [ "$__spark_master" == "" ]; then
echo "You must provide a master. Usage:"
echo "$0 username mesos://ip:port"
exit 1
fi
# docker image to launch (declared in variables_and_helpers.sh)
__image="$__image_client_mesos_dockerworker"

# update repo and images
#git pull origin master && \
#docker pull $__image # alternatively: ./1-build.sh

# detect the host's DNS server for internal name resolution.
# NOTE(review): __dns is never referenced later in this script -- presumably
# a leftover for a --dns docker flag from before --net="host" was adopted;
# confirm intent before removing.
__dns="$(dns_detect)"
# Additional spark-submit options passed into the container via
# SPARK_WORKER_CONFIG below.
# FIX: spark.shuffle.io.preferDirectBufs is a boolean conf -- the original
# value "15" was invalid (likely confused with another setting); "true" is
# Spark's documented default.
# NOTE(review): spark.akka.frameSize is pre-2.0 Spark (Akka RPC); newer
# versions use spark.rpc.message.maxSize -- fine for the Spark vintage this
# repo targets.
__spark_worker_config="--executor-memory 32G \
--conf spark.executor.memory=32G \
--conf spark.driver.memory=32G \
--conf spark.driver.maxResultSize=8g \
--conf spark.serializer=org.apache.spark.serializer.KryoSerializer \
--conf spark.core.connection.ack.wait.timeout=600 \
--conf spark.worker.cleanup.enabled=true \
--conf spark.shuffle.io.preferDirectBufs=true \
--conf spark.akka.frameSize=100"
# run container
echo "starting $__image..."
# NOTE: with --net="host" Docker ignores --publish (the container shares the
# host network stack directly); the flag is kept only to document the intended
# port until the TODO below is resolved.
# All mount paths and env values are quoted so paths containing spaces work.
__container=$(docker run -d \
  --net="host" \
  --publish=8888:8888 \
  --env "SPARK_MASTER=$__spark_master" \
  --env "SPARK_WORKER_CONFIG=$__spark_worker_config" \
  --env "CONTAINER_USER=$__spark_user" \
  --env "IPYTHON_OPTS=notebook /ipython" \
  --env "LANG=en_US.UTF-8" \
  --volume="$__host_dir_hadoop_conf:/etc/hadoop/conf" \
  --volume="$__host_dir_hive_conf:/etc/hive/conf" \
  --volume="$__host_dir_ipython_notebook:/ipython" \
  --volume="$__host_dir_data:/data" \
  "$__image")
#TODO: determine how to forward ports instead of binding --net="host" above
# forward host ports to container
#host_forward_multiple_ports_to_container $__container
# notify user
echo "Visit IPython notebook at http://$__hostname:8888"