Skip to content

Commit 91bf57f

Browse files
committed
Code review changes update
1 parent 912c0fd commit 91bf57f

File tree

1 file changed

+38
-39
lines changed

1 file changed

+38
-39
lines changed

.github/workflows/build.yml

Lines changed: 38 additions & 39 deletions
Original file line number | Diff line number | Diff line change
@@ -19,7 +19,6 @@ jobs:
1919
with:
2020
nix_path: nixpkgs=./nixpkgs.nix
2121

22-
# - name: Build and test with Bazel
2322
- run: echo "build --host_platform=@rules_haskell//haskell/platforms:linux_x86_64_nixpkgs" > .bazelrc.local
2423
- run: nix-shell --pure --run 'bazel build //apps/hello:sparkle-example-hello_deploy.jar'
2524
- run: nix-shell --pure --run 'bazel build //apps/rdd-ops:sparkle-example-rddops_deploy.jar'
@@ -44,50 +43,50 @@ jobs:
4443
- run: nix-shell --pure --run "bazel run spark-submit-with-data -- --packages io.delta:delta-core_2.11:0.4.0,io.projectglow:glow-spark2_2.11:1.1.2 $(pwd)/bazel-bin/apps/deltalake-glow/sparkle-example-deltalake-glow_deploy.jar"
4544
- run: spark-submit bazel-bin/apps/osthreads/sparkle-example-osthreads_deploy.jar | tee out.txt || grep "Job | pool | start time (s) | end time (s)" out.txt
4645

47-
# build_and_test_with_macos:
48-
# name: Build and Test with MacOs Runner
49-
# runs-on: macos-11
50-
# steps:
46+
build_and_test_with_macos:
47+
name: Build and Test with MacOs Runner
48+
runs-on: macos-11
49+
steps:
5150

52-
# - name: Checkout
53-
# uses: actions/checkout@v2
51+
- name: Checkout
52+
uses: actions/checkout@v2
5453

55-
# - name: Install NixOS
56-
# uses: cachix/install-nix-action@v18
57-
# with:
58-
# nix_path: nixpkgs=./nixpkgs.nix
54+
- name: Install NixOS
55+
uses: cachix/install-nix-action@v18
56+
with:
57+
nix_path: nixpkgs=./nixpkgs.nix
5958

60-
# - name: Install cachix
61-
# uses: cachix/cachix-action@v10
62-
# with:
63-
# name: tweag
59+
- name: Install cachix
60+
uses: cachix/cachix-action@v10
61+
with:
62+
name: tweag
6463

65-
# - name: Run cachix
66-
# run: cachix watch-store tweag &
64+
- name: Run cachix
65+
run: cachix watch-store tweag &
6766

68-
# - name: Configure
69-
# run: mkdir -p ~/repo-cache ~/disk-cache
67+
- name: Configure
68+
run: mkdir -p ~/repo-cache ~/disk-cache
7069

71-
# - name: Prefetch Stackage snapshot
72-
# run: nix-shell --pure --run "cmd='bazel fetch @stackage//... $BAZEL_ARGS'; \$cmd || \$cmd || \$cmd"
70+
- name: Prefetch Stackage snapshot
71+
run: nix-shell --pure --run "cmd='bazel fetch @stackage//... $BAZEL_ARGS'; \$cmd || \$cmd || \$cmd"
7372

74-
# - name: Build all
75-
# run: |
76-
# while true; do echo "."; sleep 60; done &
77-
# nix-shell --pure --run "bazel build //apps/hello:sparkle-example-hello_deploy.jar $BAZEL_ARGS"
78-
# nix-shell --pure --run "bazel build //apps/rdd-ops:sparkle-example-rddops_deploy.jar $BAZEL_ARGS"
79-
# nix-shell --pure --run "bazel build //apps/osthreads:sparkle-example-osthreads_deploy.jar $BAZEL_ARGS"
73+
- name: Build all
74+
run: |
75+
while true; do echo "."; sleep 60; done &
76+
nix-shell --pure --run "bazel build //apps/hello:sparkle-example-hello_deploy.jar $BAZEL_ARGS"
77+
nix-shell --pure --run "bazel build //apps/rdd-ops:sparkle-example-rddops_deploy.jar $BAZEL_ARGS"
78+
nix-shell --pure --run "bazel build //apps/osthreads:sparkle-example-osthreads_deploy.jar $BAZEL_ARGS"
8079
81-
# - name: Install Apache Spark and Hadoop
82-
# run: |
83-
# curl -OL https://repo1.maven.org/maven2/org/slf4j/slf4j-api/${SFL4J_VERSION}/slf4j-api-${SFL4J_VERSION}.jar
84-
# curl -OL https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-without-hadoop.tgz
85-
# tar -xzf spark-${SPARK_VERSION}-bin-without-hadoop.tgz
86-
# curl -OL https://archive.apache.org/dist/hadoop/core/hadoop-${HADOOP_VERSION}/hadoop-${HADOOP_VERSION}.tar.gz
87-
# tar -xzf hadoop-${HADOOP_VERSION}.tar.gz
80+
- name: Install Apache Spark and Hadoop
81+
run: |
82+
curl -OL https://repo1.maven.org/maven2/org/slf4j/slf4j-api/${SFL4J_VERSION}/slf4j-api-${SFL4J_VERSION}.jar
83+
curl -OL https://archive.apache.org/dist/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-without-hadoop.tgz
84+
tar -xzf spark-${SPARK_VERSION}-bin-without-hadoop.tgz
85+
curl -OL https://archive.apache.org/dist/hadoop/core/hadoop-${HADOOP_VERSION}/hadoop-${HADOOP_VERSION}.tar.gz
86+
tar -xzf hadoop-${HADOOP_VERSION}.tar.gz
8887
89-
# - name: Run tests
90-
# run: |
91-
# export SPARK_DIST_CLASSPATH=$PWD/slf4j-api-${SFL4J_VERSION}.jar:$(hadoop-${HADOOP_VERSION}/bin/hadoop classpath)
92-
# export PATH="$PWD/spark-${SPARK_VERSION}-bin-without-hadoop/bin:$PATH"
93-
# spark-submit -v --executor-cores 1 --packages com.amazonaws:aws-java-sdk:1.11.920,org.apache.hadoop:hadoop-aws:2.8.4 bazel-bin/apps/hello/sparkle-example-hello_deploy.jar
88+
- name: Run tests
89+
run: |
90+
export SPARK_DIST_CLASSPATH=$PWD/slf4j-api-${SFL4J_VERSION}.jar:$(hadoop-${HADOOP_VERSION}/bin/hadoop classpath)
91+
export PATH="$PWD/spark-${SPARK_VERSION}-bin-without-hadoop/bin:$PATH"
92+
spark-submit -v --executor-cores 1 --packages com.amazonaws:aws-java-sdk:1.11.920,org.apache.hadoop:hadoop-aws:2.8.4 bazel-bin/apps/hello/sparkle-example-hello_deploy.jar

0 commit comments

Comments
 (0)