-
Notifications
You must be signed in to change notification settings - Fork 26
/
Makefile
128 lines (94 loc) · 3.79 KB
/
Makefile
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
# Build configuration --------------------------------------------------------
# bash is required: recipes use pushd/popd, `source` and brace expansion.
SHELL := /bin/bash
# ':=' expands once at parse time, so `cat version.txt` runs a single time
# instead of on every reference to $(VERSION).
VERSION := $(shell cat version.txt)
SCALA_TARGET_VERSION := 2.11
# Overridable from the command line / environment ('?=' keeps an existing value).
CASSANDRA_VERSION ?= 3.11.13
PYTHON := python3.7
PIP := pip3

# Default-goal stub: prerequisites are attached further down (all: clean lint dist).
# The no-op ':' recipe expands $(CASSANDRA_VERSION) so a malformed override
# surfaces immediately.
all:;: '$(CASSANDRA_VERSION)'
# None of these targets correspond to files on disk; declare them phony so a
# stray file with the same name can never make them look "up to date".
.PHONY: all clean clean-pyc clean-dist dist test test-travis lint publish \
	install-venv install-cassandra-driver install-ccm \
	start-cassandra stop-cassandra \
	test-python test-scala test-integration test-integration-setup \
	test-integration-teardown test-integration-matrix \
	test-integration-spark-2.4.8 \
	python-tox scala-style release-staging release-prod

# Remove every generated artefact.
clean: clean-dist clean-pyc

# Delete Python bytecode and editor backup files.
clean-pyc:
	find . -name '*.pyc' -exec rm -f {} +
	find . -name '*.pyo' -exec rm -f {} +
	find . -name '*~' -exec rm -f {} +
	find . -name '__pycache__' -exec rm -fr {} +

# Delete build output, packaging artefacts, tox/ccm state and the virtualenv.
# python/dist/ is included so `twine upload dist/*` can never pick up stale
# wheels from a previous release.
clean-dist:
	rm -rf target
	rm -rf python/build/
	rm -rf python/dist/
	rm -rf python/*.egg-info
	rm -rf .tox
	rm -rf .ccm
	rm -rf venv
# Create the virtualenv (only if missing) and install runtime + dev deps.
# Each recipe line runs in its own shell, so `. venv/bin/activate` on a line
# of its own would have no effect on the following lines — the venv's pip is
# therefore invoked by path instead of relying on activation.
install-venv:
	test -d venv || virtualenv venv --python=$(PYTHON)
	venv/bin/$(PIP) install -r python/requirements.txt
	venv/bin/$(PIP) install -r python/requirements_dev.txt

# Cassandra Python driver, needed by the integration tests.
install-cassandra-driver: install-venv
	venv/bin/$(PIP) install cassandra-driver

# ccm (Cassandra Cluster Manager), used to run a local test cluster.
install-ccm: install-venv
	venv/bin/$(PIP) install ccm
# Ensure the ccm test cluster exists and is running.
# NOTE(review): `||` and `&&` have equal precedence and associate left in the
# shell, so the last line parses as
#     (ccm status || ccm create ...) && ccm start
# i.e. create the cluster when it does not exist (-s also starts it), then
# start it in every case — which also brings up an existing-but-stopped
# cluster. Do not "fix" with parentheses around the right-hand side; that
# would skip the start for an existing stopped cluster.
start-cassandra: install-ccm
	mkdir -p .ccm
	venv/bin/ccm status || venv/bin/ccm create pyspark_cassandra_test -v $(CASSANDRA_VERSION) -n 1 -s && venv/bin/ccm start

# Stop and delete the ccm test cluster (ccm remove tears the cluster down).
stop-cassandra:
	venv/bin/ccm remove
# Aggregate test entry point.
test: test-python test-scala test-integration

# Placeholders — no standalone Python/Scala unit-test suites are wired up yet.
test-python:

test-scala:

# Full integration cycle: bring Cassandra up, run the version matrix, tear down.
test-integration: test-integration-setup test-integration-matrix test-integration-teardown

test-integration-setup: start-cassandra

test-integration-teardown: stop-cassandra

# One sub-target per supported Spark version.
test-integration-matrix: \
	install-cassandra-driver \
	test-integration-spark-2.4.8

# CI entry point: the Spark version and package type come from the
# environment; '$$' defers expansion to the shell at recipe run time.
test-travis: install-cassandra-driver
	$(call test-integration-for-version,$$SPARK_VERSION,$$SPARK_PACKAGE_TYPE)

test-integration-spark-2.4.8:
	$(call test-integration-for-version,2.4.8,hadoop2.7)
# test-integration-for-version(spark-version, package-type)
# Downloads the matching Spark distribution into lib/ (cached across runs via
# the `test -d` check) and runs python/tests.py through that distribution's
# spark-submit against the local ccm cluster.
define test-integration-for-version
	echo ======================================================================
	echo testing integration with spark-$1
	# curl -f: fail on HTTP errors instead of piping an error page into tar
	# (which would otherwise leave a bogus dir that the cache check trusts);
	# -L: follow archive.apache.org redirects; -sS: quiet but show errors.
	mkdir -p lib && test -d lib/spark-$1-bin-$2 || \
		(pushd lib && curl -fsSL https://archive.apache.org/dist/spark/spark-$1/spark-$1-bin-$2.tgz | tar xz && popd)
	cp log4j.properties lib/spark-$1-bin-$2/conf/
	# '&&' (not ';') so a missing venv aborts instead of running spark-submit
	# without the test dependencies.
	source venv/bin/activate && \
	lib/spark-$1-bin-$2/bin/spark-submit \
		--master local[*] \
		--driver-memory 512m \
		--conf spark.cassandra.connection.host="localhost" \
		--jars target/scala-$(SCALA_TARGET_VERSION)/pyspark-cassandra-assembly-$(VERSION).jar \
		--py-files target/scala-$(SCALA_TARGET_VERSION)/pyspark-cassandra-assembly-$(VERSION).jar \
		python/tests.py
	echo ======================================================================
endef
# Build the assembly jar with sbt, then append the Python sources to it so the
# single jar works for both --jars and --py-files.
# '&&' (not ';') after cd: if `cd python` fails, zip must not run against the
# wrong directory.
dist: clean-pyc
	sbt -batch assembly
	cd python && \
	find . -mindepth 2 -name '*.py' -print | \
	zip ../target/scala-$(SCALA_TARGET_VERSION)/pyspark-cassandra-assembly-$(VERSION).jar -@

# Attach prerequisites to the default goal (its recipe is declared up top).
all: clean lint dist
# Build and publish the spark-packages distribution.
publish: clean
	# use spark packages to create the distribution
	sbt -batch spDist
	# push the python source files into the jar ('&&' so a failed cd aborts
	# instead of zipping from the wrong directory)
	cd python && \
	find . -mindepth 2 -name '*.py' -print | \
	zip ../target/scala-$(SCALA_TARGET_VERSION)/pyspark-cassandra_$(SCALA_TARGET_VERSION)-$(VERSION).jar -@
	# copy it to the right name, and update the jar in the zip
	cp target/scala-$(SCALA_TARGET_VERSION)/pyspark-cassandra{_$(SCALA_TARGET_VERSION),}-$(VERSION).jar
	cd target/scala-$(SCALA_TARGET_VERSION) && \
	zip ../pyspark-cassandra-$(VERSION).zip pyspark-cassandra-$(VERSION).jar
	# send the package to spark-packages
	spark-package publish -c ".sp-creds.txt" -n "anguenot/pyspark-cassandra" -v "$(VERSION)" -f . -z target/pyspark-cassandra-$(VERSION).zip
# Run every style check (Python via tox/flake8, Scala via scalastyle).
lint: python-tox scala-style

python-tox: ## check style with flake8
	tox

scala-style: ## check style with scalastyle
	sbt -batch scalastyle
# Build sdist + wheel and upload to the staging PyPI index.
# python/dist is wiped first: `twine upload dist/*` globs everything in the
# directory, so leftover artefacts from a previous version would otherwise be
# (re)uploaded.
release-staging: clean ## package and upload a release to staging PyPi
	cd python && rm -rf dist && $(PYTHON) setup.py sdist bdist_wheel
	cd python && twine upload dist/* -r staging

# Build sdist + wheel and upload to the production PyPI index.
release-prod: clean ## package and upload a release to prod PyPi
	cd python && rm -rf dist && $(PYTHON) setup.py sdist bdist_wheel
	cd python && twine upload dist/* -r prod