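# GitLab CI configuration for this repository: every job runs on an
# ubuntu:xenial image and exercises the python, python_hadoop, scalding
# and pig sub-projects.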
image: ubuntu:xenial
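# Shared setup for all jobs: register the sbt apt repository plus the
# deadsnakes (Python 3.7) and poppler PPAs, install the build toolchain,
# and make pipenv available.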
before_script:
  - apt update -qy
  - apt install -y apt-transport-https software-properties-common
  - echo "deb https://dl.bintray.com/sbt/debian /" | tee -a /etc/apt/sources.list.d/sbt.list
  - apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 2EE0EA64E40A89B84B2DF73499E82A75642AC823
  - add-apt-repository -y ppa:deadsnakes/ppa
  - add-apt-repository -y ppa:cran/poppler
  - apt update -qy
  - apt install -y python3-dev python3-pip python3-wheel libjpeg-dev openjdk-8-jdk-headless sbt libpq-dev python-dev python3.7 python3.7-dev python3.7-venv python3.7-distutils pkg-config python3-pytest git libsnappy-dev libsodium-dev cmake libpoppler-cpp-dev
  - pip3 install pipenv
  - pipenv --version
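# pipenv (via click) refuses to run under a non-UTF-8 locale on Python 3,
# so force a UTF-8 locale for all jobs.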
variables:
  LC_ALL: "C.UTF-8"
  LANG: "C.UTF-8"
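# Run the Python test suite with coverage from the pinned Pipfile.lock
# (--deploy fails the job if the lock file is out of date).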
test_python:
  script:
    - cd python
    - pipenv install --dev --deploy
    - pipenv run pytest --cov
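# Same as test_python, but for the Hadoop-facing Python code.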
test_python_hadoop:
  script:
    - cd python_hadoop
    - pipenv install --dev --deploy
    - pipenv run pytest --cov
# Needs fixing: an upstream change broke dependency resolution of
# com.hadoop.gplcompression#hadoop-lzo;0.4.16 (fails with java.lang.NullPointerException).
test_scalding:
  when: manual
  script:
    - ./please -h
    - cd scalding
    - sbt -mem 1024 test
    - sbt -mem 1024 assembly
# Needs fixing
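# Fetch a local Hadoop distribution, then run the Pig tests with JAVA_HOME
# derived from the resolved java binary.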
test_pig:
  when: manual
  script:
    - ./fetch_hadoop.sh
    - cd pig
    - pipenv install --dev --deploy
    - JAVA_HOME=$(readlink -f /usr/bin/java | sed "s:bin/java::") pipenv run pytest