Date:      Sat, 20 Jun 2020 07:23:09 GMT
From:      pkg-fallout@FreeBSD.org
To:        yuri@FreeBSD.org
Cc:        pkg-fallout@FreeBSD.org
Subject:   [package - 121i386-quarterly][devel/spark] Failed for apache-spark-2.1.1_1 in build
Message-ID:  <202006200723.05K7N8QO035381@beefy4.nyi.freebsd.org>

You are receiving this mail as a port that you maintain
is failing to build on the FreeBSD package build server.
Please investigate the failure and submit a PR to fix
the build.
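
If you want to reproduce the failure locally before filing the PR, a
poudriere test build along these lines should give a comparable
12.1/i386 quarterly environment (the jail and ports tree names below are
only examples and assume you have created them yourself):

  # one-time setup of a 12.1-RELEASE i386 build jail (example name)
  poudriere jail -c -j 121i386 -v 12.1-RELEASE -a i386
  # test-build the port against an existing ports tree named "quarterly"
  poudriere testport -j 121i386 -p quarterly devel/spark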

Maintainer:     yuri@FreeBSD.org
Last committer: antoine@FreeBSD.org
Ident:          $FreeBSD: branches/2020Q2/devel/spark/Makefile 526925 2020-02-23 15:25:53Z antoine $
Log URL:        http://beefy4.nyi.freebsd.org/data/121i386-quarterly/539614/logs/apache-spark-2.1.1_1.log
Build URL:      http://beefy4.nyi.freebsd.org/build.html?mastername=121i386-quarterly&build=539614
Log:

=>> Building devel/spark
build started at Sat Jun 20 07:10:15 UTC 2020
port directory: /usr/ports/devel/spark
package name: apache-spark-2.1.1_1
building for: FreeBSD 121i386-quarterly-job-14 12.1-RELEASE-p6 FreeBSD 12.1-RELEASE-p6 i386
maintained by: yuri@FreeBSD.org
Makefile ident:      $FreeBSD: branches/2020Q2/devel/spark/Makefile 526925 2020-02-23 15:25:53Z antoine $
Poudriere version: 3.2.8-5-gc81843e5
Host OSVERSION: 1300094
Jail OSVERSION: 1201000
Job Id: 14

---Begin Environment---
SHELL=/bin/csh
UNAME_p=i386
UNAME_m=i386
OSVERSION=1201000
UNAME_v=FreeBSD 12.1-RELEASE-p6
UNAME_r=12.1-RELEASE-p6
BLOCKSIZE=K
MAIL=/var/mail/root
STATUS=1
HOME=/root
PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin:/root/bin
LOCALBASE=/usr/local
USER=root
LIBEXECPREFIX=/usr/local/libexec/poudriere
POUDRIERE_VERSION=3.2.8-5-gc81843e5
MASTERMNT=/usr/local/poudriere/data/.m/121i386-quarterly/ref
POUDRIERE_BUILD_TYPE=bulk
PACKAGE_BUILDING=yes
SAVED_TERM=
PWD=/usr/local/poudriere/data/.m/121i386-quarterly/ref/.p/pool
P_PORTS_FEATURES=FLAVORS SELECTED_OPTIONS
MASTERNAME=121i386-quarterly
SCRIPTPREFIX=/usr/local/share/poudriere
OLDPWD=/usr/local/poudriere/data/.m/121i386-quarterly/ref/.p
SCRIPTPATH=/usr/local/share/poudriere/bulk.sh
POUDRIEREPATH=/usr/local/bin/poudriere
---End Environment---

---Begin Poudriere Port Flags/Env---
PORT_FLAGS=
PKGENV=
FLAVOR=
DEPENDS_ARGS=
MAKE_ARGS=
---End Poudriere Port Flags/Env---

---Begin OPTIONS List---
---End OPTIONS List---

--MAINTAINER--
yuri@FreeBSD.org
--End MAINTAINER--

--CONFIGURE_ARGS--

--End CONFIGURE_ARGS--

--CONFIGURE_ENV--
PYTHON="/usr/local/bin/python2.7" XDG_DATA_HOME=/wrkdirs/usr/ports/devel/spark/work  XDG_CONFIG_HOME=/wrkdirs/usr/ports/devel/spark/work  HOME=/wrkdirs/usr/ports/devel/spark/work TMPDIR="/tmp" PATH=/wrkdirs/usr/ports/devel/spark/work/.bin:/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin:/root/bin SHELL=/bin/sh CONFIG_SHELL=/bin/sh
--End CONFIGURE_ENV--

--MAKE_ENV--
JAVA_HOME=/usr/local/openjdk8 MAVEN_OPTS="-Xmx2g -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=512m" XDG_DATA_HOME=/wrkdirs/usr/ports/devel/spark/work  XDG_CONFIG_HOME=/wrkdirs/usr/ports/devel/spark/work  HOME=/wrkdirs/usr/ports/devel/spark/work TMPDIR="/tmp" PATH=/wrkdirs/usr/ports/devel/spark/work/.bin:/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin:/root/bin NO_PIE=yes MK_DEBUG_FILES=no MK_KERNEL_SYMBOLS=no SHELL=/bin/sh NO_LINT=YES PREFIX=/usr/local  LOCALBASE=/usr/local  CC="cc" CFLAGS="-O2 -pipe  -fstack-protector-strong -fno-strict-aliasing "  CPP="cpp" CPPFLAGS=""  LDFLAGS=" -fstack-protector-strong " LIBS=""  CXX="c++" CXXFLAGS="-O2 -pipe -fstack-protector-strong -fno-strict-aliasing  "  MANPREFIX="/usr/local" BSD_INSTALL_PROGRAM="install  -s -m 555"  BSD_INSTALL_LIB="install  -s -m 0644"  BSD_INSTALL_SCRIPT="install  -m 555"  BSD_INSTALL_DATA="install  -m 0644"  BSD_INSTALL_MAN="install  -m 444"
--End MAKE_ENV--

--PLIST_SUB--
SPARK_USER=spark SPARK_GROUP=spark VER=2.1.1 JAVASHAREDIR="share/java"  JAVAJARDIR="share/java/classes" PYTHON_INCLUDEDIR=include/python2.7  PYTHON_LIBDIR=lib/python2.7  PYTHON_PLATFORM=freebsd12  PYTHON_SITELIBDIR=lib/python2.7/site-packages  PYTHON_SUFFIX=27  PYTHON_VER=2.7  PYTHON_VERSION=python2.7 PYTHON2="" PYTHON3="@comment " OSREL=12.1 PREFIX=%D LOCALBASE=/usr/local  RESETPREFIX=/usr/local LIB32DIR=lib DOCSDIR="share/doc/spark"  EXAMPLESDIR="share/examples/spark"  DATADIR="share/spark"  WWWDIR="www/spark"  ETCDIR="etc/spark"
--End PLIST_SUB--

--SUB_LIST--
SPARK_USER=spark SPARK_GROUP=spark JAVASHAREDIR="/usr/local/share/java"  JAVAJARDIR="/usr/local/share/java/classes"  JAVALIBDIR="/usr/local/share/java/classes" PREFIX=/usr/local LOCALBASE=/usr/local  DATADIR=/usr/local/share/spark DOCSDIR=/usr/local/share/doc/spark EXAMPLESDIR=/usr/local/share/examples/spark  WWWDIR=/usr/local/www/spark ETCDIR=/usr/local/etc/spark
--End SUB_LIST--

---Begin make.conf---
USE_PACKAGE_DEPENDS=yes
BATCH=yes
WRKDIRPREFIX=/wrkdirs
PORTSDIR=/usr/ports
PACKAGES=/packages
DISTDIR=/distfiles
PACKAGE_BUILDING=yes
PACKAGE_BUILDING_FLAVORS=yes
MACHINE=i386
MACHINE_ARCH=i386
ARCH=${MACHINE_ARCH}
#### /usr/local/etc/poudriere.d/make.conf ####
# XXX: We really need this but cannot use it while 'make checksum' does not
# try the next mirror on checksum failure.  It currently retries the same
# failed mirror and then fails rather than trying another.  It *does*
# try the next if the size is mismatched though.
#MASTER_SITE_FREEBSD=yes
# Build ALLOW_MAKE_JOBS_PACKAGES with 2 jobs
MAKE_JOBS_NUMBER=2
#### /usr/ports/Mk/Scripts/ports_env.sh ####
_CCVERSION_921dbbb2=FreeBSD clang version 8.0.1 (tags/RELEASE_801/final 366581) (based on LLVM 8.0.1) Target: i386-unknown-freebsd12.1 Thread model: posix InstalledDir: /usr/bin
_ALTCCVERSION_921dbbb2=none
_CXXINTERNAL_acaad9ca=FreeBSD clang version 8.0.1 (tags/RELEASE_801/final 366581) (based on LLVM 8.0.1) Target: i386-unknown-freebsd12.1 Thread model: posix InstalledDir: /usr/bin "/usr/bin/ld" "--eh-frame-hdr" "-dynamic-linker" "/libexec/ld-elf.so.1" "--hash-style=both" "--enable-new-dtags" "-m" "elf_i386_fbsd" "-o" "a.out" "/usr/lib/crt1.o" "/usr/lib/crti.o" "/usr/lib/crtbegin.o" "-L/usr/lib" "/dev/null" "-lc++" "-lm" "-lgcc" "--as-needed" "-lgcc_s" "--no-as-needed" "-lc" "-lgcc" "--as-needed" "-lgcc_s" "--no-as-needed" "/usr/lib/crtend.o" "/usr/lib/crtn.o"
CC_OUTPUT_921dbbb2_58173849=yes
CC_OUTPUT_921dbbb2_9bdba57c=yes
CC_OUTPUT_921dbbb2_6a4fe7f5=yes
CC_OUTPUT_921dbbb2_6bcac02b=yes
CC_OUTPUT_921dbbb2_67d20829=yes
CC_OUTPUT_921dbbb2_bfa62e83=yes
CC_OUTPUT_921dbbb2_f0b4d593=yes
CC_OUTPUT_921dbbb2_308abb44=yes
CC_OUTPUT_921dbbb2_f00456e5=yes
CC_OUTPUT_921dbbb2_65ad290d=yes
CC_OUTPUT_921dbbb2_f2776b26=yes
CC_OUTPUT_921dbbb2_b2657cc3=yes
CC_OUTPUT_921dbbb2_380987f7=yes
CC_OUTPUT_921dbbb2_160933ec=yes
CC_OUTPUT_921dbbb2_fb62803b=yes
_OBJC_CCVERSION_921dbbb2=FreeBSD clang version 8.0.1 (tags/RELEASE_801/final 366581) (based on LLVM 8.0.1) Target: i386-unknown-freebsd12.1 Thread model: posix InstalledDir: /usr/bin
_OBJC_ALTCCVERSION_921dbbb2=none
ARCH=i386
OPSYS=FreeBSD
_OSRELEASE=12.1-RELEASE-p6
OSREL=12.1
OSVERSION=1201000
PYTHONBASE=/usr/local
CONFIGURE_MAX_CMD_LEN=524288
HAVE_PORTS_ENV=1
#### Misc Poudriere ####
GID=0
UID=0
DISABLE_MAKE_JOBS=poudriere
---End make.conf---
--Resource limits--
cpu time               (seconds, -t)  unlimited
file size           (512-blocks, -f)  unlimited
data seg size           (kbytes, -d)  524288
stack size              (kbytes, -s)  65536
core file size      (512-blocks, -c)  unlimited
max memory size         (kbytes, -m)  unlimited
locked memory           (kbytes, -l)  unlimited
max user processes              (-u)  89999
open files                      (-n)  1024
virtual mem size        (kbytes, -v)  unlimited
swap limit              (kbytes, -w)  unlimited
socket buffer size       (bytes, -b)  unlimited
pseudo-terminals                (-p)  unlimited
kqueues                         (-k)  unlimited
umtx shared locks               (-o)  unlimited
--End resource limits--
=======================<phase: check-sanity   >============================
===>   NOTICE:

This port is deprecated; you may wish to reconsider installing it:

Uses deprecated version of python.

It is scheduled to be removed on or after 2020-08-15.

===>  License APACHE20 accepted by the user
===========================================================================
=======================<phase: pkg-depends    >============================
===>   apache-spark-2.1.1_1 depends on file: /usr/local/sbin/pkg - not found
===>   Installing existing package /packages/All/pkg-1.13.2.txz
[121i386-quarterly-job-14] Installing pkg-1.13.2...
[121i386-quarterly-job-14] Extracting pkg-1.13.2: .......... done
===>   apache-spark-2.1.1_1 depends on file: /usr/local/sbin/pkg - found
===>   Returning to build of apache-spark-2.1.1_1
===========================================================================
=======================<phase: fetch-depends  >============================
===========================================================================
=======================<phase: fetch          >============================
===>   NOTICE:

This port is deprecated; you may wish to reconsider installing it:

Uses deprecated version of python.

It is scheduled to be removed on or after 2020-08-15.

===>  License APACHE20 accepted by the user
===> Fetching all distfiles required by apache-spark-2.1.1_1 for building
===========================================================================
=======================<phase: checksum       >============================
===>   NOTICE:

This port is deprecated; you may wish to reconsider installing it:

Uses deprecated version of python.

It is scheduled to be removed on or after 2020-08-15.

===>  License APACHE20 accepted by the user
===> Fetching all distfiles required by apache-spark-2.1.1_1 for building
=> SHA256 Checksum OK for hadoop/spark-2.1.1.tgz.
=> SHA256 Checksum OK for hadoop/FreeBSD-spark-2.1.1-maven-repository.tar.gz.
===========================================================================
=======================<phase: extract-depends>============================
===========================================================================
=======================<phase: extract        >============================
===>   NOTICE:

<snip>
[INFO] Excluding org.apache.httpcomponents:httpcore:jar:4.4.4 from the shaded jar.
[INFO] Excluding org.apache.httpcomponents:httpclient:jar:4.5.2 from the shaded jar.
[INFO] Excluding javax.activation:activation:jar:1.1.1 from the shaded jar.
[INFO] Excluding mx4j:mx4j:jar:3.0.2 from the shaded jar.
[INFO] Excluding javax.mail:mail:jar:1.4.7 from the shaded jar.
[INFO] Excluding org.bouncycastle:bcprov-jdk15on:jar:1.51 from the shaded jar.
[INFO] Excluding com.jamesmurty.utils:java-xmlbuilder:jar:1.0 from the shaded jar.
[INFO] Excluding net.iharder:base64:jar:2.3.8 from the shaded jar.
[INFO] Excluding org.apache.curator:curator-recipes:jar:2.6.0 from the shaded jar.
[INFO] Excluding org.apache.curator:curator-framework:jar:2.6.0 from the shaded jar.
[INFO] Excluding org.apache.zookeeper:zookeeper:jar:3.4.6 from the shaded jar.
[INFO] Excluding javax.servlet:javax.servlet-api:jar:3.1.0 from the shaded jar.
[INFO] Excluding org.apache.commons:commons-lang3:jar:3.5 from the shaded jar.
[INFO] Excluding org.apache.commons:commons-math3:jar:3.4.1 from the shaded jar.
[INFO] Excluding com.google.code.findbugs:jsr305:jar:1.3.9 from the shaded jar.
[INFO] Excluding org.slf4j:slf4j-api:jar:1.7.16 from the shaded jar.
[INFO] Excluding org.slf4j:jul-to-slf4j:jar:1.7.16 from the shaded jar.
[INFO] Excluding org.slf4j:jcl-over-slf4j:jar:1.7.16 from the shaded jar.
[INFO] Excluding log4j:log4j:jar:1.2.17 from the shaded jar.
[INFO] Excluding org.slf4j:slf4j-log4j12:jar:1.7.16 from the shaded jar.
[INFO] Excluding com.ning:compress-lzf:jar:1.0.3 from the shaded jar.
[INFO] Excluding org.xerial.snappy:snappy-java:jar:1.1.2.6 from the shaded jar.
[INFO] Excluding net.jpountz.lz4:lz4:jar:1.3.0 from the shaded jar.
[INFO] Excluding org.roaringbitmap:RoaringBitmap:jar:0.5.11 from the shaded jar.
[INFO] Excluding commons-net:commons-net:jar:2.2 from the shaded jar.
[INFO] Excluding org.json4s:json4s-jackson_2.11:jar:3.2.11 from the shaded jar.
[INFO] Excluding org.json4s:json4s-core_2.11:jar:3.2.11 from the shaded jar.
[INFO] Excluding org.json4s:json4s-ast_2.11:jar:3.2.11 from the shaded jar.
[INFO] Excluding com.thoughtworks.paranamer:paranamer:jar:2.6 from the shaded jar.
[INFO] Excluding org.scala-lang:scalap:jar:2.11.8 from the shaded jar.
[INFO] Excluding org.scala-lang:scala-compiler:jar:2.11.8 from the shaded jar.
[INFO] Excluding org.scala-lang.modules:scala-parser-combinators_2.11:jar:1.0.4 from the shaded jar.
[INFO] Excluding org.glassfish.jersey.core:jersey-client:jar:2.22.2 from the shaded jar.
[INFO] Excluding javax.ws.rs:javax.ws.rs-api:jar:2.0.1 from the shaded jar.
[INFO] Excluding org.glassfish.hk2:hk2-api:jar:2.4.0-b34 from the shaded jar.
[INFO] Excluding org.glassfish.hk2:hk2-utils:jar:2.4.0-b34 from the shaded jar.
[INFO] Excluding org.glassfish.hk2.external:aopalliance-repackaged:jar:2.4.0-b34 from the shaded jar.
[INFO] Excluding org.glassfish.hk2.external:javax.inject:jar:2.4.0-b34 from the shaded jar.
[INFO] Excluding org.glassfish.hk2:hk2-locator:jar:2.4.0-b34 from the shaded jar.
[INFO] Excluding org.javassist:javassist:jar:3.18.1-GA from the shaded jar.
[INFO] Excluding org.glassfish.jersey.core:jersey-common:jar:2.22.2 from the shaded jar.
[INFO] Excluding javax.annotation:javax.annotation-api:jar:1.2 from the shaded jar.
[INFO] Excluding org.glassfish.jersey.bundles.repackaged:jersey-guava:jar:2.22.2 from the shaded jar.
[INFO] Excluding org.glassfish.hk2:osgi-resource-locator:jar:1.0.1 from the shaded jar.
[INFO] Excluding org.glassfish.jersey.core:jersey-server:jar:2.22.2 from the shaded jar.
[INFO] Excluding org.glassfish.jersey.media:jersey-media-jaxb:jar:2.22.2 from the shaded jar.
[INFO] Excluding javax.validation:validation-api:jar:1.1.0.Final from the shaded jar.
[INFO] Excluding org.glassfish.jersey.containers:jersey-container-servlet:jar:2.22.2 from the shaded jar.
[INFO] Excluding org.glassfish.jersey.containers:jersey-container-servlet-core:jar:2.22.2 from the shaded jar.
[INFO] Excluding io.netty:netty-all:jar:4.0.42.Final from the shaded jar.
[INFO] Excluding io.netty:netty:jar:3.8.0.Final from the shaded jar.
[INFO] Excluding com.clearspring.analytics:stream:jar:2.7.0 from the shaded jar.
[INFO] Excluding io.dropwizard.metrics:metrics-core:jar:3.1.2 from the shaded jar.
[INFO] Excluding io.dropwizard.metrics:metrics-jvm:jar:3.1.2 from the shaded jar.
[INFO] Excluding io.dropwizard.metrics:metrics-json:jar:3.1.2 from the shaded jar.
[INFO] Excluding io.dropwizard.metrics:metrics-graphite:jar:3.1.2 from the shaded jar.
[INFO] Excluding com.fasterxml.jackson.core:jackson-databind:jar:2.6.5 from the shaded jar.
[INFO] Excluding com.fasterxml.jackson.core:jackson-core:jar:2.6.5 from the shaded jar.
[INFO] Excluding com.fasterxml.jackson.module:jackson-module-scala_2.11:jar:2.6.5 from the shaded jar.
[INFO] Excluding com.fasterxml.jackson.module:jackson-module-paranamer:jar:2.6.5 from the shaded jar.
[INFO] Excluding org.apache.ivy:ivy:jar:2.4.0 from the shaded jar.
[INFO] Excluding oro:oro:jar:2.0.8 from the shaded jar.
[INFO] Excluding net.razorvine:pyrolite:jar:4.13 from the shaded jar.
[INFO] Excluding net.sf.py4j:py4j:jar:0.10.4 from the shaded jar.
[INFO] Excluding org.apache.commons:commons-crypto:jar:1.0.0 from the shaded jar.
[INFO] Excluding org.apache.spark:spark-tags_2.11:jar:2.1.1 from the shaded jar.
[INFO] Excluding org.apache.spark:spark-unsafe_2.11:jar:2.1.1 from the shaded jar.
[INFO] Excluding org.codehaus.janino:janino:jar:3.0.0 from the shaded jar.
[INFO] Excluding org.codehaus.janino:commons-compiler:jar:3.0.0 from the shaded jar.
[INFO] Excluding org.antlr:antlr4-runtime:jar:4.5.3 from the shaded jar.
[INFO] Excluding commons-codec:commons-codec:jar:1.10 from the shaded jar.
[INFO] Including org.spark-project.spark:unused:jar:1.0.0 in the shaded jar.
[INFO] Excluding org.scala-lang.modules:scala-xml_2.11:jar:1.0.2 from the shaded jar.
[INFO] Replacing original artifact with shaded artifact.
[INFO] Replacing /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/catalyst/target/spark-catalyst_2.11-2.1.1.jar with /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/catalyst/target/spark-catalyst_2.11-2.1.1-shaded.jar
[INFO] Dependency-reduced POM written at: /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/catalyst/dependency-reduced-pom.xml
[INFO] Dependency-reduced POM written at: /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/catalyst/dependency-reduced-pom.xml
[INFO] Dependency-reduced POM written at: /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/catalyst/dependency-reduced-pom.xml
[INFO] 
[INFO] --- maven-source-plugin:2.4:jar-no-fork (create-source-jar) @ spark-catalyst_2.11 ---
[INFO] Building jar: /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/catalyst/target/spark-catalyst_2.11-2.1.1-sources.jar
[INFO] 
[INFO] --- maven-source-plugin:2.4:test-jar-no-fork (create-source-jar) @ spark-catalyst_2.11 ---
[INFO] Building jar: /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/catalyst/target/spark-catalyst_2.11-2.1.1-test-sources.jar
[INFO] 
[INFO] --- maven-jar-plugin:2.6:test-jar (default) @ spark-catalyst_2.11 ---
[INFO] Building jar: /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/catalyst/target/spark-catalyst_2.11-2.1.1-tests.jar
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Building Spark Project SQL 2.1.1
[INFO] ------------------------------------------------------------------------
[INFO] 
[INFO] --- maven-clean-plugin:3.0.0:clean (default-clean) @ spark-sql_2.11 ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.4.1:enforce (enforce-versions) @ spark-sql_2.11 ---
[INFO] 
[INFO] --- scala-maven-plugin:3.2.2:add-source (eclipse-add-source) @ spark-sql_2.11 ---
[INFO] Add Source directory: /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/src/main/scala
[INFO] Add Test Source directory: /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/src/test/scala
[INFO] 
[INFO] --- maven-dependency-plugin:2.10:build-classpath (default-cli) @ spark-sql_2.11 ---
[INFO] Dependencies classpath:
/wrkdirs/usr/ports/devel/spark/work/m2/mx4j/mx4j/3.0.2/mx4j-3.0.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/hadoop/hadoop-annotations/2.7.2/hadoop-annotations-2.7.2.jar:/wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/launcher/target/spark-launcher_2.11-2.1.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/com/univocity/univocity-parsers/2.2.1/univocity-parsers-2.2.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/io/dropwizard/metrics/metrics-core/3.1.2/metrics-core-3.1.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/parquet/parquet-jackson/1.8.1/parquet-jackson-1.8.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/htrace/htrace-core/3.1.0-incubating/htrace-core-3.1.0-incubating.jar:/wrkdirs/usr/ports/devel/spark/work/m2/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar:/wrkdirs/usr/ports/devel/spark/work/m2/net/java/dev/jets3t/jets3t/0.9.3/jets3t-0.9.3.jar:/wrkdirs/usr/ports/devel/spark/work/m2/com/fasterxml/jackson/core/jackson-databind/2.6.5/jackson-databind-2.6.5.jar:/wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/common/network-common/target/spark-network-common_2.11-2.1.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/hadoop/hadoop-mapreduce-client-shuffle/2.7.2/hadoop-mapreduce-client-shuffle-2.7.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/io/dropwizard/metrics/metrics-jvm/3.1.2/metrics-jvm-3.1.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/codehaus/janino/janino/3.0.0/janino-3.0.0.jar:/wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/common/sketch/target/spark-sketch_2.11-2.1.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/fusesource/leveldbjni/leveldbjni-all/1.8/leveldbjni-all-1.8.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/parquet/parquet-format/2.3.0-incubating/parquet-format-2.3.0-incubating.jar:/wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/common/network-shuffle/target/spark-network-shuffle_2.11-2.1.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/glassfish/jersey/bundles/repackaged/jersey-guava/2.22.2/jersey-guava-2.22.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/io/dropwizard/metrics/metrics-json/3.1.2/metrics-json-3.1.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/commons/commons-crypto/1.0.0/commons-crypto-1.0.0.jar:/wrkdirs/usr/ports/devel/spark/work/m2/commons-io/commons-io/2.4/commons-io-2.4.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/commons/commons-lang3/3.5/commons-lang3-3.5.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/codehaus/jackson/jackson-jaxrs/1.9.13/jackson-jaxrs-1.9.13.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/hadoop/hadoop-mapreduce-client-core/2.7.2/hadoop-mapreduce-client-core-2.7.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/scala-lang/scala-reflect/2.11.8/scala-reflect-2.11.8.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/json4s/json4s-jackson_2.11/3.2.11/json4s-jackson_2.11-3.2.11.jar:/wrkdirs/usr/ports/devel/spark/work/m2/com/esotericsoftware/minlog/1.3.0/minlog-1.3.0.jar:/wrkdirs/usr/ports/devel/spark/work/m2/javax/mail/mail/1.4.7/mail-1.4.7.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/codehaus/jackson/jackson-core-asl/1.9.13/jackson-core-asl-1.9.13.jar:/wrkdirs/usr/ports/devel/spark/work/m2/javax/validation/validation-api/1.1.0.Final/validation-api-1.1.0.Final.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/bouncycastle/bcprov-jdk15on/1.51/bcprov-jdk15on-1.51.jar:/wrkdirs/usr/ports/devel/spark/work/m2/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar:/wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/catalyst/target/spark-catalyst_2.11-2.1.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/glassfish/jersey/core/jersey-common/2.22.2/jersey-common-2.22.2.jar:/wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/common/unsafe/target/spark-unsafe_2.11-2.1.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/io/netty/netty/3.8.0.Final/netty-3.8.0.Final.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/hadoop/hadoop-mapreduce-client-jobclient/2.7.2/hadoop-mapreduce-client-jobclient-2.7.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar:/wrkdirs/usr/ports/devel/spark/work/m2/javax/xml/bind/jaxb-api/2.2.2/jaxb-api-2.2.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/glassfish/hk2/hk2-api/2.4.0-b34/hk2-api-2.4.0-b34.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/hadoop/hadoop-mapreduce-client-common/2.7.2/hadoop-mapreduce-client-common-2.7.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/glassfish/jersey/core/jersey-server/2.22.2/jersey-server-2.22.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/com/twitter/chill-java/0.8.0/chill-java-0.8.0.jar:/wrkdirs/usr/ports/devel/spark/work/m2/commons-lang/commons-lang/2.6/commons-lang-2.6.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/zookeeper/zookeeper/3.4.6/zookeeper-3.4.6.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/tukaani/xz/1.0/xz-1.0.jar:/wrkdirs/usr/ports/devel/spark/work/m2/javax/activation/activation/1.1.1/activation-1.1.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/hadoop/hadoop-client/2.7.2/hadoop-client-2.7.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/commons-codec/commons-codec/1.10/commons-codec-1.10.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/javassist/javassist/3.18.1-GA/javassist-3.18.1-GA.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/ivy/ivy/2.4.0/ivy-2.4.0.jar:/wrkdirs/usr/ports/devel/spark/work/m2/commons-beanutils/commons-beanutils-core/1.8.0/commons-beanutils-core-1.8.0.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/hadoop/hadoop-hdfs/2.7.2/hadoop-hdfs-2.7.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/javax/servlet/javax.servlet-api/3.1.0/javax.servlet-api-3.1.0.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/glassfish/hk2/osgi-resource-locator/1.0.1/osgi-resource-locator-1.0.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/commons/commons-math3/3.4.1/commons-math3-3.4.1.jar:/wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/common/tags/target/spark-tags_2.11-2.1.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/javax/xml/stream/stax-api/1.0-2/stax-api-1.0-2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/avro/avro/1.7.7/avro-1.7.7.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/codehaus/jackson/jackson-xc/1.9.13/jackson-xc-1.9.13.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/hadoop/hadoop-yarn-client/2.7.2/hadoop-yarn-client-2.7.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/glassfish/hk2/hk2-utils/2.4.0-b34/hk2-utils-2.4.0-b34.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/glassfish/hk2/external/aopalliance-repackaged/2.4.0-b34/aopalliance-repackaged-2.4.0-b34.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/glassfish/hk2/external/javax.inject/2.4.0-b34/javax.inject-2.4.0-b34.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/glassfish/jersey/containers/jersey-container-servlet-core/2.22.2/jersey-container-servlet-core-2.22.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/oro/oro/2.0.8/oro-2.0.8.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/scala-lang/scala-compiler/2.11.8/scala-compiler-2.11.8.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/curator/curator-framework/2.6.0/curator-framework-2.6.0.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/roaringbitmap/RoaringBitmap/0.5.11/RoaringBitmap-0.5.11.jar:/wrkdirs/usr/ports/devel/spark/work/m2/log4j/log4j/1.2.17/log4j-1.2.17.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/scala-lang/scala-library/2.11.8/scala-library-2.11.8.jar:/wrkdirs/usr/ports/devel/spark/work/m2/io/netty/netty-all/4.0.42.Final/netty-all-4.0.42.Final.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/commons/commons-compress/1.4.1/commons-compress-1.4.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/directory/server/apacheds-kerberos-codec/2.0.0-M15/apacheds-kerberos-codec-2.0.0-M15.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/curator/curator-recipes/2.6.0/curator-recipes-2.6.0.jar:/wrkdirs/usr/ports/devel/spark/work/m2/com/fasterxml/jackson/core/jackson-annotations/2.6.5/jackson-annotations-2.6.5.jar:/wrkdirs/usr/ports/devel/spark/work/m2/com/jamesmurty/utils/java-xmlbuilder/1.0/java-xmlbuilder-1.0.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/parquet/parquet-encoding/1.8.1/parquet-encoding-1.8.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/slf4j/slf4j-api/1.7.16/slf4j-api-1.7.16.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/xbean/xbean-asm5-shaded/4.4/xbean-asm5-shaded-4.4.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/json4s/json4s-core_2.11/3.2.11/json4s-core_2.11-3.2.11.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/mortbay/jetty/jetty-util/6.1.26/jetty-util-6.1.26.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/parquet/parquet-column/1.8.1/parquet-column-1.8.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/antlr/antlr4-runtime/4.5.3/antlr4-runtime-4.5.3.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/parquet/parquet-common/1.8.1/parquet-common-1.8.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/codehaus/jackson/jackson-mapper-asl/1.9.13/jackson-mapper-asl-1.9.13.jar:/wrkdirs/usr/ports/devel/spark/work/m2/com/esotericsoftware/kryo-shaded/3.0.3/kryo-shaded-3.0.3.jar:/wrkdirs/usr/ports/devel/spark/work/m2/javax/ws/rs/javax.ws.rs-api/2.0.1/javax.ws.rs-api-2.0.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/javax/servlet/jsp/jsp-api/2.1/jsp-api-2.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/glassfish/jersey/core/jersey-client/2.22.2/jersey-client-2.22.2.jar:/wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/core/target/spark-core_2.11-2.1.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/avro/avro-mapred/1.7.7/avro-mapred-1.7.7-hadoop2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/com/fasterxml/jackson/module/jackson-module-paranamer/2.6.5/jackson-module-paranamer-2.6.5.jar:/wrkdirs/usr/ports/devel/spark/work/m2/xmlenc/xmlenc/0.52/xmlenc-0.52.jar:/wrkdirs/usr/ports/devel/spark/work/m2/com/ning/compress-lzf/1.0.3/compress-lzf-1.0.3.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/parquet/parquet-hadoop/1.8.1/parquet-hadoop-1.8.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/com/thoughtworks/paranamer/paranamer/2.3/paranamer-2.3.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/directory/api/api-util/1.0.0-M20/api-util-1.0.0-M20.jar:/wrkdirs/usr/ports/devel/spark/work/m2/com/fasterxml/jackson/module/jackson-module-scala_2.11/2.6.5/jackson-module-scala_2.11-2.6.5.jar:/wrkdirs/usr/ports/devel/spark/work/m2/commons-net/commons-net/2.2/commons-net-2.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/com/google/code/gson/gson/2.2.4/gson-2.2.4.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/json4s/json4s-ast_2.11/3.2.11/json4s-ast_2.11-3.2.11.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/directory/api/api-asn1-api/1.0.0-M20/api-asn1-api-1.0.0-M20.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/scala-lang/scalap/2.11.8/scalap-2.11.8.jar:/wrkdirs/usr/ports/devel/spark/work/m2/com/clearspring/analytics/stream/2.7.0/stream-2.7.0.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/curator/curator-client/2.6.0/curator-client-2.6.0.jar:/wrkdirs/usr/ports/devel/spark/work/m2/commons-configuration/commons-configuration/1.6/commons-configuration-1.6.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/glassfish/jersey/containers/jersey-container-servlet/2.22.2/jersey-container-servlet-2.22.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/commons-digester/commons-digester/1.8/commons-digester-1.8.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/slf4j/jul-to-slf4j/1.7.16/jul-to-slf4j-1.7.16.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/directory/server/apacheds-i18n/2.0.0-M15/apacheds-i18n-2.0.0-M15.jar:/wrkdirs/usr/ports/devel/spark/work/m2/commons-beanutils/commons-beanutils/1.7.0/commons-beanutils-1.7.0.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/hadoop/hadoop-mapreduce-client-app/2.7.2/hadoop-mapreduce-client-app-2.7.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/codehaus/janino/commons-compiler/3.0.0/commons-compiler-3.0.0.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/scala-lang/modules/scala-xml_2.11/1.0.2/scala-xml_2.11-1.0.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/com/fasterxml/jackson/core/jackson-core/2.6.5/jackson-core-2.6.5.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/scala-lang/modules/scala-parser-combinators_2.11/1.0.4/scala-parser-combinators_2.11-1.0.4.jar:/wrkdirs/usr/ports/devel/spark/work/m2/commons-cli/commons-cli/1.2/commons-cli-1.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/io/dropwizard/metrics/metrics-graphite/3.1.2/metrics-graphite-3.1.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/xerces/xercesImpl/2.9.1/xercesImpl-2.9.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/net/iharder/base64/2.3.8/base64-2.3.8.jar:/wrkdirs/usr/ports/devel/spark/work/m2/javax/annotation/javax.annotation-api/1.2/javax.annotation-api-1.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/objenesis/objenesis/2.1/objenesis-2.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/slf4j/slf4j-log4j12/1.7.16/slf4j-log4j12-1.7.16.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/xerial/snappy/snappy-java/1.1.2.6/snappy-java-1.1.2.6.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/hadoop/hadoop-yarn-server-common/2.7.2/hadoop-yarn-server-common-2.7.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/glassfish/jersey/media/jersey-media-jaxb/2.22.2/jersey-media-jaxb-2.22.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/spark-project/spark/unused/1.0.0/unused-1.0.0.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/avro/avro-ipc/1.7.7/avro-ipc-1.7.7.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/hadoop/hadoop-yarn-common/2.7.2/hadoop-yarn-common-2.7.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/net/sf/py4j/py4j/0.10.4/py4j-0.10.4.jar:/wrkdirs/usr/ports/devel/spark/work/m2/commons-collections/commons-collections/3.2.2/commons-collections-3.2.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/com/twitter/chill_2.11/0.8.0/chill_2.11-0.8.0.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/hadoop/hadoop-common/2.7.2/hadoop-common-2.7.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/hadoop/hadoop-auth/2.7.2/hadoop-auth-2.7.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/glassfish/hk2/hk2-locator/2.4.0-b34/hk2-locator-2.4.0-b34.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/hadoop/hadoop-yarn-api/2.7.2/hadoop-yarn-api-2.7.2.jar:/wrkdirs/usr/ports/devel/spark/work/m2/net/razorvine/pyrolite/4.13/pyrolite-4.13.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/slf4j/jcl-over-slf4j/1.7.16/jcl-over-slf4j-1.7.16.jar:/wrkdirs/usr/ports/devel/spark/work/m2/commons-httpclient/commons-httpclient/3.1/commons-httpclient-3.1.jar:/wrkdirs/usr/ports/devel/spark/work/m2/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar
[INFO] 
[INFO] --- maven-remote-resources-plugin:1.5:process (default) @ spark-sql_2.11 ---
[INFO] 
[INFO] --- maven-resources-plugin:2.6:resources (default-resources) @ spark-sql_2.11 ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] Copying 4 resources
[INFO] Copying 3 resources
[INFO] 
[INFO] --- scala-maven-plugin:3.2.2:compile (scala-compile-first) @ spark-sql_2.11 ---
[WARNING] Zinc server is not available at port 3030 - reverting to normal incremental compile
[INFO] Using incremental compilation
[INFO] Compiling 247 Scala sources and 42 Java sources to /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/target/scala-2.11/classes...
[WARNING] warning: [options] bootstrap class path not set in conjunction with -source 1.7
[WARNING] /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/SpecificParquetRecordReaderBase.java:160: warning: [cast] redundant cast to Option<AccumulatorV2<?,?>>
[WARNING]       Option<AccumulatorV2<?, ?>> accu = (Option<AccumulatorV2<?, ?>>) taskContext.taskMetrics()
[WARNING]                                          ^
[WARNING] 2 warnings
[INFO] 
[INFO] --- maven-compiler-plugin:3.5.1:compile (default-compile) @ spark-sql_2.11 ---
[INFO] Changes detected - recompiling the module!
[INFO] Compiling 42 source files to /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/target/scala-2.11/classes
[INFO] 
[INFO] --- build-helper-maven-plugin:1.10:add-test-source (add-scala-test-sources) @ spark-sql_2.11 ---
[INFO] Test Source directory: /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/src/test/gen-java added.
[INFO] 
[INFO] --- maven-antrun-plugin:1.8:run (create-tmp-dir) @ spark-sql_2.11 ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/target/tmp
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-resources-plugin:2.6:testResources (default-testResources) @ spark-sql_2.11 ---
[INFO] Using 'UTF-8' encoding to copy filtered resources.
[INFO] Copying 214 resources
[INFO] Copying 3 resources
[INFO] 
[INFO] --- scala-maven-plugin:3.2.2:testCompile (scala-test-compile-first) @ spark-sql_2.11 ---
[WARNING] Zinc server is not available at port 3030 - reverting to normal incremental compile
[INFO] Using incremental compilation
[INFO] Compiling 188 Scala sources and 20 Java sources to /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/target/scala-2.11/test-classes...
[WARNING] /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala:38: reflective access of structural type member method mode should be enabled
by making the implicit value scala.language.reflectiveCalls visible.
This can be achieved by adding the import clause 'import scala.language.reflectiveCalls'
or by setting the compiler option -language:reflectiveCalls.
See the Scaladoc for value scala.language.reflectiveCalls for a discussion
why the feature should be explicitly enabled.
[WARNING]     badRule.mode = ""
[WARNING]             ^
[WARNING] /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala:42: reflective access of structural type member method mode should be enabled
by making the implicit value scala.language.reflectiveCalls visible.
[WARNING]     badRule.mode = "exception"
[WARNING]             ^
[WARNING] /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala:46: reflective access of structural type member method mode should be enabled
by making the implicit value scala.language.reflectiveCalls visible.
[WARNING]     badRule.mode = "error"
[WARNING]             ^
[WARNING] /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryListenerSuite.scala:273: reflective access of structural type member method numTriggers should be enabled
by making the implicit value scala.language.reflectiveCalls visible.
[WARNING]             assert(input.numTriggers > 100) // at least 100 triggers have occurred
[WARNING]                          ^
[WARNING] /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryStatusAndProgressSuite.scala:200: postfix operator minute should be enabled
by making the implicit value scala.language.postfixOps visible.
This can be achieved by adding the import clause 'import scala.language.postfixOps'
or by setting the compiler option -language:postfixOps.
See the Scaladoc for value scala.language.postfixOps for a discussion
why the feature should be explicitly enabled.
[WARNING]         eventually(timeout(1 minute)) {
[WARNING]                              ^
[WARNING] /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQuerySuite.scala:457: a pure expression does nothing in statement position; you may be omitting necessary parentheses
[WARNING]       q1
[WARNING]       ^
[WARNING] /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala:136: method explode in class Dataset is deprecated: use flatMap() or select() with functions.explode() instead
[WARNING]       df.explode("words", "word") { word: String => word.split(" ").toSeq }.select('word),
[WARNING]          ^
[WARNING] /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala:144: method explode in class Dataset is deprecated: use flatMap() or select() with functions.explode() instead
[WARNING]       df.explode('letters) {
[WARNING]          ^
[WARNING] /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala:194: method explode in class Dataset is deprecated: use flatMap() or select() with functions.explode() instead
[WARNING]       df.explode($"*") { case Row(prefix: String, csv: String) =>
[WARNING]          ^
[WARNING] /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala:201: method explode in class Dataset is deprecated: use flatMap() or select() with functions.explode() instead
[WARNING]       df.explode('prefix, 'csv) { case Row(prefix: String, csv: String) =>
[WARNING]          ^
[WARNING] /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/LocalSparkSession.scala:32: constructor Slf4JLoggerFactory in class Slf4JLoggerFactory is deprecated: see corresponding Javadoc for more information.
[WARNING]     InternalLoggerFactory.setDefaultFactory(new Slf4JLoggerFactory())
[WARNING]                                             ^
[WARNING] /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/streaming/EventTimeWatermarkSuite.scala:184: method getYear in class Date is deprecated: see corresponding Javadoc for more information.
[WARNING]     def monthsSinceEpoch(date: Date): Int = { date.getYear * 12 + date.getMonth }
[WARNING]                                                    ^
[WARNING] /wrkdirs/usr/ports/devel/spark/work/spark-2.1.1/sql/core/src/test/scala/org/apache/spark/sql/streaming/EventTimeWatermarkSuite.scala:184: method getMonth in class Date is deprecated: see corresponding Javadoc for more information.
[WARNING]     def monthsSinceEpoch(date: Date): Int = { date.getYear * 12 + date.getMonth }
[WARNING]                                                                        ^
*** Signal 9

Stop.
make: stopped in /usr/ports/devel/spark
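
Signal 9 is SIGKILL: the compile was terminated from outside rather than
failing on its own, typically by the kernel when memory runs short or by
the build cluster's watchdog when a job overruns.  Given the jail's
resource limits above and MAVEN_OPTS requesting a 2 GB heap inside an
i386 jail, memory pressure during the sql/core Scala compile is a
plausible suspect.  Purely as an untested sketch, one experiment would be
to lower the JVM memory settings the port passes through MAKE_ENV (see
--MAKE_ENV-- above), for example:

  # hypothetical values, not a verified fix
  MAVEN_OPTS="-Xmx1g -XX:ReservedCodeCacheSize=256m"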


