Update artifactId for TensorFlow Hadoop and spark-connector jars

This commit is contained in:
Soila Kavulya 2018-05-30 19:23:08 -07:00
parent 4a4eb47e61
commit b0ec8d2c46
5 changed files with 14 additions and 12 deletions

View File

@@ -53,10 +53,10 @@ There are seven artifacts and thus `pom.xml`s involved in this release:
7. [`parentpom`](https://maven.apache.org/pom/index.html): Common settings
shared by all of the above.
8. `tensorflow-hadoop`: The TensorFlow TFRecord InputFormat/OutputFormat for Apache Hadoop.
8. `hadoop`: The TensorFlow TFRecord InputFormat/OutputFormat for Apache Hadoop.
The source code for this package is available in the [TensorFlow Ecosystem](https://github.com/tensorflow/ecosystem/tree/master/hadoop)
9. `spark-tensorflow-connector`: A Scala library for loading and storing TensorFlow TFRecord
9. `spark-connector`: A Scala library for loading and storing TensorFlow TFRecord
using Apache Spark DataFrames. The source code for this package is available
in the [TensorFlow Ecosystem](https://github.com/tensorflow/ecosystem/tree/master/spark/spark-tensorflow-connector)

View File

@@ -5,7 +5,7 @@
<!-- Placeholder pom which is replaced by TensorFlow ecosystem Hadoop pom during build -->
<modelVersion>4.0.0</modelVersion>
<description>TensorFlow TFRecord InputFormat/OutputFormat for Apache Hadoop</description>
<artifactId>tensorflow-hadoop</artifactId>
<artifactId>hadoop</artifactId>
<packaging>jar</packaging>
<scm>

View File

@@ -32,8 +32,8 @@
<module>libtensorflow_jni_gpu</module>
<module>tensorflow</module>
<module>proto</module>
<module>tensorflow-hadoop</module>
<module>spark-tensorflow-connector</module>
<module>hadoop</module>
<module>spark-connector</module>
</modules>
<!-- Two profiles are used:

View File

@@ -48,7 +48,7 @@ clean() {
mvn -q clean
rm -rf libtensorflow_jni/src libtensorflow_jni/target libtensorflow_jni_gpu/src libtensorflow_jni_gpu/target \
libtensorflow/src libtensorflow/target tensorflow-android/target \
tensorflow-hadoop/src spark-tensorflow-connector/src
hadoop/src spark-connector/src
}
update_version_in_pom() {
@@ -193,8 +193,8 @@ generate_java_protos() {
# is updated for each module.
download_tf_ecosystem() {
ECOSYSTEM_DIR="/tmp/tensorflow-ecosystem"
HADOOP_DIR="${DIR}/tensorflow-hadoop"
SPARK_DIR="${DIR}/spark-tensorflow-connector"
HADOOP_DIR="${DIR}/hadoop"
SPARK_DIR="${DIR}/spark-connector"
# Clean any previous attempts
rm -rf "${ECOSYSTEM_DIR}"
@@ -203,6 +203,8 @@ download_tf_ecosystem() {
mkdir -p "${ECOSYSTEM_DIR}"
cd "${ECOSYSTEM_DIR}"
git clone "${TF_ECOSYSTEM_URL}"
cd ecosystem
git checkout r${TF_VERSION}
# Copy the TensorFlow Hadoop source
cp -r "${ECOSYSTEM_DIR}/ecosystem/hadoop/src" "${HADOOP_DIR}"
@@ -279,7 +281,7 @@ cd "${DIR}"
# Comment lines out appropriately if debugging/tinkering with the release
# process.
# gnupg2 is required for signing
apt-get -qq update && apt-get -qqq install -y gnupg2 && apt-get -qqq install -y git
apt-get -qq update && apt-get -qqq install -y gnupg2 git
clean
update_version_in_pom

View File

@@ -5,7 +5,7 @@
<!-- Placeholder pom which is replaced by TensorFlow ecosystem Spark pom during build -->
<modelVersion>4.0.0</modelVersion>
<description>TensorFlow TFRecord connector for Apache Spark DataFrames</description>
<artifactId>spark-tensorflow-connector</artifactId>
<artifactId>spark-connector</artifactId>
<packaging>jar</packaging>
<scm>