From 22d30d2ea707de0845431dd75e151a0136772e46 Mon Sep 17 00:00:00 2001 From: Avijit <30507445+avijit-nervana@users.noreply.github.com> Date: Tue, 19 Feb 2019 23:08:01 -0800 Subject: [PATCH] Add bazel support to build nGraph (#434) * Added bazel scripts for building libngraph_bridge.so and the INTERPRETER backend * Added support for using the proper CXX ABI based on what is used to buld TensorFlow. Also upgraded the JSON library and fixed the interpreter build * Fixed the example so that it works on CentOS. Also added the NDEBUG flag to ensure that the graph mods work. * Fixed a makefile bug and updated the documentation * Cleaned up and added a README for the bazel build setup * Update README.md * Update examples/tf_cpp_examples/README.md * Added SDL flags and also updated based on PR comments --- .gitignore | 3 + README.md | 2 +- bazel/BUILD | 88 +++++++++++ bazel/README.md | 45 ++++++ bazel/WORKSPACE | 44 ++++++ bazel/ngraph.BUILD | 115 ++++++++++++++ bazel/nlohmann_json.BUILD | 31 ++++ bazel/tf_configure/BUILD | 0 bazel/tf_configure/BUILD.tpl | 18 +++ bazel/tf_configure/tf_configure.bzl | 206 ++++++++++++++++++++++++++ configure_bazel.sh | 48 ++++++ examples/tf_cpp_examples/Makefile | 20 ++- examples/tf_cpp_examples/README.md | 14 +- examples/tf_cpp_examples/hello_tf.cpp | 16 +- test_ngtf.py | 2 +- 15 files changed, 628 insertions(+), 24 deletions(-) create mode 100644 bazel/BUILD create mode 100644 bazel/README.md create mode 100644 bazel/WORKSPACE create mode 100644 bazel/ngraph.BUILD create mode 100644 bazel/nlohmann_json.BUILD create mode 100644 bazel/tf_configure/BUILD create mode 100644 bazel/tf_configure/BUILD.tpl create mode 100644 bazel/tf_configure/tf_configure.bzl create mode 100755 configure_bazel.sh diff --git a/.gitignore b/.gitignore index 6147c22c..77c207f6 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,9 @@ cpu_codegen/ *.pbtxt *.dot +*.whl +*.bzl +.bazelrc .*.swp .nfs* diff --git a/README.md b/README.md index d466e106..43f4a467 100644 --- a/README.md +++ b/README.md @@ -53,7 +53,7 @@ TensorFlow model scripts and running them the usual way: Note: The version of the ngraph-tensorflow-bridge is not going to be exactly the same as when you build from source. This is due to delay in the source release and publishing the corresponding Python wheel. -### Option 2: Build nGraph bridge from source using TensorFlow source +### Option 2: Build nGraph bridge from source To use the latest version, or to run unit tests, or if you are planning to contribute, install the nGraph bridge using the TensorFlow source tree as follows: diff --git a/bazel/BUILD b/bazel/BUILD new file mode 100644 index 00000000..fc8bc522 --- /dev/null +++ b/bazel/BUILD @@ -0,0 +1,88 @@ +# ============================================================================== +# Copyright 2019 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ==============================================================================
+load("//:cxx_abi_option.bzl", "CXX_ABI")
+
+cc_binary(
+    name = 'libngraph_bridge.so',
+    srcs = [
+        "src/ngraph_api.cc",
+        "src/ngraph_api.h",
+        "src/ngraph_assign_clusters.cc",
+        "src/ngraph_assign_clusters.h",
+        "src/ngraph_builder.cc",
+        "src/ngraph_builder.h",
+        "src/ngraph_backend_manager.h",
+        "src/ngraph_backend_manager.cc",
+        "src/ngraph_capture_variables.cc",
+        "src/ngraph_capture_variables.h",
+        "src/ngraph_cluster_manager.cc",
+        "src/ngraph_cluster_manager.h",
+        "src/ngraph_conversions.h",
+        "src/ngraph_deassign_clusters.cc",
+        "src/ngraph_deassign_clusters.h",
+        "src/ngraph_encapsulate_clusters.cc",
+        "src/ngraph_encapsulate_clusters.h",
+        "src/ngraph_encapsulate_op.cc",
+        "src/ngraph_freshness_tracker.cc",
+        "src/ngraph_freshness_tracker.h",
+        "src/ngraph_mark_for_clustering.cc",
+        "src/ngraph_mark_for_clustering.h",
+        "src/ngraph_rewrite_for_tracking.cc",
+        "src/ngraph_rewrite_for_tracking.h",
+        "src/ngraph_rewrite_pass.cc",
+        "src/ngraph_tracked_variable.cc",
+        "src/ngraph_utils.cc",
+        "src/ngraph_utils.h",
+        "src/ngraph_version_utils.h",
+        "src/tf_deadness_analysis.cc",
+        "src/tf_deadness_analysis.h",
+        "src/tf_graphcycles.cc",
+        "src/tf_graphcycles.h",
+        "src/version.h",
+        "src/version.cc",
+        "logging/ngraph_log.h",
+        "logging/ngraph_log.cc",
+        "logging/tf_graph_writer.h",
+        "logging/tf_graph_writer.cc",
+    ],
+    linkshared = 1,
+    deps = [
+        "@local_config_tf//:libtensorflow_framework",
+        "@local_config_tf//:tf_header_lib",
+        "@ngraph//:ngraph_headers",
+        "@ngraph//:ngraph_core",
+    ],
+    copts = [
+        "-pthread",
+        "-std=c++11",
+        "-D_FORTIFY_SOURCE=2",
+        "-Wformat",
+        "-Wformat-security",
+        "-Wformat",
+        "-fstack-protector-strong",
+        "-D NDEBUG",
+        '-D SHARED_LIB_PREFIX=\\"lib\\"',
+        '-D SHARED_LIB_SUFFIX=\\".so\\"',
+        "-I logging",
+        "-I external/ngraph/src",
+    ] + CXX_ABI,
+    linkopts = [
+        "-Wl,-z,noexecstack",
+        "-Wl,-z,relro",
+        "-Wl,-z,now",
+    ],
+    visibility = ["//visibility:public"],
+)
diff --git a/bazel/README.md b/bazel/README.md
new file mode 100644
index 00000000..de72e8e7
--- /dev/null
+++ b/bazel/README.md
@@ -0,0 +1,45 @@
+# Build nGraph TensorFlow bridge using bazel
+
+This directory contains the scripts necessary to build the nGraph TensorFlow bridge using `bazel`.
+
+:warning: This is experimental and will change over time.
+
+## Prerequisites
+
+Please ensure that bazel and Python are installed on your system and that you are able to build TensorFlow from source (building TensorFlow itself is not needed for building the bridge). Please see the [build preparation] instructions for details.
+
+## Build the C++ library
+
+Go to the ngraph-tf directory and execute these commands to build the C++ library for the nGraph-TensorFlow bridge:
+
+    ./configure_bazel.sh
+    bazel build libngraph_bridge.so
+    bazel build @ngraph//:libinterpreter_backend.so
+
+This will produce the following binary files:
+
+```
+    bazel-bin/libngraph_bridge.so
+    bazel-bin/external/ngraph/libinterpreter_backend.so
+```
+
+### How to use the C++ library
+
+The C++ library `libngraph_bridge.so` can be used with a TensorFlow C++ application as described in the examples/tf_cpp_examples directory (see the [TensorFlow C++ example]). The basic steps are the following:
+
+1. Get a copy of the TensorFlow C++ library by building one. Use [Option 2] to build all the necessary libraries as described in the [TensorFlow C++ example].
+
+2. Replace the `libngraph_bridge.so` in the `build/artifacts/lib` (or `lib64`) directory with the one built by this bazel script.
+
+3. Run `make` to relink and run the example as described in the [TensorFlow C++ example] document (see the sketch below).
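+
+A minimal sketch of steps 2 and 3, assuming ngraph-tf is cloned at `~/ngraph-tf`, the cmake build artifacts live under `~/ngraph-tf/build/artifacts`, and the library directory is `lib` (CentOS uses `lib64`):
+
+    cd ~/ngraph-tf
+    # Keep a copy of the cmake-built bridge, then drop in the bazel-built one
+    cp build/artifacts/lib/libngraph_bridge.so build/artifacts/lib/libngraph_bridge.so.cmake
+    cp bazel-bin/libngraph_bridge.so build/artifacts/lib/
+    # Relink the example against the replaced library
+    cd examples/tf_cpp_examples
+    make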
+
+**Note** Currently only the INTERPRETER backend can be built using bazel. However, backends built with the cmake system are fully binary compatible with the bridge produced by the bazel based build described here.
+
+## Build the Python wheel
+
+Coming soon. For now, please use the cmake based build system described in the [main README].
+
+[build preparation]: ../README.md#prepare-the-build-environment
+[Option 2]: ../README.md#option-2-build-ngraph-bridge-from-source
+[TensorFlow C++ example]: ../examples/tf_cpp_examples/README.md
+[main README]: ../README.md
diff --git a/bazel/WORKSPACE b/bazel/WORKSPACE
new file mode 100644
index 00000000..6be54d10
--- /dev/null
+++ b/bazel/WORKSPACE
@@ -0,0 +1,44 @@
+# ==============================================================================
+# Copyright 2019 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ==============================================================================
+
+workspace(name = "ngraph_bridge")
+load("//tf_configure:tf_configure.bzl", "tf_configure")
+
+tf_configure(
+    name = "local_config_tf",
+)
+
+new_http_archive(
+    name = "ngraph",
+    build_file = "bazel/ngraph.BUILD",
+    sha256 = "1efd0cae2bc8febe40863727fcadf7eecbf7724073c5ddd2c95c6db00dd70985",
+    strip_prefix = "ngraph-0.14.0-rc.1",
+    urls = [
+        "https://mirror.bazel.build/github.com/NervanaSystems/ngraph/archive/v0.14.0-rc.1.tar.gz",
+        "https://github.com/NervanaSystems/ngraph/archive/v0.14.0-rc.1.tar.gz",
+    ],
+)
+
+new_http_archive(
+    name = "nlohmann_json_lib",
+    build_file = "bazel/nlohmann_json.BUILD",
+    sha256 = "e0b1fc6cc6ca05706cce99118a87aca5248bd9db3113e703023d23f044995c1d",
+    strip_prefix = "json-3.5.0",
+    urls = [
+        "https://mirror.bazel.build/github.com/nlohmann/json/archive/v3.5.0.tar.gz",
+        "https://github.com/nlohmann/json/archive/v3.5.0.tar.gz",
+    ],
+)
diff --git a/bazel/ngraph.BUILD b/bazel/ngraph.BUILD
new file mode 100644
index 00000000..3eba4e4d
--- /dev/null
+++ b/bazel/ngraph.BUILD
@@ -0,0 +1,115 @@
+# ==============================================================================
+# Copyright 2019 Intel Corporation
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# ============================================================================== +licenses(["notice"]) +exports_files(["LICENSE"]) + +load("@ngraph_bridge//:cxx_abi_option.bzl", "CXX_ABI") + +cc_library( + name = "ngraph_headers", + hdrs = glob(["src/ngraph/**/*.hpp"]), + visibility = ["//visibility:public"], +) + +cc_library( + name = "ngraph_core", + srcs = glob([ + "src/ngraph/*.cpp", + "src/ngraph/autodiff/*.cpp", + "src/ngraph/builder/*.cpp", + "src/ngraph/descriptor/*.cpp", + "src/ngraph/descriptor/layout/*.cpp", + "src/ngraph/op/*.cpp", + "src/ngraph/op/experimental/generate_mask.cpp", + "src/ngraph/op/experimental/quantized_avg_pool.cpp", + "src/ngraph/op/experimental/quantized_conv.cpp", + "src/ngraph/op/experimental/quantized_conv_bias.cpp", + "src/ngraph/op/experimental/quantized_conv_relu.cpp", + "src/ngraph/op/experimental/quantized_max_pool.cpp", + "src/ngraph/op/experimental/shape_of.cpp", + "src/ngraph/op/util/*.cpp", + "src/ngraph/pattern/*.cpp", + "src/ngraph/pattern/*.hpp", + "src/ngraph/pass/*.cpp", + "src/ngraph/pass/*.hpp", + "src/ngraph/runtime/*.cpp", + "src/ngraph/type/*.cpp", + ], + exclude = [ + "src/ngraph/ngraph.cpp", + ]), + deps = [ + ":ngraph_headers", + "@nlohmann_json_lib", + ], + copts = [ + "-I external/ngraph/src", + "-I external/nlohmann_json_lib/include/", + "-D_FORTIFY_SOURCE=2", + "-Wformat", + "-Wformat-security", + "-Wformat", + "-fstack-protector-strong", + '-D SHARED_LIB_PREFIX=\\"lib\\"', + '-D SHARED_LIB_SUFFIX=\\".so\\"', + '-D NGRAPH_VERSION=\\"0.14.0-rc.1\\"', + "-D NGRAPH_DEX_ONLY", + '-D PROJECT_ROOT_DIR=\\"\\"', + ] + CXX_ABI, + linkopts = [ + "-Wl,-z,noexecstack", + "-Wl,-z,relro", + "-Wl,-z,now", + ], + visibility = ["//visibility:public"], + alwayslink = 1, +) + +cc_binary( + name = 'libinterpreter_backend.so', + srcs = glob([ + "src/ngraph/except.hpp", + "src/ngraph/runtime/interpreter/*.cpp", + "src/ngraph/state/rng_state.cpp", + ]), + deps = [ + ":ngraph_headers", + ":ngraph_core", + ], + copts = [ + "-I external/ngraph/src", + "-I external/ngraph/src/ngraph", + "-I external/nlohmann_json_lib/include/", + "-D_FORTIFY_SOURCE=2", + "-Wformat", + "-Wformat-security", + "-Wformat", + "-fstack-protector-strong", + '-D SHARED_LIB_PREFIX=\\"lib\\"', + '-D SHARED_LIB_SUFFIX=\\".so\\"', + '-D NGRAPH_VERSION=\\"0.14.0-rc.1\\"', + "-D NGRAPH_DEX_ONLY", + '-D PROJECT_ROOT_DIR=\\"\\"', + ] + CXX_ABI, + linkopts = [ + "-Wl,-z,noexecstack", + "-Wl,-z,relro", + "-Wl,-z,now", + ], + linkshared = 1, + visibility = ["//visibility:public"], +) + diff --git a/bazel/nlohmann_json.BUILD b/bazel/nlohmann_json.BUILD new file mode 100644 index 00000000..094e5996 --- /dev/null +++ b/bazel/nlohmann_json.BUILD @@ -0,0 +1,31 @@ +# ============================================================================== +# Copyright 2019 Intel Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +licenses(["notice"]) + +exports_files(["LICENSE.MIT"]) +load("@ngraph_bridge//:cxx_abi_option.bzl", "CXX_ABI") + +cc_library( + name = "nlohmann_json_lib", + hdrs = glob([ + "include/nlohmann/**/*.hpp", + ]), + copts = [ + "-I external/nlohmann_json_lib", + ]+ CXX_ABI, + visibility = ["//visibility:public"], + alwayslink = 1, +) diff --git a/bazel/tf_configure/BUILD b/bazel/tf_configure/BUILD new file mode 100644 index 00000000..e69de29b diff --git a/bazel/tf_configure/BUILD.tpl b/bazel/tf_configure/BUILD.tpl new file mode 100644 index 00000000..bee021f1 --- /dev/null +++ b/bazel/tf_configure/BUILD.tpl @@ -0,0 +1,18 @@ +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = "tf_header_lib", + hdrs = [":tf_header_include"], + includes = ["include"], + visibility = ["//visibility:public"], +) + +cc_library( + name = "libtensorflow_framework", + srcs = [":libtensorflow_framework.so"], + #data = ["lib/libtensorflow_framework.so"], + visibility = ["//visibility:public"], +) + +%{TF_HEADER_GENRULE} +%{TF_SHARED_LIBRARY_GENRULE} \ No newline at end of file diff --git a/bazel/tf_configure/tf_configure.bzl b/bazel/tf_configure/tf_configure.bzl new file mode 100644 index 00000000..3595c7b4 --- /dev/null +++ b/bazel/tf_configure/tf_configure.bzl @@ -0,0 +1,206 @@ +"""Setup TensorFlow as external dependency""" + +_TF_HEADER_DIR = "TF_HEADER_DIR" +_TF_SHARED_LIBRARY_DIR = "TF_SHARED_LIBRARY_DIR" + +def _tpl(repository_ctx, tpl, substitutions = {}, out = None): + if not out: + out = tpl + repository_ctx.template( + out, + Label("//tf_configure:%s.tpl" % tpl), + substitutions, + ) + +def _fail(msg): + """Output failure message when auto configuration fails.""" + red = "\033[0;31m" + no_color = "\033[0m" + fail("%sPython Configuration Error:%s %s\n" % (red, no_color, msg)) + +def _is_windows(repository_ctx): + """Returns true if the host operating system is windows.""" + os_name = repository_ctx.os.name.lower() + if os_name.find("windows") != -1: + return True + return False + +def _execute( + repository_ctx, + cmdline, + error_msg = None, + error_details = None, + empty_stdout_fine = False): + """Executes an arbitrary shell command. + + Helper for executes an arbitrary shell command. + + Args: + repository_ctx: the repository_ctx object. + cmdline: list of strings, the command to execute. + error_msg: string, a summary of the error if the command fails. + error_details: string, details about the error or steps to fix it. + empty_stdout_fine: bool, if True, an empty stdout result is fine, otherwise + it's an error. + + Returns: + The result of repository_ctx.execute(cmdline). + """ + result = repository_ctx.execute(cmdline) + if result.stderr or not (empty_stdout_fine or result.stdout): + _fail("\n".join([ + error_msg.strip() if error_msg else "Repository command failed", + result.stderr.strip(), + error_details if error_details else "", + ])) + return result + +def _read_dir(repository_ctx, src_dir): + """Returns a string with all files in a directory. + + Finds all files inside a directory, traversing subfolders and following + symlinks. The returned string contains the full path of all files + separated by line breaks. + + Args: + repository_ctx: the repository_ctx object. + src_dir: directory to find files from. + + Returns: + A string of all files inside the given dir. 
+ """ + if _is_windows(repository_ctx): + src_dir = src_dir.replace("/", "\\") + find_result = _execute( + repository_ctx, + ["cmd.exe", "/c", "dir", src_dir, "/b", "/s", "/a-d"], + empty_stdout_fine = True, + ) + + # src_files will be used in genrule.outs where the paths must + # use forward slashes. + result = find_result.stdout.replace("\\", "/") + else: + find_result = _execute( + repository_ctx, + ["find", src_dir, "-follow", "-type", "f"], + empty_stdout_fine = True, + ) + result = find_result.stdout + return result + +def _genrule(genrule_name, command, outs): + """Returns a string with a genrule. + + Genrule executes the given command and produces the given outputs. + + Args: + genrule_name: A unique name for genrule target. + command: The command to run. + outs: A list of files generated by this rule. + + Returns: + A genrule target. + """ + return ( + "genrule(\n" + + ' name = "' + + genrule_name + '",\n' + + " outs = [\n" + + outs + + "\n ],\n" + + ' cmd = """\n' + + command + + '\n """,\n' + + ")\n" + ) + +def _norm_path(path): + """Returns a path with '/' and remove the trailing slash.""" + path = path.replace("\\", "/") + if path[-1] == "/": + path = path[:-1] + return path + +def _symlink_genrule_for_dir( + repository_ctx, + src_dir, + dest_dir, + genrule_name, + src_files = [], + dest_files = []): + """Returns a genrule to symlink(or copy if on Windows) a set of files. + + If src_dir is passed, files will be read from the given directory; otherwise + we assume files are in src_files and dest_files. + + Args: + repository_ctx: the repository_ctx object. + src_dir: source directory. + dest_dir: directory to create symlink in. + genrule_name: genrule name. + src_files: list of source files instead of src_dir. + dest_files: list of corresonding destination files. + + Returns: + genrule target that creates the symlinks. + """ + if src_dir != None: + src_dir = _norm_path(src_dir) + dest_dir = _norm_path(dest_dir) + files = "\n".join(sorted(_read_dir(repository_ctx, src_dir).splitlines())) + + # Create a list with the src_dir stripped to use for outputs. + dest_files = files.replace(src_dir, "").splitlines() + src_files = files.splitlines() + command = [] + outs = [] + for i in range(len(dest_files)): + if dest_files[i] != "": + # If we have only one file to link we do not want to use the dest_dir, as + # $(@D) will include the full path to the file. + dest = "$(@D)/" + dest_dir + dest_files[i] if len(dest_files) != 1 else "$(@D)/" + dest_files[i] + + # Copy the headers to create a sandboxable setup. 
+ cmd = "cp -f" + command.append(cmd + ' "%s" "%s"' % (src_files[i], dest)) + outs.append(' "' + dest_dir + dest_files[i] + '",') + genrule = _genrule( + genrule_name, + " && ".join(command), + "\n".join(outs), + ) + return genrule + +def _tf_pip_impl(repository_ctx): + tf_header_dir = repository_ctx.os.environ[_TF_HEADER_DIR] + tf_header_rule = _symlink_genrule_for_dir( + repository_ctx, + tf_header_dir, + "include", + "tf_header_include", + ) + + tf_shared_library_dir = repository_ctx.os.environ[_TF_SHARED_LIBRARY_DIR] + tf_shared_library_path = "%s/libtensorflow_framework.so" % tf_shared_library_dir + tf_shared_library_rule = _symlink_genrule_for_dir( + repository_ctx, + None, + "", + "libtensorflow_framework.so", + [tf_shared_library_path], + ["libtensorflow_framework.so"], + ) + + _tpl(repository_ctx, "BUILD", { + "%{TF_HEADER_GENRULE}": tf_header_rule, + "%{TF_SHARED_LIBRARY_GENRULE}": tf_shared_library_rule, + }) + +tf_configure = repository_rule( + implementation = _tf_pip_impl, + environ = [ + _TF_HEADER_DIR, + _TF_SHARED_LIBRARY_DIR, + ], +) diff --git a/configure_bazel.sh b/configure_bazel.sh new file mode 100755 index 00000000..23963faa --- /dev/null +++ b/configure_bazel.sh @@ -0,0 +1,48 @@ +#!/bin/bash +# Copyright 2018 The TensorFlow Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +function write_to_bazelrc() { + echo "$1" >> .bazelrc +} + +function write_action_env_to_bazelrc() { + write_to_bazelrc "build --action_env $1=\"$2\"" +} + +rm -f .bazelrc +if python -c "import tensorflow" &> /dev/null; then + echo 'using installed tensorflow' +else + pip install tensorflow + pip install tensorflow_estimator +fi + +TF_CFLAGS=( $(python -c 'import tensorflow as tf; print(" ".join(tf.sysconfig.get_compile_flags()))') ) +TF_LFLAGS=( $(python -c 'import tensorflow as tf; print(" ".join(tf.sysconfig.get_link_flags()))') ) +TF_CXX_ABI=( $(python -c 'import tensorflow as tf; print(" ".join(str(tf.__cxx11_abi_flag__)))') ) + + +write_action_env_to_bazelrc "TF_HEADER_DIR" ${TF_CFLAGS:2} +write_action_env_to_bazelrc "TF_SHARED_LIBRARY_DIR" ${TF_LFLAGS:2} + +# Write the CXX ABI to the file +echo "CXX_ABI = ['-D_GLIBCXX_USE_CXX11_ABI=$TF_CXX_ABI']" > cxx_abi_option.bzl + +# Create symbolic links to WORKSPACE and BUILD so that this directory +# is now ready to run bazel based builds +ln -sf bazel/BUILD . +ln -sf bazel/WORKSPACE . +ln -sf bazel/tf_configure . + diff --git a/examples/tf_cpp_examples/Makefile b/examples/tf_cpp_examples/Makefile index b138c789..e0cb946b 100644 --- a/examples/tf_cpp_examples/Makefile +++ b/examples/tf_cpp_examples/Makefile @@ -2,9 +2,21 @@ # Location of the ngraph-tf directory NGRAPH_TF_DIR=../../ -# Set the CXX_ABI_FLAG to 0 if you are using Clang or gcc 4.8 -# Also ensure that TensorFlow is built with the same compiler version. 
-CXX_ABI_FLAG=1 + +define GET_TF_CXX_ABI +import tensorflow as tf +print (" ".join(str(tf.__cxx11_abi_flag__))) +endef + +ifneq ("$(wildcard /etc/redhat-release)","") +LIB_DIR := lib64 +else +LIB_DIR := lib +endif + +# Set the CXX_ABI_FLAG based on what is used to build TensorFlow that is being used +CXX_ABI_FLAG := $(shell python3 -c '$(GET_TF_CXX_ABI)') +$(info CXX_ABI_FLAG $(CXX_ABI_FLAG)) CXXFLAGS=-std=c++11 -Wall -pedantic \ -I $(NGRAPH_TF_DIR)/build/artifacts/include \ @@ -17,7 +29,7 @@ CXXFLAGS=-std=c++11 -Wall -pedantic \ ODIR=obj INCDIR = include LD_FLAGS=\ - -L $(NGRAPH_TF_DIR)/build/artifacts/lib \ + -L $(NGRAPH_TF_DIR)/build/artifacts/$(LIB_DIR) \ -L $(NGRAPH_TF_DIR)/build/artifacts/tensorflow BIN = hello_tf diff --git a/examples/tf_cpp_examples/README.md b/examples/tf_cpp_examples/README.md index eab78beb..63b0b308 100644 --- a/examples/tf_cpp_examples/README.md +++ b/examples/tf_cpp_examples/README.md @@ -6,7 +6,7 @@ The application is linked with TensorFlow C++ library and nGraph-TensorFlow brid ## prerequisites -The example application requires nGraph-TensorFlow bridge to be built first. Build the ngraph-tf by executing `build_gtf.py` as per the [Option 1] instructions in the main readme. All the files needed to build this example application are located in the ngraph-tf/build directory. +The example application requires nGraph-TensorFlow bridge to be built first. Build the ngraph-tf by executing `build_ngtf.py` as per the [Option 2] instructions in the main readme. All the files needed to build this example application are located in the ngraph-tf/build directory. ### Dependencies @@ -22,15 +22,8 @@ The application links with the following dynamic shared object (DSO) libraries 2. libtensorflow_framework.so 3. libtensorflow_cc.so - ## Build the example -### Update the Makefile - -If you are using clang or gcc 4.8 to build ngraph-tf then change the `CXX_ABI_FLAG` value to 0. Also, change the `NGRAPH_TF_DIR` pointing to the location of the `ngraph-tf` on your system as appropriate. - -### Build - Run the `make` command to build the application that will produce the executable: `hello_tf`. ### Run @@ -41,6 +34,9 @@ Before running the application, set the `LD_LIBRARY_PATH` (or `DYLD_LIBRARY_PATH Where `NGRAPH_TF_DIR` should point to the directory where ngraph-tf was cloned. 
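+
+To check that the right libraries are picked up at run time, you can run the following (a sketch; it assumes the `ldd` utility is available):
+
+    ldd ./hello_tf | grep -E 'ngraph|tensorflow'
+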
+:warning: Note: If this example is built on CentOS then the library directory +is `lib64` - so please set the `LD_LIBRARY_PATH` accordingly + Next run the executable `./hello_tf` -[Option 1]: ../../README.md#option-1-use-a-pre-built-ngraph-tensorflow-bridge +[Option 2]: ../../README.md#option-2-build-ngraph-bridge-from-source diff --git a/examples/tf_cpp_examples/hello_tf.cpp b/examples/tf_cpp_examples/hello_tf.cpp index 99b37255..9a57dc05 100644 --- a/examples/tf_cpp_examples/hello_tf.cpp +++ b/examples/tf_cpp_examples/hello_tf.cpp @@ -95,20 +95,18 @@ void RunSimpleNetworkExample() { int main(int argc, char** argv) { PrintAvailableBackends(); - if (SetNGraphBackend("CPU") != tensorflow::Status::OK()) { - std::cout << "Error: Cannot set the backend" << std::endl; - return -1; - } + const char* backend = "INTERPRETER"; - // Run the MatMul example - RunSimpleNetworkExample(); + if (argc > 1) { + backend = argv[1]; + } - // Now set the backend to INTERPRETER - if (SetNGraphBackend("INTERPRETER") != tensorflow::Status::OK()) { - std::cout << "Error: Cannot set the backend" << std::endl; + if (SetNGraphBackend(backend) != tensorflow::Status::OK()) { + std::cout << "Error: Cannot set the backend: " << backend << std::endl; return -1; } + // Run the MatMul example RunSimpleNetworkExample(); return 0; diff --git a/test_ngtf.py b/test_ngtf.py index 957f55e9..2acef917 100755 --- a/test_ngtf.py +++ b/test_ngtf.py @@ -244,6 +244,7 @@ def main(): run_ngtf_gtests(build_dir) # Next run Python unit tests + load_venv(venv_dir) run_ngtf_pytests(venv_dir, build_dir) if (arguments.test_examples): @@ -258,6 +259,5 @@ def main(): os.chdir(root_pwd) - if __name__ == '__main__': main()