try artifacts and remove torch-mlir
saienduri committed Mar 20, 2024
1 parent 224cbaf commit 5ede0cc
Showing 2 changed files with 53 additions and 109 deletions.
123 changes: 24 additions & 99 deletions .github/workflows/test_e2eshark.yml
@@ -18,109 +18,32 @@ concurrency:
cancel-in-progress: true

jobs:
torch-mlir:
strategy:
matrix:
version: [3.11]
os: [nodai-amdgpu-w7900-x86-64]
runs-on: ${{matrix.os}}
# steps:
# - name: Checkout repo
# uses: actions/checkout@v2
# with:
# submodules: true
# repository: llvm/torch-mlir
# path: torch-mlir

# - name: "Setting up Python"
# uses: actions/setup-python@v5
# with:
# python-version: ${{matrix.version}}

# - name: Build torch-mlir
# run: |
# git submodule update --init --progress
# python -m venv mlir_venv
# source mlir_venv/bin/activate
# pip install --upgrade pip
# pip install -r ./externals/llvm-project/mlir/python/requirements.txt
# pip install -r requirements.txt
# pip install -r torchvision-requirements.txt
# pip install "pybind11[global]"
# cmake -S "$repo_root/externals/llvm-project/llvm" -B "$build_dir" \
# -GNinja \
# -DCMAKE_BUILD_TYPE=Release \
# -DPython3_FIND_VIRTUALENV=ONLY \
# -DLLVM_ENABLE_PROJECTS=mlir \
# -DLLVM_EXTERNAL_PROJECTS="torch-mlir" \
# -DLLVM_EXTERNAL_TORCH_MLIR_SOURCE_DIR="$PWD" \
# -DMLIR_ENABLE_BINDINGS_PYTHON=ON \
# -DLLVM_TARGETS_TO_BUILD=host
# cmake --build build
# CMAKE_GENERATOR=Ninja python setup.py bdist_wheel --dist-dir ./torch-mlir-wheel -v
# pip uninstall torch-mlir
# pip install torch-mlir-wheel/torch_mlir-0.0.1-cp311-cp311-linux_x86_64.whl
# deactivate
steps:
- name: "Checking out repository"
uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # v3.5.0
with:
submodules: true
repository: saienduri/torch-mlir-fork
path: torch-mlir

- name: Install python deps (nightly)
run: |
bash build_tools/ci/install_python_deps.sh nightly
python3 -m pip uninstall -y PyYAML
python3 -m pip install PyYAML==6.0.1
python3 -m pip list
working-directory: ./torch-mlir

- name: Build
run: |
bash build_tools/ci/build_posix.sh
working-directory: ./torch-mlir

# iree:
# torch-mlir:
# strategy:
# matrix:
# version: [3.11]
# os: [nodai-amdgpu-w7900-x86-64]
# runs-on: ${{matrix.os}}
# env:
# IREE_VENV_DIR: ${{ github.workspace }}/iree_venv
# steps:
# - name: Checkout repo
# uses: actions/checkout@v2
# - name: "Checking out repository"
# uses: actions/checkout@8f4b7f84864484a7bf31766abe9204da3cbe65b3 # v3.5.0
# with:
# repository: openxla/iree
# path: iree

# - name: "Setting up Python"
# uses: actions/setup-python@v5
# with:
# python-version: ${{matrix.version}}
# submodules: true
# repository: saienduri/torch-mlir-fork
# path: torch-mlir

# - name: "Setup Python venv"
# run: python -m venv ${IREE_VENV_DIR}
# - name: Install python deps (nightly)
# run: |
# bash build_tools/ci/install_python_deps.sh nightly
# python3 -m pip uninstall -y PyYAML
# python3 -m pip install PyYAML==6.0.1
# python3 -m pip list
# working-directory: ./torch-mlir

# - name: Build iree
# - name: Build
# run: |
# source ${IREE_VENV_DIR}/bin/activate
# git submodule foreach git config --get remote.origin.fetch
# pip install --upgrade pip
# pip install -r runtime/bindings/python/iree/runtime/build_requirements.txt
# cmake -G Ninja -B ../iree-build/ -S . \
# -DCMAKE_BUILD_TYPE=RelWithDebInfo \
# -DIREE_ENABLE_ASSERTIONS=ON \
# -DIREE_ENABLE_SPLIT_DWARF=ON \
# -DIREE_ENABLE_THIN_ARCHIVES=ON \
# -DCMAKE_C_COMPILER=clang \
# -DCMAKE_CXX_COMPILER=clang++ \
# -DIREE_ENABLE_LLD=ON
# cmake --build ../iree-build/
# working-directory: ./iree
# bash build_tools/ci/build_posix.sh
# working-directory: ./torch-mlir

turbine:
strategy:
@@ -135,7 +58,7 @@ jobs:
path: turbine

e2eshark:
needs: [turbine, torch-mlir]
needs: [turbine]
strategy:
matrix:
version: [3.11]
@@ -163,9 +86,9 @@ jobs:
source ${E2E_VENV_DIR}/bin/activate
git submodule update --init
pip install --upgrade pip
pip install -r ../torch-mlir/requirements.txt
pip install -r ../torch-mlir/torchvision-requirements.txt
pip install ../torch-mlir/torch-mlir-wheel/torch_mlir-0.0.1-cp311-cp311-linux_x86_64.whl
pip install -r /home/esaimana/torch-mlir/requirements.txt
pip install -r /home/esaimana/torch-mlir/torchvision-requirements.txt
pip install /home/esaimana/torch-mlir/torch-mlir-wheel/torch_mlir-0.0.1-cp311-cp311-linux_x86_64.whl
pip install -r ./e2eshark/requirements.txt
working-directory: ./test-suite

@@ -174,7 +97,7 @@ jobs:
source ${E2E_VENV_DIR}/bin/activate
cd e2eshark
free -mh
python ./run.py -c ../../torch-mlir/build --report --cachedir ~/.cache/huggingface --mode onnx --tests pytorch/models/bert-large-uncased --cleanup --postprocess -v
python ./run.py -r ./test-onnx -c /home/esaimana/torch-mlir/build --report --cachedir ~/.cache/huggingface --mode onnx --tests pytorch/models/bert-large-uncased --cleanup --postprocess --ci -v
working-directory: ./test-suite

- name: Run Turbine Mode
@@ -185,5 +108,7 @@ jobs:
pip install -e ../turbine/models
cd e2eshark
free -mh
python ./run.py -c ../../torch-mlir/build --report --cachedir ~/.cache/huggingface --mode turbine --tests pytorch/models/bert-large-uncased --cleanup --postprocess -v
python ./run.py -r ./test-turbine -c /home/esaimana/torch-mlir/build --report --cachedir ~/.cache/huggingface --mode turbine --tests pytorch/models/bert-large-uncased --cleanup --postprocess --ci -v
working-directory: ./test-suite

- name: Upload Artifacts
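
The rest of the Upload Artifacts step is not shown in this view. A minimal sketch of what such a step could look like, assuming actions/upload-artifact and the dated <mode>_reports directories that run.py --ci creates (see e2eshark/run.py below); the artifact name and path are illustrative placeholders, not values taken from this commit:

  - name: Upload Artifacts
    uses: actions/upload-artifact@v4
    with:
      # Hypothetical values: run.py --ci moves reports to e2eshark/<YYYY-MM-DD>/<mode>_reports/
      name: e2eshark-reports
      path: ./test-suite/e2eshark/*/*_reports/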
39 changes: 29 additions & 10 deletions e2eshark/run.py
@@ -1028,16 +1028,17 @@ def runFrameworkTests(
if args.verbose:
print("Following tests will be run:", uniqueTestList)

for i in range(0, len(tupleOfListArg)):
initializer(TORCH_MLIR_BUILD, IREE_BUILD)
runTest(tupleOfListArg[i])

# with Pool(poolSize, initializer, (TORCH_MLIR_BUILD, IREE_BUILD)) as p:
# print("BEGIN")
# result = p.map_async(runTest, tupleOfListArg)
# result.wait()
# if args.verbose:
# print("All tasks submitted to process pool completed")
if args.ci:
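# CI mode: run each test serially in this process instead of fanning out over the worker Pool below.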
for i in range(0, len(tupleOfListArg)):
initializer(TORCH_MLIR_BUILD, IREE_BUILD)
runTest(tupleOfListArg[i])
else:
with Pool(poolSize, initializer, (TORCH_MLIR_BUILD, IREE_BUILD)) as p:
result = p.map_async(runTest, tupleOfListArg)
result.wait()
if args.verbose:
print("All tasks submitted to process pool completed")

with open('upload_urls.json', 'w') as convert_file:
# convert_file.write(json.dumps(uploadDict._getvalue()))
convert_file.write(
@@ -1396,6 +1397,12 @@ def main():
action="store_true",
default=False,
)
parser.add_argument(
"--ci",
help="Adjusted behavior, so that CI works and artifacts are in right place",
action="store_true",
default=False,
)

args = parser.parse_args()
cache_dir = args.cachedir
@@ -1536,6 +1543,18 @@ def main():
# report generation
if args.report:
generateReport(run_dir, totalTestList, args)

if args.ci:
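# Collect the generated reports into a dated, per-mode directory so the CI artifact upload step can find them.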
today = datetime.date.today()
path = script_dir + "/" + str(today)  # today is a datetime.date; convert to str before concatenating
if not os.path.exists(path):
os.mkdir(path)
mode_path = path + f"/{args.mode}_reports"
if not os.path.exists(mode_path):
os.mkdir(mode_path)
shutil.move(run_dir + "/statusreport.md", mode_path + "/statusreport.md")
shutil.move(run_dir + "/summaryreport.md", mode_path + "/summaryreport.md")
shutil.move(run_dir + "/timereport.md", mode_path + "/timereport.md")

# When all processes are done, print
print("\nCompleted run of e2e shark tests")