Skip to content
Merged
Show file tree
Hide file tree
Changes from 13 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .github/scripts/check-ut.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,6 @@ def print_failures(failure_list=None):

def generate_failures_log():
if not failures:
print("No failures found, skipping log file creation.")
return

for case in failures:
Expand Down Expand Up @@ -245,6 +244,8 @@ def determine_category(ut):
return 'op_extended'
elif ut == 'op_transformers':
return 'op_transformers'
elif ut == 'test_xpu':
return 'test_xpu'
elif 'op_ut' in ut:
return 'op_ut'
else:
Expand Down Expand Up @@ -296,7 +297,6 @@ def process_xml_file(xml_file):

def generate_passed_log():
if not passed_cases:
print("No passed cases found, skipping log file creation.")
return

for category, category_passed in passed_by_category.items():
Expand Down
40 changes: 10 additions & 30 deletions .github/scripts/ut_result_check.sh
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,10 @@ check_new_failed() {

# Filter the same content from file_UT as file_known_issue
echo "Filtering $file_known_issue for $file_UT"
if grep -q $'\r' "$file_UT"; then
echo "Detected log from windows"
sed -i 's/\r$//' "$file_UT"
fi
grep -vFxf "$file_known_issue" "$file_UT" > "$output_file"

echo -e "\n\033[1;31m[New failed cases Summary]\033[0m"
Expand Down Expand Up @@ -54,6 +58,10 @@ check_passed_known_issues() {
return 1
fi
echo "Checking for known issues that are now passing in $file_passed_UT"
if grep -q $'\r' "$file_passed_UT"; then
echo "Detected log from windows"
sed -i 's/\r$//' "$file_passed_UT"
fi
grep -Fxf "$file_passed_UT" "$file_known_issue" > "$output_file"
echo -e "\n\033[1;32m[New passed cases Summary]\033[0m"
if [[ -s "$output_file" ]]; then
Expand All @@ -74,6 +82,7 @@ check_test_cases() {
["op_regression_dev1"]=1
["op_transformers"]=237
["op_ut"]=120408
["test_xpu"]=69
)

if [[ ! -f "$log_file" ]]; then
Expand Down Expand Up @@ -115,7 +124,7 @@ check_test_cases() {
}


if [[ "${ut_suite}" == 'op_regression' || "${ut_suite}" == 'op_regression_dev1' || "${ut_suite}" == 'op_extended' || "${ut_suite}" == 'op_transformers' || "${ut_suite}" == 'op_ut' ]]; then
if [[ "${ut_suite}" == 'op_regression' || "${ut_suite}" == 'op_regression_dev1' || "${ut_suite}" == 'op_extended' || "${ut_suite}" == 'op_transformers' || "${ut_suite}" == 'op_ut' || "${ut_suite}" == 'test_xpu' ]]; then
echo -e "========================================================================="
echo -e "Show Failed cases in ${ut_suite}"
echo -e "========================================================================="
Expand Down Expand Up @@ -188,35 +197,6 @@ if [[ "${ut_suite}" == 'op_regression' || "${ut_suite}" == 'op_regression_dev1'
fi
fi

# Check results of the PyTorch XPU C++ binary unit tests (build/bin/*xpu*, *sycl*):
# per binary, extract FAILED/PASSED case names from its test log, accumulate counts
# into suite-level summary logs, then fail the job if any case failed or none passed.
if [[ "${ut_suite}" == 'torch_xpu' ]]; then
  echo "Pytorch XPU binary UT checking"
  cd ../../pytorch || exit
  for xpu_case in build/bin/*{xpu,sycl}*; do
    # Skip literal unmatched glob patterns (they contain '*') and non-test artifacts.
    if [[ "$xpu_case" != *"*"* && "$xpu_case" != *.so && "$xpu_case" != *.a ]]; then
      case_name=$(basename "$xpu_case")
      cd ../ut_log/torch_xpu || exit
      # Column 2 of gtest output lines holds the case name.
      grep -E "FAILED" binary_ut_"${ut_suite}"_"${case_name}"_test.log | awk '{print $2}' > ./binary_ut_"${ut_suite}"_"${case_name}"_failed.log
      wc -l < "./binary_ut_${ut_suite}_${case_name}_failed.log" | tee -a ./binary_ut_"${ut_suite}"_failed_summary.log
      grep -E "PASSED|Pass" binary_ut_"${ut_suite}"_"${case_name}"_test.log | awk '{print $2}' > ./binary_ut_"${ut_suite}"_"${case_name}"_passed.log
      wc -l < "./binary_ut_${ut_suite}_${case_name}_passed.log" | tee -a ./binary_ut_"${ut_suite}"_passed_summary.log
      cd - || exit
    fi
  done
  echo -e "========================================================================="
  echo -e "Show Failed cases in ${ut_suite}"
  echo -e "========================================================================="
  cd ../ut_log/torch_xpu || exit
  # FIX: the original cat'ed only "${case_name}" — the value left by the LAST loop
  # iteration — so failures from every other binary were never displayed.
  # Show the failed cases from ALL per-binary logs instead.
  cat ./binary_ut_"${ut_suite}"_*_failed.log
  # Each summary line is a per-binary count; sum them for the suite totals.
  num_failed_binary_ut=$(awk '{sum += $1};END {print sum}' binary_ut_"${ut_suite}"_failed_summary.log)
  num_passed_binary_ut=$(awk '{sum += $1};END {print sum}' binary_ut_"${ut_suite}"_passed_summary.log)
  ((num_failed=num_failed_binary_ut))
  # Fail on any failed case, or when zero cases passed (log missing/empty smells like
  # the binaries never ran).
  if [[ $num_failed -gt 0 ]] || [[ $num_passed_binary_ut -le 0 ]]; then
    echo -e "[ERROR] UT ${ut_suite} test Fail"
    exit 1
  else
    echo -e "[PASS] UT ${ut_suite} test Pass"
  fi
fi
if [[ "${ut_suite}" == 'xpu_distributed' ]]; then
grep -E "^FAILED" xpu_distributed_test.log | awk '{print $2}' > ./"${ut_suite}"_xpu_distributed_test_failed.log
grep "PASSED" xpu_distributed_test.log | awk '{print $1}' > ./"${ut_suite}"_xpu_distributed_test_passed.log
Expand Down
108 changes: 102 additions & 6 deletions .github/workflows/_windows_ut.yml
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,7 @@ jobs:
call conda activate windows_ci
cd ../pytorch
pip install -r requirements.txt
pip install cmake setuptools==72.1.0 clang-format
pip install cmake setuptools clang-format
pip install mkl-static mkl-include
set USE_STATIC_MKL=1
copy "%CONDA_PREFIX%\Library\bin\libiomp*5md.dll" .\torch\lib
Expand All @@ -123,7 +123,7 @@ jobs:
set CMAKE_PREFIX_PATH="%CONDA_PREFIX%\Library"
)
python setup.py clean
set MAX_JOBS=4
set MAX_JOBS=32
python setup.py bdist_wheel > build_torch_wheel_log.log
echo "[INFO] begin to install torch whls"
for /r C:\actions-runner\_work\torch-xpu-ops\pytorch\dist %%i in (torch*.whl) do (
Expand All @@ -141,24 +141,22 @@ jobs:
python -c "import torch; print(torch.__config__.show())"
python -c "import torch; print(torch.__config__.parallel_info())"
python -c "import torch; print(torch.__config__.torch.xpu.device_count())"

- name: Upload Windows build log
if: ${{ ! cancelled() }}
uses: actions/upload-artifact@v4
with:
name: Torch-XPU-Windows-Log-${{ github.event.pull_request.number || github.sha }}
path: 'C:\actions-runner\_work\torch-xpu-ops\pytorch\build_torch_wheel_log.log'

- name: Upload Windows binary
if: ${{ ! cancelled() }}
uses: actions/upload-artifact@v4
with:
name: Torch-XPU-Windows-Binary-${{ github.event.pull_request.number || github.sha }}
path: 'C:\actions-runner\_work\torch-xpu-ops\pytorch\dist'

- name: Run XPU OP Extended UT
if: contains(inputs.ut, 'op_extended') || github.event_name == 'schedule'
shell: cmd
continue-on-error: true
run: |
call "C:\ProgramData\miniforge3\Scripts\activate.bat"
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat"
Expand All @@ -168,13 +166,111 @@ jobs:
cd ../pytorch/third_party/torch-xpu-ops/test/xpu/extended/
python run_test_with_skip_mtl.py

if not exist "%GITHUB_WORKSPACE%\ut_log" mkdir "%GITHUB_WORKSPACE%\ut_log"
copy op_extended.xml %GITHUB_WORKSPACE%\ut_log /Y
- name: Run Test XPU UT
if: contains(inputs.ut, 'torch_xpu') || github.event_name == 'schedule'
if: contains(inputs.ut, 'test_xpu') || github.event_name == 'schedule'
shell: cmd
continue-on-error: true
run: |
call "C:\ProgramData\miniforge3\Scripts\activate.bat"
call "C:\Program Files (x86)\Microsoft Visual Studio\2022\BuildTools\VC\Auxiliary\Build\vcvars64.bat"
call conda activate windows_ci
call "C:\Program Files (x86)\Intel\oneAPI\setvars.bat"
cd ../pytorch/third_party/torch-xpu-ops/test/xpu/
python run_test_win_with_skip_mtl.py

if not exist "%GITHUB_WORKSPACE%\ut_log" mkdir "%GITHUB_WORKSPACE%\ut_log"
copy test_xpu.xml %GITHUB_WORKSPACE%\ut_log /Y
- name: UT Test Results Summary
shell: cmd
run: |
call conda activate windows_ci
pip install junitparser
echo "GITHUB_WORKSPACE: %GITHUB_WORKSPACE%"
for %%i in ("%GITHUB_WORKSPACE%\ut_log\*.xml") do (
python .\.github\scripts\check-ut.py "%%i" >> "%GITHUB_STEP_SUMMARY%"
)
@echo off

REM Check the failure logs
if exist "%GITHUB_WORKSPACE%\failures*.log" (
echo Exist Failure logs
echo Found Failure logs as below:
for %%f in ("%GITHUB_WORKSPACE%\failures*.log") do (
echo - %%f
copy "%%f" "%GITHUB_WORKSPACE%\ut_log\"
)
echo Failure logs Copied
) else (
echo No Failure logs
)

REM Copied the passed logs
if exist "passed*.log" (
copy "passed*.log" "%GITHUB_WORKSPACE%\ut_log\"
echo Passed logs Copied
) else (
echo No Passed logs
)

REM Copied the Summary logs
if exist "category*.log" (
copy "category*.log" "%GITHUB_WORKSPACE%\ut_log\"
echo Category logs Copied
) else (
echo No Category logs
)
- name: Upload Inductor XPU UT Log
if: ${{ ! cancelled() }}
uses: actions/upload-artifact@v4
with:
name: Inductor-XPU-UT-Data-${{ github.event.pull_request.number || github.sha }}-Windows
path: "${{ github.workspace }}/ut_log"
if-no-files-found: ignore

summary:
needs: [ut_test]
runs-on: ubuntu-24.04
timeout-minutes: 30
env:
GH_TOKEN: ${{ github.token }}
steps:
- name: Checkout torch-xpu-ops
uses: actions/checkout@v4
- name: Download XPU UT Logs
uses: actions/download-artifact@v4
with:
name: Inductor-XPU-UT-Data-${{ github.event.pull_request.number || github.sha }}-Windows
path: ${{ github.workspace }}/ut_log
- name: Check UT Results
shell: bash
run: |
ls -al ${{ github.workspace }}/ut_log
cd ${{ github.workspace }}/ut_log

# get skipped known issues
count=$(gh api "repos/${{ github.repository }}/issues?labels=skipped" --jq 'length')
if [ "$count" -gt 0 ]; then
echo -e "$count issues with skipped label found"
gh api "repos/${{ github.repository }}/issues?labels=skipped" \
--jq '.[] | select(.pull_request == null) | "Issue #\(.number): \(.title)\n\(.body)\n"' > issues.log
fi

cp ${{ github.workspace }}/.github/scripts/ut_result_check.sh ./
for ut_name in $(echo ${{ inputs.ut }} |sed 's/,/ /g')
do
awk -v r="${ut_name}" 'BEGIN{ print_row = 0 }{
if ( ! ( $0 ~ /[a-zA-Z0-9]/ ) ) { print_row = 0 };
if ( print_row == 1 && $1 ~ r ) { print $0 };
if ( $0 ~ /Cases:/ ) { print_row = 1 };
}' issues.log > Known_issue.log
bash ut_result_check.sh ${ut_name}
done
- name: Upload Inductor XPU UT Log
if: ${{ ! cancelled() }}
uses: actions/upload-artifact@v4
with:
name: Inductor-XPU-UT-Data-${{ github.event.pull_request.number || github.sha }}-Windows
path: ${{ github.workspace }}/ut_log
overwrite: true
2 changes: 1 addition & 1 deletion .github/workflows/nightly_ondemand.yml
Original file line number Diff line number Diff line change
Expand Up @@ -205,7 +205,7 @@ jobs:
needs: [Conditions-Filter]
uses: ./.github/workflows/_windows_ut.yml
with:
ut: 'op_extended,torch_xpu'
ut: 'op_extended,test_xpu'
python: ${{ needs.Conditions-Filter.outputs.python }}
src_changed: false
has_label: true
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/pull.yml
Original file line number Diff line number Diff line change
Expand Up @@ -154,7 +154,7 @@ jobs:
needs: [conditions-filter, preci-lint-check]
uses: ./.github/workflows/_windows_ut.yml
with:
ut: op_extended,torch_xpu
ut: op_extended,test_xpu
runner: Windows_CI
src_changed: ${{ needs.conditions-filter.outputs.src_changed }}
has_label: ${{ needs.conditions-filter.outputs.has_label }}
1 change: 1 addition & 0 deletions test/xpu/extended/run_test_with_skip_mtl.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,5 +19,6 @@

os.environ["PYTORCH_TEST_WITH_SLOW"] = "1"
test_command = ["-k", skip_options, "test_ops_xpu.py", "-v"]
test_command.extend(["--junit-xml", "./op_extended.xml"])
res = pytest.main(test_command)
sys.exit(res)
1 change: 1 addition & 0 deletions test/xpu/run_test_win_with_skip_mtl.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
sys.stdout = StringIO()

test_command = ["-k", skip_options, "../../../../test/test_xpu.py", "-v"]
test_command.extend(["--junit-xml", "./test_xpu.xml"])
res = pytest.main(test_command)

output = sys.stdout.getvalue()
Expand Down