Skip to content

Commit 5187d94

Browse files
Silv3S authored and mengfei25 committed
Remove unused variables (intel#2044)
- Remove unused variables
- Add a compiler flag to prevent this in the future
1 parent 6390215 commit 5187d94

File tree

4 files changed

+4
-4
lines changed

4 files changed

+4
-4
lines changed

.github/workflows/_linux_build.yml

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -94,6 +94,7 @@ jobs:
9494
- name: Build Pytorch on ${{ needs.runner.outputs.hostname }}
9595
run: |
9696
export USE_XCCL=1
97+
export IS_XPU_CI=1
9798
# only build pvc for CI
9899
if [ "${{ github.event_name }}" == "pull_request" ];then
99100
export TORCH_XPU_ARCH_LIST='pvc'

CMakeLists.txt

Lines changed: 3 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -25,6 +25,9 @@ set(PROJECT_VERSION "2.3.0")
2525
# Avoid SYCL compiler error
2626
if(NOT WIN32)
2727
string(APPEND CMAKE_CXX_FLAGS " -Wno-error")
28+
if("$ENV{IS_XPU_CI}" STREQUAL "1")
29+
string(APPEND CMAKE_CXX_FLAGS " -Werror=unused-variable")
30+
endif()
2831
endif()
2932

3033
cmake_policy(SET CMP0048 NEW)

src/ATen/native/xpu/sycl/LayerNormKernels.cpp

Lines changed: 0 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -1103,9 +1103,6 @@ void _layer_norm_backward_kernel(
11031103
// affecting performance and behavior.
11041104
const scalar_t* dY_data = dY.const_data_ptr<scalar_t>();
11051105
const scalar_t* X_data = X.const_data_ptr<scalar_t>();
1106-
weight_t* dg_data =
1107-
dgamma.defined() ? dgamma.data_ptr<weight_t>() : nullptr;
1108-
weight_t* db_data = dbeta.defined() ? dbeta.data_ptr<weight_t>() : nullptr;
11091106
Tensor dgamma_blocks;
11101107
Tensor dbeta_blocks;
11111108
weight_t* dgamma_blocks_ptr = nullptr;

src/ATen/native/xpu/sycl/RoiAlignKernels.cpp

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -440,7 +440,6 @@ Tensor roi_align_kernel(
440440

441441
at::Tensor output = at::zeros(
442442
{num_rois, channels, pooled_height, pooled_width}, input.options());
443-
auto output_size = num_rois * pooled_height * pooled_width * channels;
444443

445444
if (output.numel() == 0) {
446445
return output;

0 commit comments

Comments (0)