[BE] Remove unused channels arg in col2im (#142336)

The number of channels is passed to the col2im kernel/device functions, but it is never used in the computations.
Pull Request resolved: https://github.com/pytorch/pytorch/pull/142336
Approved by: https://github.com/Skylion007, https://github.com/eqy
This commit is contained in:
Nikita Shulga 2024-12-08 08:13:07 -08:00 committed by PyTorch MergeBot
parent 75e72e1408
commit 7435f57f60

View file

@ -107,7 +107,6 @@ __forceinline__ __device__ void col2im_device(
const dt* data_col,
const int64_t height,
const int64_t width,
const int64_t channels,
const int64_t kernel_h,
const int64_t kernel_w,
const int64_t pad_height,
@ -162,7 +161,6 @@ __global__ void col2im_kernel(
const dt* data_col,
const int64_t height,
const int64_t width,
const int64_t channels,
const int64_t kernel_h,
const int64_t kernel_w,
const int64_t pad_height,
@ -180,7 +178,6 @@ __global__ void col2im_kernel(
data_col,
height,
width,
channels,
kernel_h,
kernel_w,
pad_height,
@ -223,7 +220,6 @@ void col2im(
data_col,
height,
width,
channels,
patch_height,
patch_width,
pad_height,
@ -247,7 +243,6 @@ __global__ void col2im_batched_kernel(
const int64_t nbatch,
const int64_t height,
const int64_t width,
const int64_t channels,
const int64_t kernel_h,
const int64_t kernel_w,
const int64_t pad_height,
@ -272,7 +267,6 @@ __global__ void col2im_batched_kernel(
data_col + ibatch * col_batch_stride,
height,
width,
channels,
kernel_h,
kernel_w,
pad_height,
@ -324,7 +318,6 @@ void col2im_batched(
nbatch,
height,
width,
channels,
patch_height,
patch_width,
pad_height,