Error: the server doesn't have a resource type "job"

Hello! I'm trying to use ReportPortal on MicroK8s.
I installed ReportPortal and then tried to delete it,
but I can't delete the job.batch resource.
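
For context, the install/uninstall flow was roughly the following (assuming the Helm chart from the reportportal/kubernetes repository; the release name and chart path here are placeholders, not necessarily the exact commands I ran):

helm install reportportal ./reportportal
helm uninstall reportportal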

kubectl get all -A
NAMESPACE        NAME                                          READY   STATUS    RESTARTS   AGE
default          pod/elasticsearch-master-0                    1/1     Running   0          8m2s
default          pod/minio-b56fc68b5-cf22w                     1/1     Running   0          7m49s
default          pod/postgresql-postgresql-0                   1/1     Running   0          8m38s
default          pod/rabbitmq-0                                1/1     Running   0          8m17s
ingress          pod/nginx-ingress-microk8s-controller-sd8vg   1/1     Running   1          4d2h
kube-system      pod/coredns-588fd544bf-m66g9                  1/1     Running   1          4d2h
kube-system      pod/hostpath-provisioner-75fdc8fccd-jmwwr     1/1     Running   1          4d2h
metallb-system   pod/controller-5f98465b6b-nvsj2               1/1     Running   1          4d2h
metallb-system   pod/speaker-twx7q                             1/1     Running   1          4d2h

NAMESPACE     NAME                                    TYPE        CLUSTER-IP       EXTERNAL-IP   PORT(S)                                 AGE
default       service/elasticsearch-master            ClusterIP   10.152.183.85    <none>        9200/TCP,9300/TCP                       8m3s
default       service/elasticsearch-master-headless   ClusterIP   None             <none>        9200/TCP,9300/TCP                       8m3s
default       service/kubernetes                      ClusterIP   10.152.183.1     <none>        443/TCP                                 4d4h
default       service/minio                           ClusterIP   10.152.183.158   <none>        9000/TCP                                7m51s
default       service/postgresql                      ClusterIP   10.152.183.119   <none>        5432/TCP                                8m39s
default       service/postgresql-headless             ClusterIP   None             <none>        5432/TCP                                8m39s
default       service/rabbitmq                        ClusterIP   10.152.183.240   <none>        5672/TCP,4369/TCP,25672/TCP,15672/TCP   8m18s
default       service/rabbitmq-headless               ClusterIP   None             <none>        4369/TCP,5672/TCP,25672/TCP,15672/TCP   8m18s
kube-system   service/kube-dns                        ClusterIP   10.152.183.10    <none>        53/UDP,53/TCP,9153/TCP                  4d2h

NAMESPACE        NAME                                               DESIRED   CURRENT   READY   UP-TO-DATE   AVAILABLE   NODE SELECTOR                 AGE
ingress          daemonset.apps/nginx-ingress-microk8s-controller   1         1         1       1            1           <none>                        4d2h
metallb-system   daemonset.apps/speaker                             1         1         1       1            1           beta.kubernetes.io/os=linux   4d2h

NAMESPACE        NAME                                   READY   UP-TO-DATE   AVAILABLE   AGE
default          deployment.apps/minio                  1/1     1            1           7m50s
kube-system      deployment.apps/coredns                1/1     1            1           4d2h
kube-system      deployment.apps/hostpath-provisioner   1/1     1            1           4d2h
metallb-system   deployment.apps/controller             1/1     1            1           4d2h

NAMESPACE        NAME                                              DESIRED   CURRENT   READY   AGE
default          replicaset.apps/minio-b56fc68b5                   1         1         1       7m50s
kube-system      replicaset.apps/coredns-588fd544bf                1         1         1       4d2h
kube-system      replicaset.apps/hostpath-provisioner-75fdc8fccd   1         1         1       4d2h
metallb-system   replicaset.apps/controller-5f98465b6b             1         1         1       4d2h

NAMESPACE   NAME                                     READY   AGE
default     statefulset.apps/elasticsearch-master    1/1     8m3s
default     statefulset.apps/postgresql-postgresql   1/1     8m39s
default     statefulset.apps/rabbitmq                1/1     8m18s

NAMESPACE   NAME                                COMPLETIONS   DURATION   AGE
default     job.batch/reportportal-migrations   0/1           32m        32m

kubectl delete job.batch\reportportal-migrations --v=6

I0705 15:07:28.586420 12897 loader.go:372] Config loaded from file: /home/apatsev/.kube/config
I0705 15:07:28.597155 12897 discovery.go:214] Invalidating discovery information

F0705 15:07:29.200204 12897 helpers.go:115] error: the server doesn't have a resource type "job"
goroutine 1 [running]:
k8s.io/kubernetes/vendor/k8s.io/klog/v2.stacks(0xc00000e001, 0xc000ba6000, 0x63, 0xe0)
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/k8s.io/klog/v2/klog.go:1021 +0xb9
k8s.io/kubernetes/vendor/k8s.io/klog/v2.(*loggingT).output(0x3054420, 0xc000000003, 0x0, 0x0, 0xc000bd0000, 0x25f1c90, 0xa, 0x73, 0x40e300)
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/k8s.io/klog/v2/klog.go:970 +0x191
k8s.io/kubernetes/vendor/k8s.io/klog/v2.(*loggingT).printDepth(0x3054420, 0xc000000003, 0x0, 0x0, 0x0, 0x0, 0x2, 0xc000b74420, 0x1, 0x1)
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/k8s.io/klog/v2/klog.go:733 +0x16f
k8s.io/kubernetes/vendor/k8s.io/klog/v2.FatalDepth(...)
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/k8s.io/klog/v2/klog.go:1495
k8s.io/kubernetes/vendor/k8s.io/kubectl/pkg/cmd/util.fatal(0xc00053ba80, 0x34, 0x1)
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/k8s.io/kubectl/pkg/cmd/util/helpers.go:93 +0x288
k8s.io/kubernetes/vendor/k8s.io/kubectl/pkg/cmd/util.checkErr(0x2079120, 0xc000b743a0, 0x1f06e70)
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/k8s.io/kubectl/pkg/cmd/util/helpers.go:188 +0x935
k8s.io/kubernetes/vendor/k8s.io/kubectl/pkg/cmd/util.CheckErr(...)
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/k8s.io/kubectl/pkg/cmd/util/helpers.go:115
k8s.io/kubernetes/vendor/k8s.io/kubectl/pkg/cmd/delete.NewCmdDelete.func1(0xc000353080, 0xc00053d5e0, 0x1, 0x2)
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/k8s.io/kubectl/pkg/cmd/delete/delete.go:141 +0x149
k8s.io/kubernetes/vendor/github.com/spf13/cobra.(*Command).execute(0xc000353080, 0xc00053d580, 0x2, 0x2, 0xc000353080, 0xc00053d580)
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/github.com/spf13/cobra/command.go:854 +0x2c2
k8s.io/kubernetes/vendor/github.com/spf13/cobra.(*Command).ExecuteC(0xc0000c4dc0, 0xc00007e180, 0xc00003a0c0, 0x4)
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/github.com/spf13/cobra/command.go:958 +0x375
k8s.io/kubernetes/vendor/github.com/spf13/cobra.(*Command).Execute(...)
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/github.com/spf13/cobra/command.go:895
main.main()
_output/dockerized/go/src/k8s.io/kubernetes/cmd/kubectl/kubectl.go:49 +0x21d

goroutine 6 [chan receive]:
k8s.io/kubernetes/vendor/k8s.io/klog/v2.(*loggingT).flushDaemon(0x3054420)
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/k8s.io/klog/v2/klog.go:1164 +0x8b
created by k8s.io/kubernetes/vendor/k8s.io/klog/v2.init.0
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/k8s.io/klog/v2/klog.go:418 +0xdf

goroutine 9 [select]:
k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0x1f06d90, 0x207a580, 0xc000778000, 0x1, 0xc00009ab40)
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:167 +0x118
k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/util/wait.JitterUntil(0x1f06d90, 0x12a05f200, 0x0, 0x1, 0xc00009ab40)
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:133 +0x98
k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/util/wait.Until(0x1f06d90, 0x12a05f200, 0xc00009ab40)
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/k8s.io/apimachinery/pkg/util/wait/wait.go:90 +0x4d
created by k8s.io/kubernetes/vendor/k8s.io/kubectl/pkg/util/logs.InitLogs
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/k8s.io/kubectl/pkg/util/logs/logs.go:51 +0x96

goroutine 195 [IO wait]:
internal/poll.runtime_pollWait(0x7ffafa3ce920, 0x72, 0xffffffffffffffff)
/usr/local/go/src/runtime/netpoll.go:222 +0x55
internal/poll.(*pollDesc).wait(0xc000c3af18, 0x72, 0x1c00, 0x1c37, 0xffffffffffffffff)
/usr/local/go/src/internal/poll/fd_poll_runtime.go:87 +0x45
internal/poll.(*pollDesc).waitRead(...)
/usr/local/go/src/internal/poll/fd_poll_runtime.go:92
internal/poll.(*FD).Read(0xc000c3af00, 0xc0002b0000, 0x1c37, 0x1c37, 0x0, 0x0, 0x0)
/usr/local/go/src/internal/poll/fd_unix.go:166 +0x1d5
net.(*netFD).Read(0xc000c3af00, 0xc0002b0000, 0x1c37, 0x1c37, 0x13e6, 0xc0002b084c, 0x5)
/usr/local/go/src/net/fd_posix.go:55 +0x4f
net.(*conn).Read(0xc0005aa058, 0xc0002b0000, 0x1c37, 0x1c37, 0x0, 0x0, 0x0)
/usr/local/go/src/net/net.go:183 +0x91
crypto/tls.(*atLeastReader).Read(0xc000116d68, 0xc0002b0000, 0x1c37, 0x1c37, 0x13e6, 0xc000500400, 0x0)
/usr/local/go/src/crypto/tls/conn.go:776 +0x63
bytes.(*Buffer).ReadFrom(0xc0005ce278, 0x2078ee0, 0xc000116d68, 0x40b985, 0x1bcfbc0, 0x1d8a680)
/usr/local/go/src/bytes/buffer.go:204 +0xbe
crypto/tls.(*Conn).readFromUntil(0xc0005ce000, 0x207bc80, 0xc0005aa058, 0x5, 0xc0005aa058, 0x400)
/usr/local/go/src/crypto/tls/conn.go:798 +0xf3
crypto/tls.(*Conn).readRecordOrCCS(0xc0005ce000, 0x0, 0x0, 0x0)
/usr/local/go/src/crypto/tls/conn.go:605 +0x115
crypto/tls.(*Conn).readRecord(...)
/usr/local/go/src/crypto/tls/conn.go:573
crypto/tls.(*Conn).Read(0xc0005ce000, 0xc0005e5000, 0x1000, 0x1000, 0x0, 0x0, 0x0)
/usr/local/go/src/crypto/tls/conn.go:1276 +0x165
bufio.(*Reader).Read(0xc000399980, 0xc0005e21f8, 0x9, 0x9, 0x959c4b, 0xc00068fc78, 0x407005)
/usr/local/go/src/bufio/bufio.go:227 +0x222
io.ReadAtLeast(0x2078d00, 0xc000399980, 0xc0005e21f8, 0x9, 0x9, 0x9, 0xc0007ece10, 0xb9a9cad800, 0xc0007ece10)
/usr/local/go/src/io/io.go:328 +0x87
io.ReadFull(...)
/usr/local/go/src/io/io.go:347
k8s.io/kubernetes/vendor/golang.org/x/net/http2.readFrameHeader(0xc0005e21f8, 0x9, 0x9, 0x2078d00, 0xc000399980, 0x0, 0x0, 0x0, 0x0)
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/golang.org/x/net/http2/frame.go:237 +0x89
k8s.io/kubernetes/vendor/golang.org/x/net/http2.(*Framer).ReadFrame(0xc0005e21c0, 0xc00011cc30, 0x0, 0x0, 0x0)
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/golang.org/x/net/http2/frame.go:492 +0xa5
k8s.io/kubernetes/vendor/golang.org/x/net/http2.(*clientConnReadLoop).run(0xc00068ffa8, 0x0, 0x0)
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/golang.org/x/net/http2/transport.go:1819 +0xd8
k8s.io/kubernetes/vendor/golang.org/x/net/http2.(*ClientConn).readLoop(0xc0005cc180)
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/golang.org/x/net/http2/transport.go:1741 +0x6f
created by k8s.io/kubernetes/vendor/golang.org/x/net/http2.(*Transport).newClientConn
/workspace/src/k8s.io/kubernetes/_output/dockerized/go/src/k8s.io/kubernetes/vendor/golang.org/x/net/http2/transport.go:705 +0x6c5

I think the problem is the backslash in my delete command: the shell strips it, so kubectl receives job.batchreportportal-migrations as a single argument and cannot resolve it as a resource type, which is why the error complains about a resource type "job". I think it's supposed to be one of these instead:

kubectl delete jobs reportportal-migrations
kubectl delete job reportportal-migrations
kubectl delete jobs/reportportal-migrations
kubectl delete job/reportportal-migrations
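
If the delete still fails, one way to double-check (plain kubectl, nothing ReportPortal-specific) is to confirm that the batch API group is served and then delete the job by type and name in the default namespace:

kubectl api-resources --api-group=batch
kubectl delete job reportportal-migrations -n default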