/* SPDX-License-Identifier: GPL-2.0 WITH Linux-syscall-note */
/*
 * Copyright 2014 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */
#ifndef KFD_IOCTL_H_INCLUDED
#define KFD_IOCTL_H_INCLUDED

#include <drm/drm.h>
#include <linux/ioctl.h>

/*
 * - 1.1 - initial version
 * - 1.3 - Add SMI events support
 * - 1.4 - Indicate new SRAM EDC bit in device properties
 * - 1.5 - Add SVM API
 * - 1.6 - Query clear flags in SVM get_attr API
 * - 1.7 - Checkpoint Restore (CRIU) API
 * - 1.8 - CRIU - Support for SDMA transfers with GTT BOs
 * - 1.9 - Add available memory ioctl
 * - 1.10 - Add SMI profiler event log
 * - 1.11 - Add unified memory for ctx save/restore area
 */
#define KFD_IOCTL_MAJOR_VERSION 1
#define KFD_IOCTL_MINOR_VERSION 11

struct kfd_ioctl_get_version_args {
	__u32 major_version;	/* from KFD */
	__u32 minor_version;	/* from KFD */
};
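
/*
 * Usage sketch (illustrative, not part of the ABI): user space queries
 * the interface version through AMDKFD_IOC_GET_VERSION, defined near
 * the end of this file. Assumes <fcntl.h>, <sys/ioctl.h> and <stdio.h>;
 * error handling is omitted for brevity.
 *
 *	int fd = open("/dev/kfd", O_RDWR);
 *	struct kfd_ioctl_get_version_args args = {0};
 *
 *	if (fd >= 0 && ioctl(fd, AMDKFD_IOC_GET_VERSION, &args) == 0)
 *		printf("KFD ioctl interface %u.%u\n",
 *		       args.major_version, args.minor_version);
 */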

/* For kfd_ioctl_create_queue_args.queue_type. */
#define KFD_IOC_QUEUE_TYPE_COMPUTE 0x0
#define KFD_IOC_QUEUE_TYPE_SDMA 0x1
#define KFD_IOC_QUEUE_TYPE_COMPUTE_AQL 0x2
#define KFD_IOC_QUEUE_TYPE_SDMA_XGMI 0x3

#define KFD_MAX_QUEUE_PERCENTAGE 100
#define KFD_MAX_QUEUE_PRIORITY 15

struct kfd_ioctl_create_queue_args {
	__u64 ring_base_address;	/* to KFD */
	__u64 write_pointer_address;	/* from KFD */
	__u64 read_pointer_address;	/* from KFD */
	__u64 doorbell_offset;		/* from KFD */

	__u32 ring_size;		/* to KFD */
	__u32 gpu_id;			/* to KFD */
	__u32 queue_type;		/* to KFD */
	__u32 queue_percentage;		/* to KFD */
	__u32 queue_priority;		/* to KFD */
	__u32 queue_id;			/* from KFD */

	__u64 eop_buffer_address;	/* to KFD */
	__u64 eop_buffer_size;		/* to KFD */
	__u64 ctx_save_restore_address; /* to KFD */
	__u32 ctx_save_restore_size;	/* to KFD */
	__u32 ctl_stack_size;		/* to KFD */
};
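
/*
 * Usage sketch (illustrative): creating a user-mode compute queue.
 * kfd_fd, gpu_id (taken from the KFD topology under
 * /sys/class/kfd/kfd/topology) and the user-allocated ring buffer
 * (ring_buf, ring_bytes) are assumed to exist; real callers also
 * acquire a VM and mmap the doorbell page, which is omitted here.
 *
 *	struct kfd_ioctl_create_queue_args q = {0};
 *
 *	q.gpu_id = gpu_id;
 *	q.queue_type = KFD_IOC_QUEUE_TYPE_COMPUTE_AQL;
 *	q.ring_base_address = (__u64)(uintptr_t)ring_buf;
 *	q.ring_size = ring_bytes;	// power of two
 *	q.queue_percentage = KFD_MAX_QUEUE_PERCENTAGE;
 *	q.queue_priority = 7;		// 0..KFD_MAX_QUEUE_PRIORITY
 *
 *	if (ioctl(kfd_fd, AMDKFD_IOC_CREATE_QUEUE, &q) == 0)
 *		;	// q.queue_id and q.doorbell_offset are now valid
 */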

struct kfd_ioctl_destroy_queue_args {
	__u32 queue_id;		/* to KFD */
	__u32 pad;
};

struct kfd_ioctl_update_queue_args {
	__u64 ring_base_address;	/* to KFD */

	__u32 queue_id;			/* to KFD */
	__u32 ring_size;		/* to KFD */
	__u32 queue_percentage;		/* to KFD */
	__u32 queue_priority;		/* to KFD */
};

struct kfd_ioctl_set_cu_mask_args {
	__u32 queue_id;		/* to KFD */
	__u32 num_cu_mask;	/* to KFD */
	__u64 cu_mask_ptr;	/* to KFD */
};

struct kfd_ioctl_get_queue_wave_state_args {
	__u64 ctl_stack_address;	/* to KFD */
	__u32 ctl_stack_used_size;	/* from KFD */
	__u32 save_area_used_size;	/* from KFD */
	__u32 queue_id;			/* to KFD */
	__u32 pad;
};

struct kfd_ioctl_get_available_memory_args {
	__u64 available;	/* from KFD */
	__u32 gpu_id;		/* to KFD */
	__u32 pad;
};

/* For kfd_ioctl_set_memory_policy_args.default_policy and alternate_policy */
#define KFD_IOC_CACHE_POLICY_COHERENT 0
#define KFD_IOC_CACHE_POLICY_NONCOHERENT 1

struct kfd_ioctl_set_memory_policy_args {
	__u64 alternate_aperture_base;	/* to KFD */
	__u64 alternate_aperture_size;	/* to KFD */

	__u32 gpu_id;			/* to KFD */
	__u32 default_policy;		/* to KFD */
	__u32 alternate_policy;		/* to KFD */
	__u32 pad;
};

/*
 * All counters are monotonic. They are used for profiling of compute jobs.
 * The profiling is done by userspace.
 *
 * In case of GPU reset, the time may be inaccurate.
 */
struct kfd_ioctl_get_clock_counters_args {
	__u64 gpu_clock_counter;	/* from KFD */
	__u64 cpu_clock_counter;	/* from KFD */
	__u64 system_clock_counter;	/* from KFD */
	__u64 system_clock_freq;	/* from KFD */

	__u32 gpu_id;		/* to KFD */
	__u32 pad;
};
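
/*
 * Usage sketch (illustrative): sampling the correlated clock counters
 * of one GPU. kfd_fd and gpu_id are assumed to exist as above.
 *
 *	struct kfd_ioctl_get_clock_counters_args clk = {0};
 *
 *	clk.gpu_id = gpu_id;
 *	if (ioctl(kfd_fd, AMDKFD_IOC_GET_CLOCK_COUNTERS, &clk) == 0)
 *		printf("gpu %llu cpu %llu sys %llu @ %llu Hz\n",
 *		       (unsigned long long)clk.gpu_clock_counter,
 *		       (unsigned long long)clk.cpu_clock_counter,
 *		       (unsigned long long)clk.system_clock_counter,
 *		       (unsigned long long)clk.system_clock_freq);
 */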

struct kfd_process_device_apertures {
	__u64 lds_base;		/* from KFD */
	__u64 lds_limit;	/* from KFD */
	__u64 scratch_base;	/* from KFD */
	__u64 scratch_limit;	/* from KFD */
	__u64 gpuvm_base;	/* from KFD */
	__u64 gpuvm_limit;	/* from KFD */
	__u32 gpu_id;		/* from KFD */
	__u32 pad;
};

/*
 * AMDKFD_IOC_GET_PROCESS_APERTURES is deprecated. Use
 * AMDKFD_IOC_GET_PROCESS_APERTURES_NEW instead, which supports an
 * unlimited number of GPUs.
 */
#define NUM_OF_SUPPORTED_GPUS 7
struct kfd_ioctl_get_process_apertures_args {
	struct kfd_process_device_apertures
			process_apertures[NUM_OF_SUPPORTED_GPUS];

	/* from KFD, should be in the range [1 - NUM_OF_SUPPORTED_GPUS] */
	__u32 num_of_nodes;
	__u32 pad;
};

struct kfd_ioctl_get_process_apertures_new_args {
	/* User allocated. Pointer to struct kfd_process_device_apertures
	 * filled in by Kernel
	 */
	__u64 kfd_process_device_apertures_ptr;
	/* to KFD - indicates amount of memory present in
	 *  kfd_process_device_apertures_ptr
	 * to userspace - Number of entries filled by KFD.
	 */
	__u32 num_of_nodes;
	__u32 pad;
};

#define MAX_ALLOWED_NUM_POINTS 100
#define MAX_ALLOWED_AW_BUFF_SIZE 4096
#define MAX_ALLOWED_WAC_BUFF_SIZE 128

struct kfd_ioctl_dbg_register_args {
	__u32 gpu_id;		/* to KFD */
	__u32 pad;
};

struct kfd_ioctl_dbg_unregister_args {
	__u32 gpu_id;		/* to KFD */
	__u32 pad;
};

struct kfd_ioctl_dbg_address_watch_args {
	__u64 content_ptr;		/* a pointer to the actual content */
	__u32 gpu_id;			/* to KFD */
	__u32 buf_size_in_bytes;	/* including gpu_id and buf_size */
};

struct kfd_ioctl_dbg_wave_control_args {
	__u64 content_ptr;		/* a pointer to the actual content */
	__u32 gpu_id;			/* to KFD */
	__u32 buf_size_in_bytes;	/* including gpu_id and buf_size */
};

#define KFD_INVALID_FD 0xffffffff

/* Matching HSA_EVENTTYPE */
#define KFD_IOC_EVENT_SIGNAL 0
#define KFD_IOC_EVENT_NODECHANGE 1
#define KFD_IOC_EVENT_DEVICESTATECHANGE 2
#define KFD_IOC_EVENT_HW_EXCEPTION 3
#define KFD_IOC_EVENT_SYSTEM_EVENT 4
#define KFD_IOC_EVENT_DEBUG_EVENT 5
#define KFD_IOC_EVENT_PROFILE_EVENT 6
#define KFD_IOC_EVENT_QUEUE_EVENT 7
#define KFD_IOC_EVENT_MEMORY 8

#define KFD_IOC_WAIT_RESULT_COMPLETE 0
#define KFD_IOC_WAIT_RESULT_TIMEOUT 1
#define KFD_IOC_WAIT_RESULT_FAIL 2

#define KFD_SIGNAL_EVENT_LIMIT 4096

/* For kfd_event_data.hw_exception_data.reset_type. */
#define KFD_HW_EXCEPTION_WHOLE_GPU_RESET 0
#define KFD_HW_EXCEPTION_PER_ENGINE_RESET 1

/* For kfd_event_data.hw_exception_data.reset_cause. */
#define KFD_HW_EXCEPTION_GPU_HANG 0
#define KFD_HW_EXCEPTION_ECC 1

/* For kfd_hsa_memory_exception_data.ErrorType */
#define KFD_MEM_ERR_NO_RAS 0
#define KFD_MEM_ERR_SRAM_ECC 1
#define KFD_MEM_ERR_POISON_CONSUMED 2
#define KFD_MEM_ERR_GPU_HANG 3

struct kfd_ioctl_create_event_args {
	__u64 event_page_offset;	/* from KFD */
	__u32 event_trigger_data;	/* from KFD - signal events only */
	__u32 event_type;		/* to KFD */
	__u32 auto_reset;		/* to KFD */
	__u32 node_id;			/* to KFD - only valid for certain
					   event types */
	__u32 event_id;			/* from KFD */
	__u32 event_slot_index;		/* from KFD */
};
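
/*
 * Usage sketch (illustrative): creating an auto-reset signal event.
 * kfd_fd is the assumed open /dev/kfd descriptor.
 *
 *	struct kfd_ioctl_create_event_args ev = {0};
 *
 *	ev.event_type = KFD_IOC_EVENT_SIGNAL;
 *	ev.auto_reset = 1;
 *	if (ioctl(kfd_fd, AMDKFD_IOC_CREATE_EVENT, &ev) == 0)
 *		;	// ev.event_id identifies the event;
 *			// ev.event_page_offset/event_slot_index locate
 *			// its slot in the signal page
 */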

struct kfd_ioctl_destroy_event_args {
	__u32 event_id;		/* to KFD */
	__u32 pad;
};

struct kfd_ioctl_set_event_args {
	__u32 event_id;		/* to KFD */
	__u32 pad;
};

struct kfd_ioctl_reset_event_args {
	__u32 event_id;		/* to KFD */
	__u32 pad;
};

struct kfd_memory_exception_failure {
	__u32 NotPresent;	/* Page not present or supervisor privilege */
	__u32 ReadOnly;		/* Write access to a read-only page */
	__u32 NoExecute;	/* Execute access to a page marked NX */
	__u32 imprecise;	/* Can't determine the exact fault address */
};

/* memory exception data */
struct kfd_hsa_memory_exception_data {
	struct kfd_memory_exception_failure failure;
	__u64 va;
	__u32 gpu_id;
	__u32 ErrorType; /* 0 = no RAS error,
			  * 1 = ECC_SRAM,
			  * 2 = Link_SYNFLOOD (poison),
			  * 3 = GPU hang (not attributable to a specific cause),
			  * other values reserved
			  */
};

/* hw exception data */
struct kfd_hsa_hw_exception_data {
	__u32 reset_type;
	__u32 reset_cause;
	__u32 memory_lost;
	__u32 gpu_id;
};

/* Event data */
struct kfd_event_data {
	union {
		struct kfd_hsa_memory_exception_data memory_exception_data;
		struct kfd_hsa_hw_exception_data hw_exception_data;
	};				/* From KFD */
	__u64 kfd_event_data_ext;	/* pointer to an extension structure
					   for future exception types */
	__u32 event_id;		/* to KFD */
	__u32 pad;
};

struct kfd_ioctl_wait_events_args {
	__u64 events_ptr;		/* pointed to struct
					   kfd_event_data array, to KFD */
	__u32 num_events;		/* to KFD */
	__u32 wait_for_all;		/* to KFD */
	__u32 timeout;			/* to KFD */
	__u32 wait_result;		/* from KFD */
};
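
/*
 * Usage sketch (illustrative): blocking on the event created above for
 * up to one second. The timeout being in milliseconds is an assumption
 * consistent with existing user-mode drivers.
 *
 *	struct kfd_event_data data = {0};
 *	struct kfd_ioctl_wait_events_args wait = {0};
 *
 *	data.event_id = ev.event_id;
 *	wait.events_ptr = (__u64)(uintptr_t)&data;
 *	wait.num_events = 1;
 *	wait.wait_for_all = 1;
 *	wait.timeout = 1000;
 *
 *	if (ioctl(kfd_fd, AMDKFD_IOC_WAIT_EVENTS, &wait) == 0 &&
 *	    wait.wait_result == KFD_IOC_WAIT_RESULT_COMPLETE)
 *		;	// the event fired
 */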

struct kfd_ioctl_set_scratch_backing_va_args {
	__u64 va_addr;	/* to KFD */
	__u32 gpu_id;	/* to KFD */
	__u32 pad;
};

struct kfd_ioctl_get_tile_config_args {
	/* to KFD: pointer to tile array */
	__u64 tile_config_ptr;
	/* to KFD: pointer to macro tile array */
	__u64 macro_tile_config_ptr;
	/* to KFD: array size allocated by user mode
	 * from KFD: array size filled by kernel
	 */
	__u32 num_tile_configs;
	/* to KFD: array size allocated by user mode
	 * from KFD: array size filled by kernel
	 */
	__u32 num_macro_tile_configs;

	__u32 gpu_id;		/* to KFD */
	__u32 gb_addr_config;	/* from KFD */
	__u32 num_banks;	/* from KFD */
	__u32 num_ranks;	/* from KFD */
	/* struct size can be extended later if needed
	 * without breaking ABI compatibility
	 */
};

struct kfd_ioctl_set_trap_handler_args {
	__u64 tba_addr;		/* to KFD */
	__u64 tma_addr;		/* to KFD */
	__u32 gpu_id;		/* to KFD */
	__u32 pad;
};

struct kfd_ioctl_acquire_vm_args {
	__u32 drm_fd;	/* to KFD */
	__u32 gpu_id;	/* to KFD */
};

/* Allocation flags: memory types */
#define KFD_IOC_ALLOC_MEM_FLAGS_VRAM (1 << 0)
#define KFD_IOC_ALLOC_MEM_FLAGS_GTT (1 << 1)
#define KFD_IOC_ALLOC_MEM_FLAGS_USERPTR (1 << 2)
#define KFD_IOC_ALLOC_MEM_FLAGS_DOORBELL (1 << 3)
#define KFD_IOC_ALLOC_MEM_FLAGS_MMIO_REMAP (1 << 4)
/* Allocation flags: attributes/access options */
#define KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE (1 << 31)
#define KFD_IOC_ALLOC_MEM_FLAGS_EXECUTABLE (1 << 30)
#define KFD_IOC_ALLOC_MEM_FLAGS_PUBLIC (1 << 29)
#define KFD_IOC_ALLOC_MEM_FLAGS_NO_SUBSTITUTE (1 << 28)
#define KFD_IOC_ALLOC_MEM_FLAGS_AQL_QUEUE_MEM (1 << 27)
#define KFD_IOC_ALLOC_MEM_FLAGS_COHERENT (1 << 26)
#define KFD_IOC_ALLOC_MEM_FLAGS_UNCACHED (1 << 25)

/* Allocate memory for later SVM (shared virtual memory) mapping.
 *
 * @va_addr:     virtual address of the memory to be allocated
 *               all later mappings on all GPUs will use this address
 * @size:        size in bytes
 * @handle:      buffer handle returned to user mode, used to refer to
 *               this allocation for mapping, unmapping and freeing
 * @mmap_offset: for CPU mapping of this allocation, offset to use in
 *               mmap call to map this buffer, may or may not be the
 *               same as va_addr depending on the MTYPE and
 *               other allocation flags
 * @gpu_id:      device identifier
 * @flags:       memory type and attributes. See KFD_IOC_ALLOC_MEM_FLAGS above
 */
struct kfd_ioctl_alloc_memory_of_gpu_args {
	__u64 va_addr;		/* to KFD */
	__u64 size;		/* to KFD */
	__u64 handle;		/* from KFD */
	__u64 mmap_offset;	/* to KFD (userptr), from KFD (mmap offset) */
	__u32 gpu_id;		/* to KFD */
	__u32 flags;
};
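
/*
 * Usage sketch (illustrative): allocating device-local (VRAM) memory.
 * Assumes the process GPUVM was already acquired for this gpu_id via
 * AMDKFD_IOC_ACQUIRE_VM with a DRM render-node fd, and that va was
 * chosen inside the gpuvm aperture reported by the aperture ioctls.
 *
 *	struct kfd_ioctl_alloc_memory_of_gpu_args alloc = {0};
 *
 *	alloc.va_addr = va;
 *	alloc.size = size;
 *	alloc.gpu_id = gpu_id;
 *	alloc.flags = KFD_IOC_ALLOC_MEM_FLAGS_VRAM |
 *		      KFD_IOC_ALLOC_MEM_FLAGS_WRITABLE;
 *
 *	if (ioctl(kfd_fd, AMDKFD_IOC_ALLOC_MEMORY_OF_GPU, &alloc) == 0)
 *		;	// alloc.handle refers to the buffer from now on
 */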

/* Free memory allocated with kfd_ioctl_alloc_memory_of_gpu
 *
 * @handle: memory handle returned by alloc
 */
struct kfd_ioctl_free_memory_of_gpu_args {
	__u64 handle;		/* to KFD */
};

/* Map memory to one or more GPUs
 *
 * @handle:                memory handle returned by alloc
 * @device_ids_array_ptr:  array of gpu_ids (__u32 per device)
 * @n_devices:             number of devices in the array
 * @n_success:             number of devices mapped successfully
 *
 * @n_success returns information to the caller how many devices from
 * the start of the array have mapped the buffer successfully, so it
 * could be unmapped on these devices on failure.
 */
struct kfd_ioctl_map_memory_to_gpu_args {
	__u64 handle;			/* to KFD */
	__u64 device_ids_array_ptr;	/* to KFD */
	__u32 n_devices;		/* to KFD */
	__u32 n_success;		/* to/from KFD */
};

/* Unmap memory from one or more GPUs
 *
 * same arguments as for mapping
 */
struct kfd_ioctl_unmap_memory_from_gpu_args {
	__u64 handle;			/* to KFD */
	__u64 device_ids_array_ptr;	/* to KFD */
	__u32 n_devices;		/* to KFD */
	__u32 n_success;		/* to/from KFD */
};
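
/*
 * Usage sketch (illustrative): mapping the allocation above into the
 * GPUVM of a single device, then unmapping it again.
 *
 *	__u32 dev = gpu_id;
 *	struct kfd_ioctl_map_memory_to_gpu_args map = {0};
 *	struct kfd_ioctl_unmap_memory_from_gpu_args unmap = {0};
 *
 *	map.handle = alloc.handle;
 *	map.device_ids_array_ptr = (__u64)(uintptr_t)&dev;
 *	map.n_devices = 1;
 *	ioctl(kfd_fd, AMDKFD_IOC_MAP_MEMORY_TO_GPU, &map);
 *
 *	unmap.handle = alloc.handle;
 *	unmap.device_ids_array_ptr = (__u64)(uintptr_t)&dev;
 *	unmap.n_devices = 1;
 *	ioctl(kfd_fd, AMDKFD_IOC_UNMAP_MEMORY_FROM_GPU, &unmap);
 */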

/* Allocate GWS for specific queue
 *
 * @queue_id:    queue's id that GWS is allocated for
 * @num_gws:     how many GWS to allocate
 * @first_gws:   index of the first GWS allocated.
 *               only support contiguous GWS allocation
 */
struct kfd_ioctl_alloc_queue_gws_args {
	__u32 queue_id;		/* to KFD */
	__u32 num_gws;		/* to KFD */
	__u32 first_gws;	/* from KFD */
	__u32 pad;
};

struct kfd_ioctl_get_dmabuf_info_args {
	__u64 size;		/* from KFD */
	__u64 metadata_ptr;	/* to KFD */
	__u32 metadata_size;	/* to KFD (space allocated by user)
				 * from KFD (actual metadata size)
				 */
	__u32 gpu_id;		/* from KFD */
	__u32 flags;		/* from KFD (KFD_IOC_ALLOC_MEM_FLAGS) */
	__u32 dmabuf_fd;	/* to KFD */
};

struct kfd_ioctl_import_dmabuf_args {
	__u64 va_addr;		/* to KFD */
	__u64 handle;		/* from KFD */
	__u32 gpu_id;		/* to KFD */
	__u32 dmabuf_fd;	/* to KFD */
};

/*
 * KFD SMI (System Management Interface) events
 */
enum kfd_smi_event {
	KFD_SMI_EVENT_NONE = 0, /* not used */
	KFD_SMI_EVENT_VMFAULT = 1, /* event start counting at 1 */
	KFD_SMI_EVENT_THERMAL_THROTTLE = 2,
	KFD_SMI_EVENT_GPU_PRE_RESET = 3,
	KFD_SMI_EVENT_GPU_POST_RESET = 4,
	KFD_SMI_EVENT_MIGRATE_START = 5,
	KFD_SMI_EVENT_MIGRATE_END = 6,
	KFD_SMI_EVENT_PAGE_FAULT_START = 7,
	KFD_SMI_EVENT_PAGE_FAULT_END = 8,
	KFD_SMI_EVENT_QUEUE_EVICTION = 9,
	KFD_SMI_EVENT_QUEUE_RESTORE = 10,
	KFD_SMI_EVENT_UNMAP_FROM_GPU = 11,

	/*
	 * max event number, as a flag bit to get events from all processes,
	 * this requires super user permission, otherwise will not be able to
	 * receive event from any process. Without this flag to receive events
	 * from same process.
	 */
	KFD_SMI_EVENT_ALL_PROCESS = 64
};

enum KFD_MIGRATE_TRIGGERS {
	KFD_MIGRATE_TRIGGER_PREFETCH,
	KFD_MIGRATE_TRIGGER_PAGEFAULT_GPU,
	KFD_MIGRATE_TRIGGER_PAGEFAULT_CPU,
	KFD_MIGRATE_TRIGGER_TTM_EVICTION
};

enum KFD_QUEUE_EVICTION_TRIGGERS {
	KFD_QUEUE_EVICTION_TRIGGER_SVM,
	KFD_QUEUE_EVICTION_TRIGGER_USERPTR,
	KFD_QUEUE_EVICTION_TRIGGER_TTM,
	KFD_QUEUE_EVICTION_TRIGGER_SUSPEND,
	KFD_QUEUE_EVICTION_CRIU_CHECKPOINT,
	KFD_QUEUE_EVICTION_CRIU_RESTORE
};

enum KFD_SVM_UNMAP_TRIGGERS {
	KFD_SVM_UNMAP_TRIGGER_MMU_NOTIFY,
	KFD_SVM_UNMAP_TRIGGER_MMU_NOTIFY_MIGRATE,
	KFD_SVM_UNMAP_TRIGGER_UNMAP_FROM_CPU
};

#define KFD_SMI_EVENT_MASK_FROM_INDEX(i) (1ULL << ((i) - 1))
#define KFD_SMI_EVENT_MSG_SIZE 96

struct kfd_ioctl_smi_events_args {
	__u32 gpuid;	/* to KFD */
	__u32 anon_fd;	/* from KFD */
};
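
/*
 * Usage sketch (illustrative): subscribing to VM-fault events. That
 * the returned anon_fd is enabled by writing a hexadecimal event mask
 * to it is an assumption based on existing user-mode tooling; each
 * subsequent read() yields one event string of at most
 * KFD_SMI_EVENT_MSG_SIZE bytes.
 *
 *	struct kfd_ioctl_smi_events_args smi = {0};
 *	char mask[20], msg[KFD_SMI_EVENT_MSG_SIZE];
 *
 *	smi.gpuid = gpu_id;
 *	if (ioctl(kfd_fd, AMDKFD_IOC_SMI_EVENTS, &smi) == 0) {
 *		snprintf(mask, sizeof(mask), "%llx",
 *			 KFD_SMI_EVENT_MASK_FROM_INDEX(KFD_SMI_EVENT_VMFAULT));
 *		write(smi.anon_fd, mask, strlen(mask));
 *		read(smi.anon_fd, msg, sizeof(msg));
 *	}
 */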

/**************************************************************************************************
 * CRIU IOCTLs (Checkpoint Restore In Userspace)
 *
 * When checkpointing a process, the userspace application will perform:
 * 1. PROCESS_INFO op to determine current process information. This pauses execution and evicts
 *    all the queues.
 * 2. CHECKPOINT op to checkpoint process contents (BOs, queues, events, topology)
 * 3. UNPAUSE op to un-evict all the queues
 *
 * When restoring a process, the CRIU userspace application will perform:
 *
 * 1. RESTORE op to restore process contents
 * 2. RESUME op to start the process
 *
 * Note: Queues are forced into an evicted state after a successful PROCESS_INFO. User-space is
 *       responsible to un-pause the queues during the checkpoint and to un-evict them during
 *       restore.
 */
enum kfd_criu_op {
	KFD_CRIU_OP_PROCESS_INFO,
	KFD_CRIU_OP_CHECKPOINT,
	KFD_CRIU_OP_UNPAUSE,
	KFD_CRIU_OP_RESTORE,
	KFD_CRIU_OP_RESUME,
};

/**
 * kfd_ioctl_criu_args - Arguments to perform a CRIU operation
 * @devices:		[in/out] User pointer to memory location for devices information.
 *			This is an array of type kfd_criu_device_bucket.
 * @bos:		[in/out] User pointer to memory location for BOs information.
 *			This is an array of type kfd_criu_bo_bucket.
 * @priv_data:		[in/out] User pointer to memory location for private data
 * @priv_data_size:	[in/out] Size of priv_data in bytes
 * @num_devices:	[in/out] Number of GPUs used by process. Size of @devices array.
 * @num_bos:		[in/out] Number of BOs used by process. Size of @bos array.
 * @num_objects:	[in/out] Number of objects used by process. Objects are opaque to
 *			user application.
 * @pid:		[in/out] PID of the process being checkpointed
 * @op:			[in] Type of operation (kfd_criu_op)
 *
 * Return: 0 on success, -errno on failure
 */
struct kfd_ioctl_criu_args {
	__u64 devices;		/* Used during ops: CHECKPOINT, RESTORE */
	__u64 bos;		/* Used during ops: CHECKPOINT, RESTORE */
	__u64 priv_data;	/* Used during ops: CHECKPOINT, RESTORE */
	__u64 priv_data_size;	/* Used during ops: PROCESS_INFO, RESTORE */
	__u32 num_devices;	/* Used during ops: PROCESS_INFO, RESTORE */
	__u32 num_bos;		/* Used during ops: PROCESS_INFO, RESTORE */
	__u32 num_objects;	/* Used during ops: PROCESS_INFO, RESTORE */
	__u32 pid;		/* Used during ops: PROCESS_INFO, RESUME */
	__u32 op;
};

struct kfd_criu_device_bucket {
	__u32 user_gpu_id;
	__u32 actual_gpu_id;
	__u32 drm_fd;
	__u32 pad;
};

struct kfd_criu_bo_bucket {
	__u64 addr;
	__u64 size;
	__u64 offset;
	__u64 restored_offset;	/* During restore, updated offset for BO */
	__u32 gpu_id;		/* This is the user_gpu_id */
	__u32 alloc_flags;
	__u32 dmabuf_fd;
	__u32 pad;
};

/* Register offset inside the remapped mmio page
 */
enum kfd_mmio_remap {
	KFD_MMIO_REMAP_HDP_MEM_FLUSH_CNTL = 0,
	KFD_MMIO_REMAP_HDP_REG_FLUSH_CNTL = 4,
};

/* Guarantee host access to memory */
#define KFD_IOCTL_SVM_FLAG_HOST_ACCESS 0x00000001
/* Fine grained coherency between all devices with access */
#define KFD_IOCTL_SVM_FLAG_COHERENT 0x00000002
/* Use any GPU in same hive as preferred device */
#define KFD_IOCTL_SVM_FLAG_HIVE_LOCAL 0x00000004
/* GPUs only read, allows replication */
#define KFD_IOCTL_SVM_FLAG_GPU_RO 0x00000008
/* Allow execution on GPU */
#define KFD_IOCTL_SVM_FLAG_GPU_EXEC 0x00000010
/* GPUs mostly read, may allow similar optimizations as RO, but writes fault */
#define KFD_IOCTL_SVM_FLAG_GPU_READ_MOSTLY 0x00000020
/* Keep GPU memory mapping always valid as if XNACK is disable */
#define KFD_IOCTL_SVM_FLAG_GPU_ALWAYS_MAPPED 0x00000040

/**
 * kfd_ioctl_svm_op - SVM ioctl operations
 *
 * @KFD_IOCTL_SVM_OP_SET_ATTR: Modify one or more attributes
 * @KFD_IOCTL_SVM_OP_GET_ATTR: Query one or more attributes
 */
enum kfd_ioctl_svm_op {
	KFD_IOCTL_SVM_OP_SET_ATTR,
	KFD_IOCTL_SVM_OP_GET_ATTR
};

/** kfd_ioctl_svm_location - Enum for preferred and prefetch locations
 *
 * GPU IDs are used to specify GPUs as preferred and prefetch locations.
 * Below definitions are used for system memory or for leaving the preferred
 * location unspecified.
 */
enum kfd_ioctl_svm_location {
	KFD_IOCTL_SVM_LOCATION_SYSMEM = 0,
	KFD_IOCTL_SVM_LOCATION_UNDEFINED = 0xffffffff
};

/**
 * kfd_ioctl_svm_attr_type - SVM attribute types
 *
 * @KFD_IOCTL_SVM_ATTR_PREFERRED_LOC: gpuid of the preferred location, 0 for
 *                                    system memory
 * @KFD_IOCTL_SVM_ATTR_PREFETCH_LOC: gpuid of the prefetch location, 0 for
 *                                   system memory. Setting this triggers an
 *                                   immediate prefetch (migration).
 * @KFD_IOCTL_SVM_ATTR_ACCESS:
 * @KFD_IOCTL_SVM_ATTR_ACCESS_IN_PLACE:
 * @KFD_IOCTL_SVM_ATTR_NO_ACCESS: specify memory access for the gpuid given
 *                                by the attribute value
 * @KFD_IOCTL_SVM_ATTR_SET_FLAGS: bitmask of flags to set (see
 *                                KFD_IOCTL_SVM_FLAG_...)
 * @KFD_IOCTL_SVM_ATTR_CLR_FLAGS: bitmask of flags to clear
 * @KFD_IOCTL_SVM_ATTR_GRANULARITY: migration granularity
 *                                  (log2 num pages)
 */
enum kfd_ioctl_svm_attr_type {
	KFD_IOCTL_SVM_ATTR_PREFERRED_LOC,
	KFD_IOCTL_SVM_ATTR_PREFETCH_LOC,
	KFD_IOCTL_SVM_ATTR_ACCESS,
	KFD_IOCTL_SVM_ATTR_ACCESS_IN_PLACE,
	KFD_IOCTL_SVM_ATTR_NO_ACCESS,
	KFD_IOCTL_SVM_ATTR_SET_FLAGS,
	KFD_IOCTL_SVM_ATTR_CLR_FLAGS,
	KFD_IOCTL_SVM_ATTR_GRANULARITY
};

/**
 * kfd_ioctl_svm_attribute - Attributes as pairs of type and value
 *
 * The meaning of the @value depends on the attribute type.
 *
 * @type: attribute type (see enum @kfd_ioctl_svm_attr_type)
 * @value: attribute value
 */
struct kfd_ioctl_svm_attribute {
	__u32 type;
	__u32 value;
};

/**
 * kfd_ioctl_svm_args - Arguments for SVM ioctl
 *
 * @op specifies the operation to perform (see enum
 * @kfd_ioctl_svm_op). @start_addr and @size are common for all
 * operations.
 *
 * A variable number of attributes accompany this structure depending
 * on the operation.
 *
 * SET_ATTR sets attributes for all pages in the given range. The
 * attributes are applied to all future mappings of the range. The set
 * of attributes is binding for future page faults and mappings.
 *
 * GET_ATTR returns the intersection of attributes for all pages in
 * the range. Attributes that are not the same for all pages in the
 * range are reported as undefined. The attribute list passed in
 * defines which attributes to query.
 */
struct kfd_ioctl_svm_args {
	__u64 start_addr;
	__u64 size;
	__u32 op;
	__u32 nattr;
	/* Variable length array of attributes */
	struct kfd_ioctl_svm_attribute attrs[];
};
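
/*
 * Usage sketch (illustrative): prefetching an SVM range to one GPU and
 * marking it coherent. The flexible attrs[] array is allocated
 * together with the fixed header; addr and len are assumed to be
 * page-aligned, and <stdlib.h> is assumed for calloc()/free().
 *
 *	size_t sz = sizeof(struct kfd_ioctl_svm_args) +
 *		    2 * sizeof(struct kfd_ioctl_svm_attribute);
 *	struct kfd_ioctl_svm_args *args = calloc(1, sz);
 *
 *	args->start_addr = addr;
 *	args->size = len;
 *	args->op = KFD_IOCTL_SVM_OP_SET_ATTR;
 *	args->nattr = 2;
 *	args->attrs[0].type = KFD_IOCTL_SVM_ATTR_PREFETCH_LOC;
 *	args->attrs[0].value = gpu_id;
 *	args->attrs[1].type = KFD_IOCTL_SVM_ATTR_SET_FLAGS;
 *	args->attrs[1].value = KFD_IOCTL_SVM_FLAG_COHERENT;
 *	ioctl(kfd_fd, AMDKFD_IOC_SVM, args);
 *	free(args);
 */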

/**
 * kfd_ioctl_set_xnack_mode_args - Arguments for set_xnack_mode
 *
 * @xnack_enabled:	[in/out] Whether to enable XNACK mode for this process
 *
 * @xnack_enabled indicates whether recoverable page faults should be
 * enabled for the current process. 0 means disabled, positive means
 * enabled, negative means leave unchanged. If negative, the current
 * mode is returned in @xnack_enabled.
 */
struct kfd_ioctl_set_xnack_mode_args {
	__s32 xnack_enabled;
};

#define AMDKFD_IOCTL_BASE 'K'
#define AMDKFD_IO(nr) _IO(AMDKFD_IOCTL_BASE, nr)
#define AMDKFD_IOR(nr, type) _IOR(AMDKFD_IOCTL_BASE, nr, type)
#define AMDKFD_IOW(nr, type) _IOW(AMDKFD_IOCTL_BASE, nr, type)
#define AMDKFD_IOWR(nr, type) _IOWR(AMDKFD_IOCTL_BASE, nr, type)

#define AMDKFD_IOC_GET_VERSION \
		AMDKFD_IOR(0x01, struct kfd_ioctl_get_version_args)

#define AMDKFD_IOC_CREATE_QUEUE \
		AMDKFD_IOWR(0x02, struct kfd_ioctl_create_queue_args)

#define AMDKFD_IOC_DESTROY_QUEUE \
		AMDKFD_IOWR(0x03, struct kfd_ioctl_destroy_queue_args)

#define AMDKFD_IOC_SET_MEMORY_POLICY \
		AMDKFD_IOW(0x04, struct kfd_ioctl_set_memory_policy_args)

#define AMDKFD_IOC_GET_CLOCK_COUNTERS \
		AMDKFD_IOWR(0x05, struct kfd_ioctl_get_clock_counters_args)

#define AMDKFD_IOC_GET_PROCESS_APERTURES \
		AMDKFD_IOR(0x06, struct kfd_ioctl_get_process_apertures_args)

#define AMDKFD_IOC_UPDATE_QUEUE \
		AMDKFD_IOW(0x07, struct kfd_ioctl_update_queue_args)

#define AMDKFD_IOC_CREATE_EVENT \
		AMDKFD_IOWR(0x08, struct kfd_ioctl_create_event_args)

#define AMDKFD_IOC_DESTROY_EVENT \
		AMDKFD_IOW(0x09, struct kfd_ioctl_destroy_event_args)

#define AMDKFD_IOC_SET_EVENT \
		AMDKFD_IOW(0x0A, struct kfd_ioctl_set_event_args)

#define AMDKFD_IOC_RESET_EVENT \
		AMDKFD_IOW(0x0B, struct kfd_ioctl_reset_event_args)

#define AMDKFD_IOC_WAIT_EVENTS \
		AMDKFD_IOWR(0x0C, struct kfd_ioctl_wait_events_args)

#define AMDKFD_IOC_DBG_REGISTER_DEPRECATED \
		AMDKFD_IOW(0x0D, struct kfd_ioctl_dbg_register_args)

#define AMDKFD_IOC_DBG_UNREGISTER_DEPRECATED \
		AMDKFD_IOW(0x0E, struct kfd_ioctl_dbg_unregister_args)

#define AMDKFD_IOC_DBG_ADDRESS_WATCH_DEPRECATED \
		AMDKFD_IOW(0x0F, struct kfd_ioctl_dbg_address_watch_args)

#define AMDKFD_IOC_DBG_WAVE_CONTROL_DEPRECATED \
		AMDKFD_IOW(0x10, struct kfd_ioctl_dbg_wave_control_args)

#define AMDKFD_IOC_SET_SCRATCH_BACKING_VA \
		AMDKFD_IOWR(0x11, struct kfd_ioctl_set_scratch_backing_va_args)

#define AMDKFD_IOC_GET_TILE_CONFIG \
		AMDKFD_IOWR(0x12, struct kfd_ioctl_get_tile_config_args)

#define AMDKFD_IOC_SET_TRAP_HANDLER \
		AMDKFD_IOW(0x13, struct kfd_ioctl_set_trap_handler_args)

#define AMDKFD_IOC_GET_PROCESS_APERTURES_NEW \
		AMDKFD_IOWR(0x14, \
			struct kfd_ioctl_get_process_apertures_new_args)

#define AMDKFD_IOC_ACQUIRE_VM \
		AMDKFD_IOW(0x15, struct kfd_ioctl_acquire_vm_args)

#define AMDKFD_IOC_ALLOC_MEMORY_OF_GPU \
		AMDKFD_IOWR(0x16, struct kfd_ioctl_alloc_memory_of_gpu_args)

#define AMDKFD_IOC_FREE_MEMORY_OF_GPU \
		AMDKFD_IOW(0x17, struct kfd_ioctl_free_memory_of_gpu_args)

#define AMDKFD_IOC_MAP_MEMORY_TO_GPU \
		AMDKFD_IOWR(0x18, struct kfd_ioctl_map_memory_to_gpu_args)

#define AMDKFD_IOC_UNMAP_MEMORY_FROM_GPU \
		AMDKFD_IOWR(0x19, struct kfd_ioctl_unmap_memory_from_gpu_args)

#define AMDKFD_IOC_SET_CU_MASK \
		AMDKFD_IOW(0x1A, struct kfd_ioctl_set_cu_mask_args)

#define AMDKFD_IOC_GET_QUEUE_WAVE_STATE \
		AMDKFD_IOWR(0x1B, struct kfd_ioctl_get_queue_wave_state_args)

#define AMDKFD_IOC_GET_DMABUF_INFO \
		AMDKFD_IOWR(0x1C, struct kfd_ioctl_get_dmabuf_info_args)

#define AMDKFD_IOC_IMPORT_DMABUF \
		AMDKFD_IOWR(0x1D, struct kfd_ioctl_import_dmabuf_args)

#define AMDKFD_IOC_ALLOC_QUEUE_GWS \
		AMDKFD_IOWR(0x1E, struct kfd_ioctl_alloc_queue_gws_args)

#define AMDKFD_IOC_SMI_EVENTS \
		AMDKFD_IOWR(0x1F, struct kfd_ioctl_smi_events_args)

#define AMDKFD_IOC_SVM AMDKFD_IOWR(0x20, struct kfd_ioctl_svm_args)

#define AMDKFD_IOC_SET_XNACK_MODE \
		AMDKFD_IOWR(0x21, struct kfd_ioctl_set_xnack_mode_args)

#define AMDKFD_IOC_CRIU_OP \
		AMDKFD_IOWR(0x22, struct kfd_ioctl_criu_args)

#define AMDKFD_IOC_AVAILABLE_MEMORY \
		AMDKFD_IOWR(0x23, struct kfd_ioctl_get_available_memory_args)

#define AMDKFD_COMMAND_START 0x01
#define AMDKFD_COMMAND_END 0x24

#endif