Lines matching refs:resources (identifier cross-reference: each hit gives the source line number, the matching line, and the enclosing function; "argument" or "local" marks how resources is declared there)
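Every hit below dereferences the same ioctl_resources object, so the listing doubles as an inventory of its members. The following reconstruction of struct ioctl_resources is a sketch built from those accesses; the field types are inferred from how each member is used (dma_free_coherent(), le32/le64 comparisons, list iteration) rather than taken from the source.

struct ioctl_resources {
	struct list_head dma_xfers;	/* dma_xfer trackers, torn down by free_dma_xfers() */
	void *buf;			/* coherent req/rsp queue memory from encode_activate() */
	void *rsp_q_base;		/* response queue, carved from the tail of buf */
	dma_addr_t dma_addr;
	u32 total_size;
	u32 nelem;
	u32 dbc_id;
	u32 status;
	void *trans_hdr;		/* saved DMA transaction, re-encoded on continuation */
	u32 dma_chunk_id;		/* non-zero while a DMA continuation is open */
	u64 xferred_dma_size;		/* bytes already described by earlier messages */
};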

297 static void save_dbc_buf(struct qaic_device *qdev, struct ioctl_resources *resources,  in save_dbc_buf()  argument
300 u32 dbc_id = resources->dbc_id; in save_dbc_buf()
302 if (resources->buf) { in save_dbc_buf()
304 qdev->dbc[dbc_id].req_q_base = resources->buf; in save_dbc_buf()
305 qdev->dbc[dbc_id].rsp_q_base = resources->rsp_q_base; in save_dbc_buf()
306 qdev->dbc[dbc_id].dma_addr = resources->dma_addr; in save_dbc_buf()
307 qdev->dbc[dbc_id].total_size = resources->total_size; in save_dbc_buf()
308 qdev->dbc[dbc_id].nelem = resources->nelem; in save_dbc_buf()
311 resources->buf = NULL; in save_dbc_buf()
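Taken together, lines 300-311 show save_dbc_buf() handing the coherent queue allocation over to the DBC the device assigned; clearing resources->buf afterwards is what keeps the common cleanup path from freeing memory the DBC now owns. A sketch assembled from those lines (the second parameter line and whatever sits between lines 308 and 311 in the real function are filled in by assumption):

static void save_dbc_buf(struct qaic_device *qdev, struct ioctl_resources *resources,
			 struct qaic_user *usr)
{
	u32 dbc_id = resources->dbc_id;

	if (resources->buf) {
		/* Hand the request/response queue memory to the assigned DBC. */
		qdev->dbc[dbc_id].req_q_base = resources->buf;
		qdev->dbc[dbc_id].rsp_q_base = resources->rsp_q_base;
		qdev->dbc[dbc_id].dma_addr = resources->dma_addr;
		qdev->dbc[dbc_id].total_size = resources->total_size;
		qdev->dbc[dbc_id].nelem = resources->nelem;
		/* Ownership transferred: free_dbc_buf() must not release it now. */
		resources->buf = NULL;
	}
}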
315 static void free_dbc_buf(struct qaic_device *qdev, struct ioctl_resources *resources) in free_dbc_buf() argument
317 if (resources->buf) in free_dbc_buf()
318 dma_free_coherent(&qdev->pdev->dev, resources->total_size, resources->buf, in free_dbc_buf()
319 resources->dma_addr); in free_dbc_buf()
320 resources->buf = NULL; in free_dbc_buf()
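free_dbc_buf() is visible nearly in full (lines 317-320). It is the undo path for the allocation made in encode_activate(), and it is safe to call more than once because the pointer is cleared, whether here or in save_dbc_buf(). Assembled from the lines above:

static void free_dbc_buf(struct qaic_device *qdev, struct ioctl_resources *resources)
{
	/* Release the queue allocation unless save_dbc_buf() already took ownership. */
	if (resources->buf)
		dma_free_coherent(&qdev->pdev->dev, resources->total_size, resources->buf,
				  resources->dma_addr);
	resources->buf = NULL;
}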
323 static void free_dma_xfers(struct qaic_device *qdev, struct ioctl_resources *resources) in free_dma_xfers() argument
329 list_for_each_entry_safe(xfer, x, &resources->dma_xfers, list) { in free_dma_xfers()
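Line 329 shows free_dma_xfers() walking resources->dma_xfers with the _safe iterator, which is what allows each entry to be unlinked and freed inside the loop. A minimal sketch; the per-transfer teardown is summarized in a comment because struct dma_xfer's fields do not appear in the listing:

static void free_dma_xfers(struct qaic_device *qdev, struct ioctl_resources *resources)
{
	struct dma_xfer *xfer;
	struct dma_xfer *x;

	list_for_each_entry_safe(xfer, x, &resources->dma_xfers, list) {
		list_del(&xfer->list);
		/* Assumed per-transfer teardown: unmap the DMA mapping and
		 * release the user pages pinned by find_and_map_user_pages(),
		 * then free the tracker itself. */
		kfree(xfer);
	}
}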
393 struct ioctl_resources *resources, struct dma_xfer *xfer) in find_and_map_user_pages() argument
403 if (check_add_overflow(in_trans->addr, resources->xferred_dma_size, &xfer_start_addr)) in find_and_map_user_pages()
406 if (in_trans->size < resources->xferred_dma_size) in find_and_map_user_pages()
408 remaining = in_trans->size - resources->xferred_dma_size; in find_and_map_user_pages()
481 struct ioctl_resources *resources, u32 msg_hdr_len, u32 *size, in encode_addr_size_pairs() argument
543 resources->xferred_dma_size += dma_chunk_len; in encode_addr_size_pairs()
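Lines 403-408 and 543 are the two halves of the DMA-continuation accounting: when a transfer does not fit into one wire message, resources->xferred_dma_size remembers how many bytes earlier messages already described, the next pass resumes that far into the user buffer (with an overflow check on the user-supplied address), and encode_addr_size_pairs() advances the counter by however much fit this time. A condensed sketch; the error codes and the dma_chunk_len computation are assumptions:

	u64 xfer_start_addr, remaining;

	/* Resume where the previous message stopped. */
	if (check_add_overflow(in_trans->addr, resources->xferred_dma_size, &xfer_start_addr))
		return -EINVAL;
	if (in_trans->size < resources->xferred_dma_size)
		return -EINVAL;
	remaining = in_trans->size - resources->xferred_dma_size;

	/* ...pin and map up to `remaining` bytes starting at xfer_start_addr,
	 * then emit address/size pairs until the message is full... */

	/* In encode_addr_size_pairs(): record how much this message covered. */
	resources->xferred_dma_size += dma_chunk_len;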
561 u32 *user_len, struct ioctl_resources *resources, struct qaic_user *usr) in encode_dma() argument
586 ret = find_and_map_user_pages(qdev, in_trans, resources, xfer); in encode_dma()
592 ret = encode_addr_size_pairs(xfer, wrappers, resources, msg_hdr_len, &size, &out_trans); in encode_dma()
609 if (resources->dma_chunk_id) { in encode_dma()
610 out_trans->dma_chunk_id = cpu_to_le32(resources->dma_chunk_id); in encode_dma()
612 while (resources->dma_chunk_id == 0) in encode_dma()
613 resources->dma_chunk_id = atomic_inc_return(&usr->chunk_id); in encode_dma()
615 out_trans->dma_chunk_id = cpu_to_le32(resources->dma_chunk_id); in encode_dma()
617 resources->trans_hdr = trans; in encode_dma()
619 list_add(&xfer->list, &resources->dma_xfers); in encode_dma()
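Lines 609-619 are where a multi-message transfer gets its identity: a continuation that is already open reuses its id, otherwise a fresh non-zero id is drawn from the per-user atomic counter (zero is reserved to mean "no continuation open"). The transaction header saved in trans_hdr is what encode_message() re-encodes on the next pass (line 774). A sketch; the condition deciding when a new id is actually needed is not visible above:

	if (resources->dma_chunk_id) {
		/* Later message of an open continuation: keep the same id. */
		out_trans->dma_chunk_id = cpu_to_le32(resources->dma_chunk_id);
	} else {
		/* First message of a transfer that will spill into follow-ups
		 * (exact condition omitted): allocate a non-zero id. */
		while (resources->dma_chunk_id == 0)
			resources->dma_chunk_id = atomic_inc_return(&usr->chunk_id);
		out_trans->dma_chunk_id = cpu_to_le32(resources->dma_chunk_id);
	}

	resources->trans_hdr = trans;	/* resume point for the next message */
	list_add(&xfer->list, &resources->dma_xfers);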
630 u32 *user_len, struct ioctl_resources *resources) in encode_activate() argument
693 resources->buf = buf; in encode_activate()
694 resources->dma_addr = dma_addr; in encode_activate()
695 resources->total_size = size; in encode_activate()
696 resources->nelem = nelem; in encode_activate()
697 resources->rsp_q_base = buf + size - nelem * get_dbc_rsp_elem_size(); in encode_activate()
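Lines 693-697 park the activate transaction's queue allocation in resources rather than in a DBC, because the DBC id is not known until the device replies; decode_activate() finishes the hand-off. The last line also pins down the layout: one coherent buffer holds the request queue followed by the response queue. A sketch, assuming buf, dma_addr, size and nelem come from a dma_alloc_coherent() call earlier in the function (not shown above):

	/*
	 * Layout of the single coherent allocation:
	 *   [ request queue .................. | response queue (nelem entries) ]
	 *   buf                                 buf + size - nelem * rsp_elem_size
	 */
	resources->buf = buf;
	resources->dma_addr = dma_addr;
	resources->total_size = size;
	resources->nelem = nelem;
	resources->rsp_q_base = buf + size - nelem * get_dbc_rsp_elem_size();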
752 struct wrapper_list *wrappers, struct ioctl_resources *resources, in encode_message() argument
773 if (resources->dma_chunk_id) { in encode_message()
774 ret = encode_dma(qdev, resources->trans_hdr, wrappers, &user_len, resources, usr); in encode_message()
796 ret = encode_dma(qdev, trans_hdr, wrappers, &user_len, resources, usr); in encode_message()
799 ret = encode_activate(qdev, trans_hdr, wrappers, &user_len, resources); in encode_message()
820 free_dma_xfers(qdev, resources); in encode_message()
821 free_dbc_buf(qdev, resources); in encode_message()
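Inside encode_message() the resources object shows up in three places: a pending DMA continuation is resumed first by re-encoding the saved trans_hdr (lines 773-774), each user transaction is then dispatched by type (796, 799), and on any failure everything resources tracks is torn down (820-821). A skeleton of that flow; the loop over transactions, the type constants and the out label are assumptions, only the calls shown above come from the listing:

	if (resources->dma_chunk_id) {
		/* Continue the DMA transfer left open by the previous message. */
		ret = encode_dma(qdev, resources->trans_hdr, wrappers, &user_len, resources, usr);
		if (ret)
			goto out;
	}

	/* ...for each transaction header in the user message... */
	switch (trans_hdr->type) {
	case QAIC_TRANS_DMA_XFER_FROM_USR:	/* assumed constant name */
		ret = encode_dma(qdev, trans_hdr, wrappers, &user_len, resources, usr);
		break;
	case QAIC_TRANS_ACTIVATE_FROM_USR:	/* assumed constant name */
		ret = encode_activate(qdev, trans_hdr, wrappers, &user_len, resources);
		break;
	/* ...other transaction types elided... */
	}

out:
	if (ret) {
		/* Nothing was sent: unpin queued DMA transfers and free the
		 * activate queue buffer that never reached a DBC. */
		free_dma_xfers(qdev, resources);
		free_dbc_buf(qdev, resources);
	}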
854 u32 *msg_len, struct ioctl_resources *resources, struct qaic_user *usr) in decode_activate() argument
874 if (!resources->buf) in decode_activate()
892 resources->status = out_trans->status; in decode_activate()
893 resources->dbc_id = out_trans->dbc_id; in decode_activate()
894 save_dbc_buf(qdev, resources, usr); in decode_activate()
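decode_activate() consumes what encode_activate() staged: line 874 rejects an activate response that arrives with no queue buffer pending, and lines 892-894 record the device's verdict before save_dbc_buf() hands the buffer to the DBC the device named. A condensed sketch; the error code and the validation between lines 874 and 892 are assumptions:

	if (!resources->buf)
		/* An activate response without an activate request staged. */
		return -EINVAL;

	/* ...assumed checks on the returned dbc_id and status... */

	resources->status = out_trans->status;
	resources->dbc_id = out_trans->dbc_id;
	save_dbc_buf(qdev, resources, usr);	/* clears resources->buf on hand-off */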
960 struct wire_msg *msg, struct ioctl_resources *resources, in decode_message() argument
993 ret = decode_activate(qdev, trans_hdr, user_msg, &msg_len, resources, usr); in decode_message()
1174 struct manage_msg *user_msg, struct ioctl_resources *resources, in qaic_manage_msg_xfer() argument
1197 ret = encode_message(qdev, user_msg, wrappers, resources, usr); in qaic_manage_msg_xfer()
1198 if (ret && resources->dma_chunk_id) in qaic_manage_msg_xfer()
1199 ret = abort_dma_cont(qdev, wrappers, resources->dma_chunk_id); in qaic_manage_msg_xfer()
1227 free_dma_xfers(qdev, resources); in qaic_manage_msg_xfer()
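Lines 1197-1199 cover the failure path of a continued transfer: if encoding a follow-up message fails while a chunk id is open, the device is still waiting for the rest of the data, so qaic_manage_msg_xfer() sends an abort for that chunk instead of silently dropping it; line 1227 then releases the DMA trackers once the exchange, successful or not, is over. A sketch of that sequence (the label and the elided send/receive step are assumptions):

	ret = encode_message(qdev, user_msg, wrappers, resources, usr);
	if (ret && resources->dma_chunk_id)
		/* The device is mid-continuation: tell it to drop the chunk
		 * rather than wait for data that will never arrive. */
		ret = abort_dma_cont(qdev, wrappers, resources->dma_chunk_id);
	if (ret)
		goto encode_failed;

	/* ...send the wire message and wait for the device's response... */

	free_dma_xfers(qdev, resources);	/* trackers are not needed past this point */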
1243 struct ioctl_resources resources; in qaic_manage() local
1247 memset(&resources, 0, sizeof(struct ioctl_resources)); in qaic_manage()
1249 INIT_LIST_HEAD(&resources.dma_xfers); in qaic_manage()
1256 ret = qaic_manage_msg_xfer(qdev, usr, user_msg, &resources, &rsp); in qaic_manage()
1266 if (le32_to_cpu(dma_cont->dma_chunk_id) == resources.dma_chunk_id && in qaic_manage()
1267 le64_to_cpu(dma_cont->xferred_size) == resources.xferred_dma_size) { in qaic_manage()
1276 ret = decode_message(qdev, user_msg, rsp, &resources, usr); in qaic_manage()
1279 free_dbc_buf(qdev, &resources); in qaic_manage()
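Lines 1243-1279 show the full lifetime of resources inside qaic_manage(): it lives on the stack, is zeroed and given an empty dma_xfers list, is threaded through every message of a possibly multi-message exchange, and is finally drained by free_dbc_buf() (a no-op if decode_activate() already handed the buffer to a DBC). The continuation check at 1266-1267 insists that the device echo back both the chunk id and the byte count the driver has recorded before the next chunk is sent. A sketch of that flow; the loop label, the type and location of dma_cont inside the response, and the error handling are assumptions:

	struct ioctl_resources resources;
	struct wire_trans_dma_xfer_cont *dma_cont;	/* type name assumed */
	struct wire_msg *rsp;
	int ret;

	memset(&resources, 0, sizeof(struct ioctl_resources));
	INIT_LIST_HEAD(&resources.dma_xfers);

dma_xfer_continue:
	ret = qaic_manage_msg_xfer(qdev, usr, user_msg, &resources, &rsp);
	if (ret)
		return ret;

	/* ...assumed check that rsp actually carries a DMA continuation
	 * request, with dma_cont pointing at it... */
	if (le32_to_cpu(dma_cont->dma_chunk_id) == resources.dma_chunk_id &&
	    le64_to_cpu(dma_cont->xferred_size) == resources.xferred_dma_size)
		goto dma_xfer_continue;	/* build and send the next chunk */

	ret = decode_message(qdev, user_msg, rsp, &resources, usr);

	free_dbc_buf(qdev, &resources);
	return ret;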