Searched refs:fnew (Results 1 – 6 of 6) sorted by relevance
/linux-6.1.9/net/sched/
cls_flow.c
  394  struct flow_filter *fold, *fnew;  in flow_change() local
  433  fnew = kzalloc(sizeof(*fnew), GFP_KERNEL);  in flow_change()
  434  if (!fnew)  in flow_change()
  437  err = tcf_em_tree_validate(tp, tb[TCA_FLOW_EMATCHES], &fnew->ematches);  in flow_change()
  441  err = tcf_exts_init(&fnew->exts, net, TCA_FLOW_ACT, TCA_FLOW_POLICE);  in flow_change()
  445  err = tcf_exts_validate(net, tp, tb, tca[TCA_RATE], &fnew->exts, flags,  in flow_change()
  457  fnew->tp = fold->tp;  in flow_change()
  458  fnew->handle = fold->handle;  in flow_change()
  459  fnew->nkeys = fold->nkeys;  in flow_change()
  460  fnew->keymask = fold->keymask;  in flow_change()
  [all …]
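The flow_change() hits above show the usual tc classifier update pattern: a replacement filter is allocated with kzalloc(), its ematches and extensions are validated, and the surviving state (tp, handle, nkeys, keymask) is copied over from the old filter before the new one is published in its place. A minimal userspace sketch of that allocate-copy-publish step, with the struct trimmed to the fields visible in the excerpt, might look like this:

    /* Simplified, userspace-only sketch of the flow_change() pattern above:
     * build a fresh filter, inherit retained state from the old one, and hand
     * it back for the caller to publish in place of the old filter. */
    #include <stdlib.h>

    struct flow_filter {
        unsigned long handle;
        unsigned int  nkeys;
        unsigned int  keymask;
    };

    static struct flow_filter *filter_replace(const struct flow_filter *fold)
    {
        /* kzalloc(sizeof(*fnew), GFP_KERNEL) in the kernel */
        struct flow_filter *fnew = calloc(1, sizeof(*fnew));

        if (!fnew)
            return NULL;

        if (fold) {
            /* carry identity and key configuration over from the old filter */
            fnew->handle  = fold->handle;
            fnew->nkeys   = fold->nkeys;
            fnew->keymask = fold->keymask;
        }
        return fnew; /* caller swaps fnew in and then frees fold */
    }

Building fnew off to the side is what lets the old filter keep classifying packets until a fully validated replacement is ready to be swapped in.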
cls_basic.c
  179  struct basic_filter *fnew;  in basic_change() local
  194  fnew = kzalloc(sizeof(*fnew), GFP_KERNEL);  in basic_change()
  195  if (!fnew)  in basic_change()
  198  err = tcf_exts_init(&fnew->exts, net, TCA_BASIC_ACT, TCA_BASIC_POLICE);  in basic_change()
  204  err = idr_alloc_u32(&head->handle_idr, fnew, &handle,  in basic_change()
  207  err = idr_alloc_u32(&head->handle_idr, fnew, &handle,  in basic_change()
  212  fnew->handle = handle;  in basic_change()
  213  fnew->pf = alloc_percpu(struct tc_basic_pcnt);  in basic_change()
  214  if (!fnew->pf) {  in basic_change()
  219  err = basic_set_parms(net, tp, fnew, base, tb, tca[TCA_RATE], flags,  in basic_change()
  [all …]
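basic_change() additionally has to reserve a handle for the new filter: idr_alloc_u32() is asked for the exact handle the user supplied, or for the next free one when none was given, and only then are the per-CPU counters allocated and the remaining parameters parsed. A rough userspace stand-in for that reserve-then-attach step, using a plain table where the kernel uses head->handle_idr, could be:

    /* Rough illustration of the handle reservation in basic_change(): take the
     * caller-supplied handle if it is free, otherwise pick the lowest unused
     * one. The fixed-size table is purely for the sketch; the kernel uses an
     * IDR. */
    #define MAX_HANDLES 256

    static void *handle_table[MAX_HANDLES]; /* stand-in for head->handle_idr */

    static int reserve_handle(void *fnew, unsigned int *handle)
    {
        unsigned int h = *handle;

        if (h) { /* caller asked for a specific handle */
            if (h >= MAX_HANDLES || handle_table[h])
                return -1; /* -EEXIST or -ENOSPC in the kernel */
        } else { /* auto-allocate, starting at 1 */
            for (h = 1; h < MAX_HANDLES && handle_table[h]; h++)
                ;
            if (h == MAX_HANDLES)
                return -1;
        }
        handle_table[h] = fnew;
        *handle = h; /* mirrors fnew->handle = handle */
        return 0;
    }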
cls_fw.c
  257  struct fw_filter *pfp, *fnew;  in fw_change() local
  263  fnew = kzalloc(sizeof(struct fw_filter), GFP_KERNEL);  in fw_change()
  264  if (!fnew)  in fw_change()
  267  fnew->id = f->id;  in fw_change()
  268  fnew->res = f->res;  in fw_change()
  269  fnew->ifindex = f->ifindex;  in fw_change()
  270  fnew->tp = f->tp;  in fw_change()
  272  err = tcf_exts_init(&fnew->exts, net, TCA_FW_ACT,  in fw_change()
  275  kfree(fnew);  in fw_change()
  279  err = fw_set_parms(net, tp, fnew, tb, tca, base, flags, extack);  in fw_change()
  [all …]
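fw_change() uses the same clone-the-old-filter idea and also shows the error unwinding: the identity fields are copied from the existing filter f, and if initialising the extensions fails the half-built fnew is freed before the error is returned (the kfree at line 275). A compact sketch of that unwind, with init_exts() as a hypothetical stand-in for tcf_exts_init(), might be:

    /* Compact sketch of the error path visible in fw_change(): free the
     * half-built filter if initialising its extensions fails. init_exts() is
     * a hypothetical stand-in, not a real kernel helper. */
    #include <stdlib.h>

    struct fw_filter {
        int id;
        void *exts;
    };

    int init_exts(struct fw_filter *f); /* assumed helper */

    static int fw_change_sketch(const struct fw_filter *f, struct fw_filter **out)
    {
        struct fw_filter *fnew = calloc(1, sizeof(*fnew));
        int err;

        if (!fnew)
            return -1; /* -ENOBUFS in the kernel */

        fnew->id = f->id; /* clone identity from the old filter */

        err = init_exts(fnew);
        if (err) {
            free(fnew); /* kfree(fnew) on the error path */
            return err;
        }
        *out = fnew;
        return 0;
    }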
cls_flower.c
  2033  struct cls_fl_filter *fnew,  in fl_check_assign_mask() argument
  2046  fnew->mask = rhashtable_lookup_get_insert_fast(&head->ht,  in fl_check_assign_mask()
  2049  if (!fnew->mask) {  in fl_check_assign_mask()
  2063  fnew->mask = newmask;  in fl_check_assign_mask()
  2065  } else if (IS_ERR(fnew->mask)) {  in fl_check_assign_mask()
  2066  ret = PTR_ERR(fnew->mask);  in fl_check_assign_mask()
  2067  } else if (fold && fold->mask != fnew->mask) {  in fl_check_assign_mask()
  2069  } else if (!refcount_inc_not_zero(&fnew->mask->refcnt)) {  in fl_check_assign_mask()
  2121  static int fl_ht_insert_unique(struct cls_fl_filter *fnew,  in fl_ht_insert_unique() argument
  2125  struct fl_flow_mask *mask = fnew->mask;  in fl_ht_insert_unique()
  [all …]
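fl_check_assign_mask() is a lookup-or-insert with reference counting: the new filter's mask is inserted into the head's rhashtable unless an identical mask already exists, in which case the existing mask is reused and a reference is taken, and a mask whose refcount has already dropped to zero is rejected. A simplified, single-threaded sketch of that logic, with find_mask() and insert_mask() as hypothetical stand-ins for the rhashtable calls, could read:

    /* Single-threaded sketch of the mask sharing in fl_check_assign_mask():
     * reuse an existing identical mask and take a reference, or insert the new
     * one. find_mask()/insert_mask() are hypothetical stand-ins for the
     * kernel's rhashtable lookup/insert. */
    struct fl_flow_mask {
        unsigned long key; /* stands in for the real mask contents */
        int refcnt;
    };

    struct fl_flow_mask *find_mask(unsigned long key); /* assumed helpers */
    int insert_mask(struct fl_flow_mask *mask);

    static int check_assign_mask(struct fl_flow_mask **fnew_mask,
                                 struct fl_flow_mask *newmask)
    {
        struct fl_flow_mask *old = find_mask(newmask->key);

        if (!old) { /* no equal mask yet: insert ours */
            if (insert_mask(newmask))
                return -1;
            newmask->refcnt = 1;
            *fnew_mask = newmask;
            return 0;
        }
        if (old->refcnt == 0) /* existing mask is already being torn down */
            return -1;
        old->refcnt++; /* share the existing mask */
        *fnew_mask = old;
        return 0;
    }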
/linux-6.1.9/drivers/hv/
channel_mgmt.c
  585  bool fnew = true;  in vmbus_process_offer() local
  619  fnew = false;  in vmbus_process_offer()
  637  if (fnew) {  in vmbus_process_offer()
  688  wq = fnew ? vmbus_connection.handle_primary_chan_wq :  in vmbus_process_offer()
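Here fnew is just a bool: vmbus_process_offer() starts with it set to true, clears it when the offer matches an already known primary channel, and finally uses it to pick the workqueue that will handle the offer. A tiny illustrative sketch of that flag use, with hypothetical queue names, might be:

    /* Tiny sketch of the flag in vmbus_process_offer(): fnew starts true, is
     * cleared when an existing channel matches the offer, and then selects the
     * workqueue. The queue names are illustrative only. */
    #include <stdbool.h>

    struct work_queue; /* opaque for the sketch */
    extern struct work_queue *primary_wq;
    extern struct work_queue *sub_wq;

    static struct work_queue *pick_queue(bool offer_matches_existing_channel)
    {
        bool fnew = true;

        if (offer_matches_existing_channel)
            fnew = false; /* offer belongs to an existing primary channel */

        return fnew ? primary_wq : sub_wq;
    }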
/linux-6.1.9/mm/
filemap.c
  803  struct folio *fnew = page_folio(new);  in replace_page_cache_page() local
  817  mem_cgroup_migrate(fold, fnew);  in replace_page_cache_page()
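The filemap.c hit is the page-to-folio boundary conversion: replace_page_cache_page() turns both pages into folios with page_folio() near the top, and the rest of the function, including the memcg charge migration, operates on folios. A simplified userspace sketch of that convert-at-the-boundary pattern, with trimmed-down stand-in types, could be:

    /* Userspace sketch of the pattern in replace_page_cache_page(): convert
     * the legacy page arguments to folios once, up front, then do the real
     * work on folios. The types and helpers here are simplified stand-ins. */
    struct folio { int id; };
    struct page { struct folio *head; };

    static struct folio *page_folio_sketch(struct page *p)
    {
        return p->head; /* page_folio() in the kernel */
    }

    static void migrate_charge(struct folio *fold, struct folio *fnew)
    {
        /* mem_cgroup_migrate(fold, fnew) moves the memcg charge in the kernel */
        (void)fold;
        (void)fnew;
    }

    static void replace_page_cache_sketch(struct page *old, struct page *new)
    {
        struct folio *fold = page_folio_sketch(old);
        struct folio *fnew = page_folio_sketch(new);

        migrate_charge(fold, fnew);
        /* ...the page-cache entry swap itself would happen here... */
    }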