Lines matching refs: layr
16 #define container_obj(layr) ((struct cfserl *) layr) argument
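The container_obj() macro recovers the private serial-layer state from the generic struct cflayer pointer by a plain cast, which only works if the generic layer is the first member of the private structure. A minimal sketch of the layout implied by the references below (field types and comments are reconstructions, not copied from the source):

	struct cfserl {
		struct cflayer layer;		/* must be first so the cast in container_obj() is valid */
		struct cfpkt *incomplete_frm;	/* partial frame carried over between receive calls */
		spinlock_t sync;		/* protects incomplete_frm */
		bool usestx;			/* frames are delimited with an STX byte */
	};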
29 static int cfserl_receive(struct cflayer *layr, struct cfpkt *pkt);
30 static int cfserl_transmit(struct cflayer *layr, struct cfpkt *pkt);
31 static void cfserl_ctrlcmd(struct cflayer *layr, enum caif_ctrlcmd ctrl,
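These three static functions are the layer's callbacks. The constructor that wires them into the embedded struct cflayer is not part of this listing; the sketch below is a hedged reconstruction of that wiring (the constructor signature and the cflayer member names are assumptions based on the signatures declared above):

	/* Sketch only: constructor body reconstructed, not taken from the listing. */
	struct cflayer *cfserl_create(int instance, bool use_stx)
	{
		struct cfserl *this = kzalloc(sizeof(*this), GFP_ATOMIC);

		if (!this)
			return NULL;
		/* struct cflayer is the first member, so &this->layer == this. */
		this->layer.receive = cfserl_receive;	/* frames coming up from the link */
		this->layer.transmit = cfserl_transmit;	/* packets going down to the link */
		this->layer.ctrlcmd = cfserl_ctrlcmd;	/* control indications passed upward */
		this->usestx = use_stx;
		spin_lock_init(&this->sync);
		return &this->layer;
	}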
55 struct cfserl *layr = container_obj(l); in cfserl_receive() local
66 spin_lock(&layr->sync); in cfserl_receive()
68 if (layr->incomplete_frm != NULL) { in cfserl_receive()
69 layr->incomplete_frm = in cfserl_receive()
70 cfpkt_append(layr->incomplete_frm, newpkt, expectlen); in cfserl_receive()
71 pkt = layr->incomplete_frm; in cfserl_receive()
73 spin_unlock(&layr->sync); in cfserl_receive()
79 layr->incomplete_frm = NULL; in cfserl_receive()
83 if (layr->usestx) { in cfserl_receive()
92 layr->incomplete_frm = NULL; in cfserl_receive()
93 spin_unlock(&layr->sync); in cfserl_receive()
108 if (layr->usestx) in cfserl_receive()
110 layr->incomplete_frm = pkt; in cfserl_receive()
111 spin_unlock(&layr->sync); in cfserl_receive()
126 if (!layr->usestx) { in cfserl_receive()
129 layr->incomplete_frm = NULL; in cfserl_receive()
131 spin_unlock(&layr->sync); in cfserl_receive()
139 if (layr->usestx) in cfserl_receive()
141 layr->incomplete_frm = pkt; in cfserl_receive()
142 spin_unlock(&layr->sync); in cfserl_receive()
156 spin_unlock(&layr->sync); in cfserl_receive()
157 ret = layr->layer.up->receive(layr->layer.up, pkt); in cfserl_receive()
158 spin_lock(&layr->sync); in cfserl_receive()
160 if (layr->usestx) { in cfserl_receive()
175 spin_unlock(&layr->sync); in cfserl_receive()
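Taken together, the references above show the receive path's discipline: layr->sync guards incomplete_frm, a partial frame is appended to and parked in incomplete_frm until a full frame is available, and the lock is dropped around the upward call to layr->layer.up->receive() (lines 156-158) so the upper layer never runs with the spinlock held. A reduced sketch of that pattern; the STX resynchronization and length parsing are omitted, and frame_is_complete() is a hypothetical stand-in for them:

	static int serial_receive_sketch(struct cflayer *l, struct cfpkt *newpkt)
	{
		struct cfserl *layr = container_obj(l);
		struct cfpkt *pkt;
		u16 expectlen = 0;	/* the real code tracks the expected frame length */
		int ret = 0;

		spin_lock(&layr->sync);

		if (layr->incomplete_frm != NULL) {
			/* Glue the new data onto the stored partial frame. */
			pkt = cfpkt_append(layr->incomplete_frm, newpkt, expectlen);
			layr->incomplete_frm = NULL;
			if (pkt == NULL) {
				spin_unlock(&layr->sync);
				return -ENOMEM;
			}
		} else {
			pkt = newpkt;
		}

		if (frame_is_complete(pkt)) {	/* hypothetical helper */
			/* Deliver upward without the spinlock held. */
			spin_unlock(&layr->sync);
			ret = layr->layer.up->receive(layr->layer.up, pkt);
			spin_lock(&layr->sync);
		} else {
			/* Park the partial frame until more bytes arrive. */
			layr->incomplete_frm = pkt;
		}

		spin_unlock(&layr->sync);
		return ret;
	}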
181 struct cfserl *layr = container_obj(layer); in cfserl_transmit() local
184 if (layr->usestx) in cfserl_transmit()
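On the transmit side only the two references above are visible: the private state is recovered with container_obj() and usestx is consulted. A hedged sketch of what that typically amounts to; the STX value, cfpkt_add_head() and the downward call through layer->dn are assumptions, only lines 181 and 184 come from the listing:

	static int cfserl_transmit_sketch(struct cflayer *layer, struct cfpkt *newpkt)
	{
		struct cfserl *layr = container_obj(layer);
		u8 stx = 0x02;	/* assumed STX delimiter value */

		if (layr->usestx)
			/* Prefix the frame with the STX start-of-frame byte. */
			cfpkt_add_head(newpkt, &stx, 1);

		/* Hand the delimited packet down to the link layer. */
		return layer->dn->transmit(layer->dn, newpkt);
	}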
193 static void cfserl_ctrlcmd(struct cflayer *layr, enum caif_ctrlcmd ctrl, in cfserl_ctrlcmd() argument
196 layr->up->ctrlcmd(layr->up, ctrl, phyid); in cfserl_ctrlcmd()
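The ctrlcmd hook does nothing but relay control indications to the layer above. A hedged usage sketch from the point of view of the layer below, where serial_layer is the struct cflayer * returned by the constructor, phyid identifies the physical interface, and CAIF_CTRLCMD_FLOW_OFF_IND is assumed to be a member of the enum caif_ctrlcmd named in the signature:

	/* Link layer signals flow-off; cfserl_ctrlcmd() forwards it unchanged. */
	serial_layer->ctrlcmd(serial_layer, CAIF_CTRLCMD_FLOW_OFF_IND, phyid);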