
Linux/drivers/usb/gadget/mv_u3d_core.c

  1 /*
  2  * Copyright (C) 2011 Marvell International Ltd. All rights reserved.
  3  *
  4  * This program is free software; you can redistribute it and/or modify it
  5  * under the terms and conditions of the GNU General Public License,
  6  * version 2, as published by the Free Software Foundation.
  7  */
  8 
  9 #include <linux/module.h>
 10 #include <linux/dma-mapping.h>
 11 #include <linux/dmapool.h>
 12 #include <linux/kernel.h>
 13 #include <linux/delay.h>
 14 #include <linux/ioport.h>
 15 #include <linux/sched.h>
 16 #include <linux/slab.h>
 17 #include <linux/errno.h>
 18 #include <linux/timer.h>
 19 #include <linux/list.h>
 20 #include <linux/notifier.h>
 21 #include <linux/interrupt.h>
 22 #include <linux/moduleparam.h>
 23 #include <linux/device.h>
 24 #include <linux/usb/ch9.h>
 25 #include <linux/usb/gadget.h>
 26 #include <linux/pm.h>
 27 #include <linux/io.h>
 28 #include <linux/irq.h>
 29 #include <linux/platform_device.h>
 30 #include <linux/platform_data/mv_usb.h>
 31 #include <linux/clk.h>
 32 
 33 #include "mv_u3d.h"
 34 
 35 #define DRIVER_DESC             "Marvell PXA USB3.0 Device Controller driver"
 36 
 37 static const char driver_name[] = "mv_u3d";
 38 static const char driver_desc[] = DRIVER_DESC;
 39 
 40 static void mv_u3d_nuke(struct mv_u3d_ep *ep, int status);
 41 static void mv_u3d_stop_activity(struct mv_u3d *u3d,
 42                         struct usb_gadget_driver *driver);
 43 
 44 /* for endpoint 0 operations */
 45 static const struct usb_endpoint_descriptor mv_u3d_ep0_desc = {
 46         .bLength =              USB_DT_ENDPOINT_SIZE,
 47         .bDescriptorType =      USB_DT_ENDPOINT,
 48         .bEndpointAddress =     0,
 49         .bmAttributes =         USB_ENDPOINT_XFER_CONTROL,
 50         .wMaxPacketSize =       MV_U3D_EP0_MAX_PKT_SIZE,
 51 };
 52 
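/*
 * ep0 setup helper: the EP_INIT bit is pulsed (set, short delay, cleared)
 * for each direction to reset the endpoint state machine, then the cr1
 * register is programmed with the ep0 max packet size, a burst size of 1,
 * the enable bit and the control endpoint type.
 */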
 53 static void mv_u3d_ep0_reset(struct mv_u3d *u3d)
 54 {
 55         struct mv_u3d_ep *ep;
 56         u32 epxcr;
 57         int i;
 58 
 59         for (i = 0; i < 2; i++) {
 60                 ep = &u3d->eps[i];
 61                 ep->u3d = u3d;
 62 
 63                 /* ep0 ep context, ep0 in and out share the same ep context */
 64                 ep->ep_context = &u3d->ep_context[1];
 65         }
 66 
 67         /* reset ep state machine */
 68         /* reset ep0 out */
 69         epxcr = ioread32(&u3d->vuc_regs->epcr[0].epxoutcr0);
 70         epxcr |= MV_U3D_EPXCR_EP_INIT;
 71         iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxoutcr0);
 72         udelay(5);
 73         epxcr &= ~MV_U3D_EPXCR_EP_INIT;
 74         iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxoutcr0);
 75 
 76         epxcr = ((MV_U3D_EP0_MAX_PKT_SIZE
 77                 << MV_U3D_EPXCR_MAX_PACKET_SIZE_SHIFT)
 78                 | (1 << MV_U3D_EPXCR_MAX_BURST_SIZE_SHIFT)
 79                 | (1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
 80                 | MV_U3D_EPXCR_EP_TYPE_CONTROL);
 81         iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxoutcr1);
 82 
 83         /* reset ep0 in */
 84         epxcr = ioread32(&u3d->vuc_regs->epcr[0].epxincr0);
 85         epxcr |= MV_U3D_EPXCR_EP_INIT;
 86         iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxincr0);
 87         udelay(5);
 88         epxcr &= ~MV_U3D_EPXCR_EP_INIT;
 89         iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxincr0);
 90 
 91         epxcr = ((MV_U3D_EP0_MAX_PKT_SIZE
 92                 << MV_U3D_EPXCR_MAX_PACKET_SIZE_SHIFT)
 93                 | (1 << MV_U3D_EPXCR_MAX_BURST_SIZE_SHIFT)
 94                 | (1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
 95                 | MV_U3D_EPXCR_EP_TYPE_CONTROL);
 96         iowrite32(epxcr, &u3d->vuc_regs->epcr[0].epxincr1);
 97 }
 98 
 99 static void mv_u3d_ep0_stall(struct mv_u3d *u3d)
100 {
101         u32 tmp;
102         dev_dbg(u3d->dev, "%s\n", __func__);
103 
104         /* set TX and RX to stall */
105         tmp = ioread32(&u3d->vuc_regs->epcr[0].epxoutcr0);
106         tmp |= MV_U3D_EPXCR_EP_HALT;
107         iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxoutcr0);
108 
109         tmp = ioread32(&u3d->vuc_regs->epcr[0].epxincr0);
110         tmp |= MV_U3D_EPXCR_EP_HALT;
111         iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxincr0);
112 
113         /* update ep0 state */
114         u3d->ep0_state = MV_U3D_WAIT_FOR_SETUP;
115         u3d->ep0_dir = MV_U3D_EP_DIR_OUT;
116 }
117 
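/*
 * Completion helper: walk the request's TRB list, read the per-endpoint
 * RX/TX status registers, and derive req.actual from the reported
 * remaining lengths. Any status other than success (or a short packet on
 * an OUT endpoint) is reported as -EPROTO.
 */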
118 static int mv_u3d_process_ep_req(struct mv_u3d *u3d, int index,
119         struct mv_u3d_req *curr_req)
120 {
121         struct mv_u3d_trb       *curr_trb;
122         dma_addr_t cur_deq_lo;
123         struct mv_u3d_ep_context        *curr_ep_context;
124         int trb_complete, actual, remaining_length = 0;
125         int direction, ep_num;
126         int retval = 0;
127         u32 tmp, status, length;
128 
129         curr_ep_context = &u3d->ep_context[index];
130         direction = index % 2;
131         ep_num = index / 2;
132 
133         trb_complete = 0;
134         actual = curr_req->req.length;
135 
136         while (!list_empty(&curr_req->trb_list)) {
137                 curr_trb = list_entry(curr_req->trb_list.next,
138                                         struct mv_u3d_trb, trb_list);
139                 if (!curr_trb->trb_hw->ctrl.own) {
140                         dev_err(u3d->dev, "%s, TRB own error!\n",
141                                 u3d->eps[index].name);
142                         return 1;
143                 }
144 
145                 curr_trb->trb_hw->ctrl.own = 0;
146                 if (direction == MV_U3D_EP_DIR_OUT) {
147                         tmp = ioread32(&u3d->vuc_regs->rxst[ep_num].statuslo);
148                         cur_deq_lo =
149                                 ioread32(&u3d->vuc_regs->rxst[ep_num].curdeqlo);
150                 } else {
151                         tmp = ioread32(&u3d->vuc_regs->txst[ep_num].statuslo);
152                         cur_deq_lo =
153                                 ioread32(&u3d->vuc_regs->txst[ep_num].curdeqlo);
154                 }
155 
156                 status = tmp >> MV_U3D_XFERSTATUS_COMPLETE_SHIFT;
157                 length = tmp & MV_U3D_XFERSTATUS_TRB_LENGTH_MASK;
158 
159                 if (status == MV_U3D_COMPLETE_SUCCESS ||
160                         (status == MV_U3D_COMPLETE_SHORT_PACKET &&
161                         direction == MV_U3D_EP_DIR_OUT)) {
162                         remaining_length += length;
163                         actual -= remaining_length;
164                 } else {
165                         dev_err(u3d->dev,
166                                 "complete_tr error: ep=%d %s: error = 0x%x\n",
167                                 index >> 1, direction ? "SEND" : "RECV",
168                                 status);
169                         retval = -EPROTO;
170                 }
171 
172                 list_del_init(&curr_trb->trb_list);
173         }
174         if (retval)
175                 return retval;
176 
177         curr_req->req.actual = actual;
178         return 0;
179 }
180 
181 /*
182  * mv_u3d_done() - retire a request; the caller must have blocked irqs
183  * @status : request status to be set; only takes effect while the
184  * request is still in progress.
185  */
186 static
187 void mv_u3d_done(struct mv_u3d_ep *ep, struct mv_u3d_req *req, int status)
188         __releases(&ep->udc->lock)
189         __acquires(&ep->udc->lock)
190 {
191         struct mv_u3d *u3d = (struct mv_u3d *)ep->u3d;
192 
193         dev_dbg(u3d->dev, "mv_u3d_done: remove req->queue\n");
194         /* Remove the req from the ep queue */
195         list_del_init(&req->queue);
196 
197         /* req.status should be set as -EINPROGRESS in ep_queue() */
198         if (req->req.status == -EINPROGRESS)
199                 req->req.status = status;
200         else
201                 status = req->req.status;
202 
203         /* Free trb for the request */
204         if (!req->chain)
205                 dma_pool_free(u3d->trb_pool,
206                         req->trb_head->trb_hw, req->trb_head->trb_dma);
207         else {
208                 dma_unmap_single(ep->u3d->gadget.dev.parent,
209                         (dma_addr_t)req->trb_head->trb_dma,
210                         req->trb_count * sizeof(struct mv_u3d_trb_hw),
211                         DMA_BIDIRECTIONAL);
212                 kfree(req->trb_head->trb_hw);
213         }
214         kfree(req->trb_head);
215 
216         usb_gadget_unmap_request(&u3d->gadget, &req->req, mv_u3d_ep_dir(ep));
217 
218         if (status && (status != -ESHUTDOWN)) {
219                 dev_dbg(u3d->dev, "complete %s req %p stat %d len %u/%u",
220                         ep->ep.name, &req->req, status,
221                         req->req.actual, req->req.length);
222         }
223 
224         spin_unlock(&ep->u3d->lock);
225         /*
226          * complete() is from gadget layer,
227          * eg fsg->bulk_in_complete()
228          */
229         if (req->req.complete)
230                 req->req.complete(&ep->ep, &req->req);
231 
232         spin_lock(&ep->u3d->lock);
233 }
234 
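/*
 * Hand a built TRB chain to the hardware: point the endpoint context at
 * the first TRB (with the DCS bit set), make sure the context update is
 * visible before ringing the doorbell, then write the endpoint index
 * (ep_num * 2 + direction, or 1 for ep0) to the doorbell register.
 */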
235 static int mv_u3d_queue_trb(struct mv_u3d_ep *ep, struct mv_u3d_req *req)
236 {
237         u32 tmp, direction;
238         struct mv_u3d *u3d;
239         struct mv_u3d_ep_context *ep_context;
240         int retval = 0;
241 
242         u3d = ep->u3d;
243         direction = mv_u3d_ep_dir(ep);
244 
245         /* ep0 in and out share the same ep context slot 1 */
246         if (ep->ep_num == 0)
247                 ep_context = &(u3d->ep_context[1]);
248         else
249                 ep_context = &(u3d->ep_context[ep->ep_num * 2 + direction]);
250 
251         /* check if the pipe is empty or not */
252         if (!list_empty(&ep->queue)) {
253                 dev_err(u3d->dev, "add trb to non-empty queue!\n");
254                 retval = -ENOMEM;
255                 WARN_ON(1);
256         } else {
257                 ep_context->rsvd0 = cpu_to_le32(1);
258                 ep_context->rsvd1 = 0;
259 
260                 /* Configure the trb address and set the DCS bit.
261                  * Both DCS bit and own bit in trb should be set.
262                  */
263                 ep_context->trb_addr_lo =
264                         cpu_to_le32(req->trb_head->trb_dma | DCS_ENABLE);
265                 ep_context->trb_addr_hi = 0;
266 
267                 /* Ensure that updates to the EP Context will
268                  * occur before ringing the doorbell.
269                  */
270                 wmb();
271 
272                 /* ring bell the ep */
273                 if (ep->ep_num == 0)
274                         tmp = 0x1;
275                 else
276                         tmp = ep->ep_num * 2
277                                 + ((direction == MV_U3D_EP_DIR_OUT) ? 0 : 1);
278 
279                 iowrite32(tmp, &u3d->op_regs->doorbell);
280         }
281         return retval;
282 }
283 
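/*
 * Single-TRB case: the whole transfer fits in one TRB, so the hardware
 * TRB is taken from the DMA pool (its bus address is returned in *dma)
 * and no software chaining is needed.
 */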
284 static struct mv_u3d_trb *mv_u3d_build_trb_one(struct mv_u3d_req *req,
285                                 unsigned *length, dma_addr_t *dma)
286 {
287         u32 temp;
288         unsigned int direction;
289         struct mv_u3d_trb *trb;
290         struct mv_u3d_trb_hw *trb_hw;
291         struct mv_u3d *u3d;
292 
293         /* how big will this transfer be? */
294         *length = req->req.length - req->req.actual;
295         BUG_ON(*length > (unsigned)MV_U3D_EP_MAX_LENGTH_TRANSFER);
296 
297         u3d = req->ep->u3d;
298 
299         trb = kzalloc(sizeof(*trb), GFP_ATOMIC);
300         if (!trb) {
301                 dev_err(u3d->dev, "%s, trb alloc fail\n", __func__);
302                 return NULL;
303         }
304 
305         /*
306          * Be careful that __GFP_HIGHMEM is not set,
307          * or we cannot use dma_to_virt;
308          * GFP_KERNEL cannot be used while holding a spinlock.
309          */
310         trb_hw = dma_pool_alloc(u3d->trb_pool, GFP_ATOMIC, dma);
311         if (!trb_hw) {
312                 kfree(trb);
313                 dev_err(u3d->dev,
314                         "%s, dma_pool_alloc fail\n", __func__);
315                 return NULL;
316         }
317         trb->trb_dma = *dma;
318         trb->trb_hw = trb_hw;
319 
320         /* initialize buffer page pointers */
321         temp = (u32)(req->req.dma + req->req.actual);
322 
323         trb_hw->buf_addr_lo = cpu_to_le32(temp);
324         trb_hw->buf_addr_hi = 0;
325         trb_hw->trb_len = cpu_to_le32(*length);
326         trb_hw->ctrl.own = 1;
327 
328         if (req->ep->ep_num == 0)
329                 trb_hw->ctrl.type = TYPE_DATA;
330         else
331                 trb_hw->ctrl.type = TYPE_NORMAL;
332 
333         req->req.actual += *length;
334 
335         direction = mv_u3d_ep_dir(req->ep);
336         if (direction == MV_U3D_EP_DIR_IN)
337                 trb_hw->ctrl.dir = 1;
338         else
339                 trb_hw->ctrl.dir = 0;
340 
341         /* Enable interrupt for the last trb of a request */
342         if (!req->req.no_interrupt)
343                 trb_hw->ctrl.ioc = 1;
344 
345         trb_hw->ctrl.chain = 0;
346 
347         wmb();
348         return trb;
349 }
350 
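/*
 * Chained case: fill one TRB covering up to MV_U3D_EP_MAX_LENGTH_TRANSFER
 * bytes of the request. *is_last tells the caller whether more TRBs are
 * needed; the chain bit is set on every TRB except the last one, and the
 * IOC bit is only set on the last TRB (unless no_interrupt is requested).
 */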
351 static int mv_u3d_build_trb_chain(struct mv_u3d_req *req, unsigned *length,
352                 struct mv_u3d_trb *trb, int *is_last)
353 {
354         u32 temp;
355         unsigned int direction;
356         struct mv_u3d *u3d;
357 
358         /* how big will this transfer be? */
359         *length = min(req->req.length - req->req.actual,
360                         (unsigned)MV_U3D_EP_MAX_LENGTH_TRANSFER);
361 
362         u3d = req->ep->u3d;
363 
364         trb->trb_dma = 0;
365 
366         /* initialize buffer page pointers */
367         temp = (u32)(req->req.dma + req->req.actual);
368 
369         trb->trb_hw->buf_addr_lo = cpu_to_le32(temp);
370         trb->trb_hw->buf_addr_hi = 0;
371         trb->trb_hw->trb_len = cpu_to_le32(*length);
372         trb->trb_hw->ctrl.own = 1;
373 
374         if (req->ep->ep_num == 0)
375                 trb->trb_hw->ctrl.type = TYPE_DATA;
376         else
377                 trb->trb_hw->ctrl.type = TYPE_NORMAL;
378 
379         req->req.actual += *length;
380 
381         direction = mv_u3d_ep_dir(req->ep);
382         if (direction == MV_U3D_EP_DIR_IN)
383                 trb->trb_hw->ctrl.dir = 1;
384         else
385                 trb->trb_hw->ctrl.dir = 0;
386 
387         /* zlp is needed if req->req.zero is set */
388         if (req->req.zero) {
389                 if (*length == 0 || (*length % req->ep->ep.maxpacket) != 0)
390                         *is_last = 1;
391                 else
392                         *is_last = 0;
393         } else if (req->req.length == req->req.actual)
394                 *is_last = 1;
395         else
396                 *is_last = 0;
397 
398         /* Enable interrupt for the last trb of a request */
399         if (*is_last && !req->req.no_interrupt)
400                 trb->trb_hw->ctrl.ioc = 1;
401 
402         if (*is_last)
403                 trb->trb_hw->ctrl.chain = 0;
404         else {
405                 trb->trb_hw->ctrl.chain = 1;
406                 dev_dbg(u3d->dev, "chain trb\n");
407         }
408 
409         wmb();
410 
411         return 0;
412 }
413 
414 /* generate a TRB linked list for a request
415  * the usb controller only supports a contiguous trb chain,
416  * i.e. the trb structures' physical addresses must be contiguous.
417  */
418 static int mv_u3d_req_to_trb(struct mv_u3d_req *req)
419 {
420         unsigned count;
421         int is_last;
422         struct mv_u3d_trb *trb;
423         struct mv_u3d_trb_hw *trb_hw;
424         struct mv_u3d *u3d;
425         dma_addr_t dma;
426         unsigned length;
427         unsigned trb_num;
428 
429         u3d = req->ep->u3d;
430 
431         INIT_LIST_HEAD(&req->trb_list);
432 
433         length = req->req.length - req->req.actual;
434         /* normally the request transfer length is less than 16KB.
435          * we use mv_u3d_build_trb_one() to optimize that case.
436          */
437         if (length <= (unsigned)MV_U3D_EP_MAX_LENGTH_TRANSFER) {
438                 trb = mv_u3d_build_trb_one(req, &count, &dma);
439                 list_add_tail(&trb->trb_list, &req->trb_list);
440                 req->trb_head = trb;
441                 req->trb_count = 1;
442                 req->chain = 0;
443         } else {
444                 trb_num = length / MV_U3D_EP_MAX_LENGTH_TRANSFER;
445                 if (length % MV_U3D_EP_MAX_LENGTH_TRANSFER)
446                         trb_num++;
447 
448                 trb = kcalloc(trb_num, sizeof(*trb), GFP_ATOMIC);
449                 if (!trb) {
450                         dev_err(u3d->dev,
451                                         "%s, trb alloc fail\n", __func__);
452                         return -ENOMEM;
453                 }
454 
455                 trb_hw = kcalloc(trb_num, sizeof(*trb_hw), GFP_ATOMIC);
456                 if (!trb_hw) {
457                         kfree(trb);
458                         dev_err(u3d->dev,
459                                         "%s, trb_hw alloc fail\n", __func__);
460                         return -ENOMEM;
461                 }
462 
463                 do {
464                         trb->trb_hw = trb_hw;
465                         if (mv_u3d_build_trb_chain(req, &count,
466                                                 trb, &is_last)) {
467                                 dev_err(u3d->dev,
468                                         "%s, mv_u3d_build_trb_chain fail\n",
469                                         __func__);
470                                 return -EIO;
471                         }
472 
473                         list_add_tail(&trb->trb_list, &req->trb_list);
474                         req->trb_count++;
475                         trb++;
476                         trb_hw++;
477                 } while (!is_last);
478 
479                 req->trb_head = list_entry(req->trb_list.next,
480                                         struct mv_u3d_trb, trb_list);
481                 req->trb_head->trb_dma = dma_map_single(u3d->gadget.dev.parent,
482                                         req->trb_head->trb_hw,
483                                         trb_num * sizeof(*trb_hw),
484                                         DMA_BIDIRECTIONAL);
485 
486                 req->chain = 1;
487         }
488 
489         return 0;
490 }
491 
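/*
 * Kick the hardware for the first pending request on this endpoint: map
 * the request for DMA, build its TRBs, queue them to the hardware and add
 * the request to ep->queue. The interrupt handler advances the queue as
 * transfers complete.
 */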
492 static int
493 mv_u3d_start_queue(struct mv_u3d_ep *ep)
494 {
495         struct mv_u3d *u3d = ep->u3d;
496         struct mv_u3d_req *req;
497         int ret;
498 
499         if (!list_empty(&ep->req_list) && !ep->processing)
500                 req = list_entry(ep->req_list.next, struct mv_u3d_req, list);
501         else
502                 return 0;
503 
504         ep->processing = 1;
505 
506         /* set up dma mapping */
507         ret = usb_gadget_map_request(&u3d->gadget, &req->req,
508                                         mv_u3d_ep_dir(ep));
509         if (ret)
510                 return ret;
511 
512         req->req.status = -EINPROGRESS;
513         req->req.actual = 0;
514         req->trb_count = 0;
515 
516         /* build trbs and push them to device queue */
517         if (!mv_u3d_req_to_trb(req)) {
518                 ret = mv_u3d_queue_trb(ep, req);
519                 if (ret) {
520                         ep->processing = 0;
521                         return ret;
522                 }
523         } else {
524                 ep->processing = 0;
525                 dev_err(u3d->dev, "%s, mv_u3d_req_to_trb fail\n", __func__);
526                 return -ENOMEM;
527         }
528 
529         /* irq handler advances the queue */
530         if (req)
531                 list_add_tail(&req->queue, &ep->queue);
532 
533         return 0;
534 }
535 
536 static int mv_u3d_ep_enable(struct usb_ep *_ep,
537                 const struct usb_endpoint_descriptor *desc)
538 {
539         struct mv_u3d *u3d;
540         struct mv_u3d_ep *ep;
541         struct mv_u3d_ep_context *ep_context;
542         u16 max = 0;
543         unsigned maxburst = 0;
544         u32 epxcr, direction;
545 
546         if (!_ep || !desc || desc->bDescriptorType != USB_DT_ENDPOINT)
547                 return -EINVAL;
548 
549         ep = container_of(_ep, struct mv_u3d_ep, ep);
550         u3d = ep->u3d;
551 
552         if (!u3d->driver || u3d->gadget.speed == USB_SPEED_UNKNOWN)
553                 return -ESHUTDOWN;
554 
555         direction = mv_u3d_ep_dir(ep);
556         max = le16_to_cpu(desc->wMaxPacketSize);
557 
558         if (!_ep->maxburst)
559                 _ep->maxburst = 1;
560         maxburst = _ep->maxburst;
561 
562         /* Get the endpoint context address */
563         ep_context = (struct mv_u3d_ep_context *)ep->ep_context;
564 
565         /* Set the max burst size */
566         switch (desc->bmAttributes & USB_ENDPOINT_XFERTYPE_MASK) {
567         case USB_ENDPOINT_XFER_BULK:
568                 if (maxburst > 16) {
569                         dev_dbg(u3d->dev,
570                                 "max burst should not be greater "
571                                 "than 16 on bulk ep\n");
572                         maxburst = 1;
573                         _ep->maxburst = maxburst;
574                 }
575                 dev_dbg(u3d->dev,
576                         "maxburst: %d on bulk %s\n", maxburst, ep->name);
577                 break;
578         case USB_ENDPOINT_XFER_CONTROL:
579                 /* control transfer only supports maxburst as one */
580                 maxburst = 1;
581                 _ep->maxburst = maxburst;
582                 break;
583         case USB_ENDPOINT_XFER_INT:
584                 if (maxburst != 1) {
585                         dev_dbg(u3d->dev,
586                                 "max burst should be 1 on int ep "
587                                 "if transfer size is not 1024\n");
588                         maxburst = 1;
589                         _ep->maxburst = maxburst;
590                 }
591                 break;
592         case USB_ENDPOINT_XFER_ISOC:
593                 if (maxburst != 1) {
594                         dev_dbg(u3d->dev,
595                                 "max burst should be 1 on isoc ep "
596                                 "if transfer size is not 1024\n");
597                         maxburst = 1;
598                         _ep->maxburst = maxburst;
599                 }
600                 break;
601         default:
602                 goto en_done;
603         }
604 
605         ep->ep.maxpacket = max;
606         ep->ep.desc = desc;
607         ep->enabled = 1;
608 
609         /* Enable the endpoint for Rx or Tx and set the endpoint type */
610         if (direction == MV_U3D_EP_DIR_OUT) {
611                 epxcr = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
612                 epxcr |= MV_U3D_EPXCR_EP_INIT;
613                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
614                 udelay(5);
615                 epxcr &= ~MV_U3D_EPXCR_EP_INIT;
616                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
617 
618                 epxcr = ((max << MV_U3D_EPXCR_MAX_PACKET_SIZE_SHIFT)
619                       | ((maxburst - 1) << MV_U3D_EPXCR_MAX_BURST_SIZE_SHIFT)
620                       | (1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
621                       | (desc->bmAttributes & USB_ENDPOINT_XFERTYPE_MASK));
622                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr1);
623         } else {
624                 epxcr = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
625                 epxcr |= MV_U3D_EPXCR_EP_INIT;
626                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
627                 udelay(5);
628                 epxcr &= ~MV_U3D_EPXCR_EP_INIT;
629                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
630 
631                 epxcr = ((max << MV_U3D_EPXCR_MAX_PACKET_SIZE_SHIFT)
632                       | ((maxburst - 1) << MV_U3D_EPXCR_MAX_BURST_SIZE_SHIFT)
633                       | (1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
634                       | (desc->bmAttributes & USB_ENDPOINT_XFERTYPE_MASK));
635                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxincr1);
636         }
637 
638         return 0;
639 en_done:
640         return -EINVAL;
641 }
642 
643 static int  mv_u3d_ep_disable(struct usb_ep *_ep)
644 {
645         struct mv_u3d *u3d;
646         struct mv_u3d_ep *ep;
647         struct mv_u3d_ep_context *ep_context;
648         u32 epxcr, direction;
649         unsigned long flags;
650 
651         if (!_ep)
652                 return -EINVAL;
653 
654         ep = container_of(_ep, struct mv_u3d_ep, ep);
655         if (!ep->ep.desc)
656                 return -EINVAL;
657 
658         u3d = ep->u3d;
659 
660         /* Get the endpoint context address */
661         ep_context = ep->ep_context;
662 
663         direction = mv_u3d_ep_dir(ep);
664 
665         /* nuke all pending requests (does flush) */
666         spin_lock_irqsave(&u3d->lock, flags);
667         mv_u3d_nuke(ep, -ESHUTDOWN);
668         spin_unlock_irqrestore(&u3d->lock, flags);
669 
670         /* Disable the endpoint for Rx or Tx and reset the endpoint type */
671         if (direction == MV_U3D_EP_DIR_OUT) {
672                 epxcr = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxoutcr1);
673                 epxcr &= ~((1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
674                       | USB_ENDPOINT_XFERTYPE_MASK);
675                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr1);
676         } else {
677                 epxcr = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxincr1);
678                 epxcr &= ~((1 << MV_U3D_EPXCR_EP_ENABLE_SHIFT)
679                       | USB_ENDPOINT_XFERTYPE_MASK);
680                 iowrite32(epxcr, &u3d->vuc_regs->epcr[ep->ep_num].epxincr1);
681         }
682 
683         ep->enabled = 0;
684 
685         ep->ep.desc = NULL;
686         return 0;
687 }
688 
689 static struct usb_request *
690 mv_u3d_alloc_request(struct usb_ep *_ep, gfp_t gfp_flags)
691 {
692         struct mv_u3d_req *req = NULL;
693 
694         req = kzalloc(sizeof *req, gfp_flags);
695         if (!req)
696                 return NULL;
697 
698         INIT_LIST_HEAD(&req->queue);
699 
700         return &req->req;
701 }
702 
703 static void mv_u3d_free_request(struct usb_ep *_ep, struct usb_request *_req)
704 {
705         struct mv_u3d_req *req = container_of(_req, struct mv_u3d_req, req);
706 
707         kfree(req);
708 }
709 
710 static void mv_u3d_ep_fifo_flush(struct usb_ep *_ep)
711 {
712         struct mv_u3d *u3d;
713         u32 direction;
714         struct mv_u3d_ep *ep = container_of(_ep, struct mv_u3d_ep, ep);
715         unsigned int loops;
716         u32 tmp;
717 
718         /* if endpoint is not enabled, cannot flush endpoint */
719         if (!ep->enabled)
720                 return;
721 
722         u3d = ep->u3d;
723         direction = mv_u3d_ep_dir(ep);
724 
725         /* for ep0, software must clear the flush bit after flushing the fifo. */
726         if (!ep->ep_num) {
727                 if (direction == MV_U3D_EP_DIR_OUT) {
728                         tmp = ioread32(&u3d->vuc_regs->epcr[0].epxoutcr0);
729                         tmp |= MV_U3D_EPXCR_EP_FLUSH;
730                         iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxoutcr0);
731                         udelay(10);
732                         tmp &= ~MV_U3D_EPXCR_EP_FLUSH;
733                         iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxoutcr0);
734                 } else {
735                         tmp = ioread32(&u3d->vuc_regs->epcr[0].epxincr0);
736                         tmp |= MV_U3D_EPXCR_EP_FLUSH;
737                         iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxincr0);
738                         udelay(10);
739                         tmp &= ~MV_U3D_EPXCR_EP_FLUSH;
740                         iowrite32(tmp, &u3d->vuc_regs->epcr[0].epxincr0);
741                 }
742                 return;
743         }
744 
745         if (direction == MV_U3D_EP_DIR_OUT) {
746                 tmp = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
747                 tmp |= MV_U3D_EPXCR_EP_FLUSH;
748                 iowrite32(tmp, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
749 
750                 /* Wait until flushing completed */
751                 loops = LOOPS(MV_U3D_FLUSH_TIMEOUT);
752                 while (ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0) &
753                         MV_U3D_EPXCR_EP_FLUSH) {
754                         /*
755                          * EP_FLUSH bit should be cleared to indicate this
756                          * operation is complete
757                          */
758                         if (loops == 0) {
759                                 dev_dbg(u3d->dev,
760                                     "EP FLUSH TIMEOUT for ep%d%s\n", ep->ep_num,
761                                     direction ? "in" : "out");
762                                 return;
763                         }
764                         loops--;
765                         udelay(LOOPS_USEC);
766                 }
767         } else {        /* EP_DIR_IN */
768                 tmp = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
769                 tmp |= MV_U3D_EPXCR_EP_FLUSH;
770                 iowrite32(tmp, &u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
771 
772                 /* Wait until flushing completed */
773                 loops = LOOPS(MV_U3D_FLUSH_TIMEOUT);
774                 while (ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxincr0) &
775                         MV_U3D_EPXCR_EP_FLUSH) {
776                         /*
777                          * EP_FLUSH bit should be cleared to indicate this
778                          * operation is complete
779                          */
780                         if (loops == 0) {
781                                 dev_dbg(u3d->dev,
782                                     "EP FLUSH TIMEOUT for ep%d%s\n", ep->ep_num,
783                                     direction ? "in" : "out");
784                                 return;
785                         }
786                         loops--;
787                         udelay(LOOPS_USEC);
788                 }
789         }
790 }
791 
792 /* queues (submits) an I/O request to an endpoint */
793 static int
794 mv_u3d_ep_queue(struct usb_ep *_ep, struct usb_request *_req, gfp_t gfp_flags)
795 {
796         struct mv_u3d_ep *ep;
797         struct mv_u3d_req *req;
798         struct mv_u3d *u3d;
799         unsigned long flags;
800         int is_first_req = 0;
801 
802         if (unlikely(!_ep || !_req))
803                 return -EINVAL;
804 
805         ep = container_of(_ep, struct mv_u3d_ep, ep);
806         u3d = ep->u3d;
807 
808         req = container_of(_req, struct mv_u3d_req, req);
809 
810         if (!ep->ep_num
811                 && u3d->ep0_state == MV_U3D_STATUS_STAGE
812                 && !_req->length) {
813                 dev_dbg(u3d->dev, "ep0 status stage\n");
814                 u3d->ep0_state = MV_U3D_WAIT_FOR_SETUP;
815                 return 0;
816         }
817 
818         dev_dbg(u3d->dev, "%s: %s, req: 0x%p\n",
819                         __func__, _ep->name, req);
820 
821         /* catch various bogus parameters */
822         if (!req->req.complete || !req->req.buf
823                         || !list_empty(&req->queue)) {
824                 dev_err(u3d->dev,
825                         "%s, bad params, _req: 0x%p, "
826                         "req->req.complete: 0x%p, req->req.buf: 0x%p, "
827                         "list_empty: 0x%x\n",
828                         __func__, _req,
829                         req->req.complete, req->req.buf,
830                         list_empty(&req->queue));
831                 return -EINVAL;
832         }
833         if (unlikely(!ep->ep.desc)) {
834                 dev_err(u3d->dev, "%s, bad ep\n", __func__);
835                 return -EINVAL;
836         }
837         if (ep->ep.desc->bmAttributes == USB_ENDPOINT_XFER_ISOC) {
838                 if (req->req.length > ep->ep.maxpacket)
839                         return -EMSGSIZE;
840         }
841 
842         if (!u3d->driver || u3d->gadget.speed == USB_SPEED_UNKNOWN) {
843                 dev_err(u3d->dev,
844                         "bad params of driver/speed\n");
845                 return -ESHUTDOWN;
846         }
847 
848         req->ep = ep;
849 
850         /* The software req_list tracks the usb request. */
851         spin_lock_irqsave(&ep->req_lock, flags);
852         is_first_req = list_empty(&ep->req_list);
853         list_add_tail(&req->list, &ep->req_list);
854         spin_unlock_irqrestore(&ep->req_lock, flags);
855         if (!is_first_req) {
856                 dev_dbg(u3d->dev, "list is not empty\n");
857                 return 0;
858         }
859 
860         dev_dbg(u3d->dev, "call mv_u3d_start_queue from usb_ep_queue\n");
861         spin_lock_irqsave(&u3d->lock, flags);
862         mv_u3d_start_queue(ep);
863         spin_unlock_irqrestore(&u3d->lock, flags);
864         return 0;
865 }
866 
867 /* dequeues (cancels, unlinks) an I/O request from an endpoint */
868 static int mv_u3d_ep_dequeue(struct usb_ep *_ep, struct usb_request *_req)
869 {
870         struct mv_u3d_ep *ep;
871         struct mv_u3d_req *req;
872         struct mv_u3d *u3d;
873         struct mv_u3d_ep_context *ep_context;
874         struct mv_u3d_req *next_req;
875 
876         unsigned long flags;
877         int ret = 0;
878 
879         if (!_ep || !_req)
880                 return -EINVAL;
881 
882         ep = container_of(_ep, struct mv_u3d_ep, ep);
883         u3d = ep->u3d;
884 
885         spin_lock_irqsave(&ep->u3d->lock, flags);
886 
887         /* make sure it's actually queued on this endpoint */
888         list_for_each_entry(req, &ep->queue, queue) {
889                 if (&req->req == _req)
890                         break;
891         }
892         if (&req->req != _req) {
893                 ret = -EINVAL;
894                 goto out;
895         }
896 
897         /* The request is in progress, or completed but not dequeued */
898         if (ep->queue.next == &req->queue) {
899                 _req->status = -ECONNRESET;
900                 mv_u3d_ep_fifo_flush(_ep);
901 
902                 /* The request isn't the last request in this ep queue */
903                 if (req->queue.next != &ep->queue) {
904                         dev_dbg(u3d->dev,
905                                 "it is not the last request in this ep queue\n");
906                         ep_context = ep->ep_context;
907                         next_req = list_entry(req->queue.next,
908                                         struct mv_u3d_req, queue);
909 
910                         /* Point the EP context at the first TRB of the next request. */
911                         iowrite32((unsigned long) next_req->trb_head,
912                                         &ep_context->trb_addr_lo);
913                 } else {
914                         struct mv_u3d_ep_context *ep_context;
915                         ep_context = ep->ep_context;
916                         ep_context->trb_addr_lo = 0;
917                         ep_context->trb_addr_hi = 0;
918                 }
919 
920         } else
921                 WARN_ON(1);
922 
923         mv_u3d_done(ep, req, -ECONNRESET);
924 
925         /* remove the req from the ep req list */
926         if (!list_empty(&ep->req_list)) {
927                 struct mv_u3d_req *curr_req;
928                 curr_req = list_entry(ep->req_list.next,
929                                         struct mv_u3d_req, list);
930                 if (curr_req == req) {
931                         list_del_init(&req->list);
932                         ep->processing = 0;
933                 }
934         }
935 
936 out:
937         spin_unlock_irqrestore(&ep->u3d->lock, flags);
938         return ret;
939 }
940 
941 static void
942 mv_u3d_ep_set_stall(struct mv_u3d *u3d, u8 ep_num, u8 direction, int stall)
943 {
944         u32 tmp;
945         struct mv_u3d_ep *ep = u3d->eps;
946 
947         dev_dbg(u3d->dev, "%s\n", __func__);
948         if (direction == MV_U3D_EP_DIR_OUT) {
949                 tmp = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
950                 if (stall)
951                         tmp |= MV_U3D_EPXCR_EP_HALT;
952                 else
953                         tmp &= ~MV_U3D_EPXCR_EP_HALT;
954                 iowrite32(tmp, &u3d->vuc_regs->epcr[ep->ep_num].epxoutcr0);
955         } else {
956                 tmp = ioread32(&u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
957                 if (stall)
958                         tmp |= MV_U3D_EPXCR_EP_HALT;
959                 else
960                         tmp &= ~MV_U3D_EPXCR_EP_HALT;
961                 iowrite32(tmp, &u3d->vuc_regs->epcr[ep->ep_num].epxincr0);
962         }
963 }
964 
965 static int mv_u3d_ep_set_halt_wedge(struct usb_ep *_ep, int halt, int wedge)
966 {
967         struct mv_u3d_ep *ep;
968         unsigned long flags = 0;
969         int status = 0;
970         struct mv_u3d *u3d;
971 
972         ep = container_of(_ep, struct mv_u3d_ep, ep);
973         u3d = ep->u3d;
974         if (!ep->ep.desc) {
975                 status = -EINVAL;
976                 goto out;
977         }
978 
979         if (ep->ep.desc->bmAttributes == USB_ENDPOINT_XFER_ISOC) {
980                 status = -EOPNOTSUPP;
981                 goto out;
982         }
983 
984         /*
985          * Attempting to halt an IN ep will fail if any transfer requests
986          * are still queued
987          */
988         if (halt && (mv_u3d_ep_dir(ep) == MV_U3D_EP_DIR_IN)
989                         && !list_empty(&ep->queue)) {
990                 status = -EAGAIN;
991                 goto out;
992         }
993 
994         spin_lock_irqsave(&ep->u3d->lock, flags);
995         mv_u3d_ep_set_stall(u3d, ep->ep_num, mv_u3d_ep_dir(ep), halt);
996         if (halt && wedge)
997                 ep->wedge = 1;
998         else if (!halt)
999                 ep->wedge = 0;
1000         spin_unlock_irqrestore(&ep->u3d->lock, flags);
1001 
1002         if (ep->ep_num == 0)
1003                 u3d->ep0_dir = MV_U3D_EP_DIR_OUT;
1004 out:
1005         return status;
1006 }
1007 
1008 static int mv_u3d_ep_set_halt(struct usb_ep *_ep, int halt)
1009 {
1010         return mv_u3d_ep_set_halt_wedge(_ep, halt, 0);
1011 }
1012 
1013 static int mv_u3d_ep_set_wedge(struct usb_ep *_ep)
1014 {
1015         return mv_u3d_ep_set_halt_wedge(_ep, 1, 1);
1016 }
1017 
1018 static struct usb_ep_ops mv_u3d_ep_ops = {
1019         .enable         = mv_u3d_ep_enable,
1020         .disable        = mv_u3d_ep_disable,
1021 
1022         .alloc_request  = mv_u3d_alloc_request,
1023         .free_request   = mv_u3d_free_request,
1024 
1025         .queue          = mv_u3d_ep_queue,
1026         .dequeue        = mv_u3d_ep_dequeue,
1027 
1028         .set_wedge      = mv_u3d_ep_set_wedge,
1029         .set_halt       = mv_u3d_ep_set_halt,
1030         .fifo_flush     = mv_u3d_ep_fifo_flush,
1031 };
1032 
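/*
 * Controller start/stop pair: stop masks the interrupt sources (keeping
 * VBUS valid detection when clock gating is off), acknowledges pending
 * status and clears the RUN bit; start enables the LTSSM, unmasks the
 * interrupt sources, enables the control endpoint and sets the RUN bit.
 */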
1033 static void mv_u3d_controller_stop(struct mv_u3d *u3d)
1034 {
1035         u32 tmp;
1036 
1037         if (!u3d->clock_gating && u3d->vbus_valid_detect)
1038                 iowrite32(MV_U3D_INTR_ENABLE_VBUS_VALID,
1039                                 &u3d->vuc_regs->intrenable);
1040         else
1041                 iowrite32(0, &u3d->vuc_regs->intrenable);
1042         iowrite32(~0x0, &u3d->vuc_regs->endcomplete);
1043         iowrite32(~0x0, &u3d->vuc_regs->trbunderrun);
1044         iowrite32(~0x0, &u3d->vuc_regs->trbcomplete);
1045         iowrite32(~0x0, &u3d->vuc_regs->linkchange);
1046         iowrite32(0x1, &u3d->vuc_regs->setuplock);
1047 
1048         /* Reset the RUN bit in the command register to stop USB */
1049         tmp = ioread32(&u3d->op_regs->usbcmd);
1050         tmp &= ~MV_U3D_CMD_RUN_STOP;
1051         iowrite32(tmp, &u3d->op_regs->usbcmd);
1052         dev_dbg(u3d->dev, "after u3d_stop, USBCMD 0x%x\n",
1053                 ioread32(&u3d->op_regs->usbcmd));
1054 }
1055 
1056 static void mv_u3d_controller_start(struct mv_u3d *u3d)
1057 {
1058         u32 usbintr;
1059         u32 temp;
1060 
1061         /* enable link LTSSM state machine */
1062         temp = ioread32(&u3d->vuc_regs->ltssm);
1063         temp |= MV_U3D_LTSSM_PHY_INIT_DONE;
1064         iowrite32(temp, &u3d->vuc_regs->ltssm);
1065 
1066         /* Enable interrupts */
1067         usbintr = MV_U3D_INTR_ENABLE_LINK_CHG | MV_U3D_INTR_ENABLE_TXDESC_ERR |
1068                 MV_U3D_INTR_ENABLE_RXDESC_ERR | MV_U3D_INTR_ENABLE_TX_COMPLETE |
1069                 MV_U3D_INTR_ENABLE_RX_COMPLETE | MV_U3D_INTR_ENABLE_SETUP |
1070                 (u3d->vbus_valid_detect ? MV_U3D_INTR_ENABLE_VBUS_VALID : 0);
1071         iowrite32(usbintr, &u3d->vuc_regs->intrenable);
1072 
1073         /* Enable ctrl ep */
1074         iowrite32(0x1, &u3d->vuc_regs->ctrlepenable);
1075 
1076         /* Set the Run bit in the command register */
1077         iowrite32(MV_U3D_CMD_RUN_STOP, &u3d->op_regs->usbcmd);
1078         dev_dbg(u3d->dev, "after u3d_start, USBCMD 0x%x\n",
1079                 ioread32(&u3d->op_regs->usbcmd));
1080 }
1081 
1082 static int mv_u3d_controller_reset(struct mv_u3d *u3d)
1083 {
1084         unsigned int loops;
1085         u32 tmp;
1086 
1087         /* Stop the controller */
1088         tmp = ioread32(&u3d->op_regs->usbcmd);
1089         tmp &= ~MV_U3D_CMD_RUN_STOP;
1090         iowrite32(tmp, &u3d->op_regs->usbcmd);
1091 
1092         /* Reset the controller to get default values */
1093         iowrite32(MV_U3D_CMD_CTRL_RESET, &u3d->op_regs->usbcmd);
1094 
1095         /* wait for reset to complete */
1096         loops = LOOPS(MV_U3D_RESET_TIMEOUT);
1097         while (ioread32(&u3d->op_regs->usbcmd) & MV_U3D_CMD_CTRL_RESET) {
1098                 if (loops == 0) {
1099                         dev_err(u3d->dev,
1100                                 "wait for RESET to complete timed out\n");
1101                         return -ETIMEDOUT;
1102                 }
1103                 loops--;
1104                 udelay(LOOPS_USEC);
1105         }
1106 
1107         /* Configure the Endpoint Context Address */
1108         iowrite32(u3d->ep_context_dma, &u3d->op_regs->dcbaapl);
1109         iowrite32(0, &u3d->op_regs->dcbaaph);
1110 
1111         return 0;
1112 }
1113 
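/*
 * With clock gating enabled, the controller clock and PHY are only powered
 * while a session is active, and mv_u3d_enable()/mv_u3d_disable() do the
 * actual clock/PHY handling; without clock gating the clock stays on and
 * enable simply marks the controller active.
 */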
1114 static int mv_u3d_enable(struct mv_u3d *u3d)
1115 {
1116         struct mv_usb_platform_data *pdata = dev_get_platdata(u3d->dev);
1117         int retval;
1118 
1119         if (u3d->active)
1120                 return 0;
1121 
1122         if (!u3d->clock_gating) {
1123                 u3d->active = 1;
1124                 return 0;
1125         }
1126 
1127         dev_dbg(u3d->dev, "enable u3d\n");
1128         clk_enable(u3d->clk);
1129         if (pdata->phy_init) {
1130                 retval = pdata->phy_init(u3d->phy_regs);
1131                 if (retval) {
1132                         dev_err(u3d->dev,
1133                                 "init phy error %d\n", retval);
1134                         clk_disable(u3d->clk);
1135                         return retval;
1136                 }
1137         }
1138         u3d->active = 1;
1139 
1140         return 0;
1141 }
1142 
1143 static void mv_u3d_disable(struct mv_u3d *u3d)
1144 {
1145         struct mv_usb_platform_data *pdata = dev_get_platdata(u3d->dev);
1146         if (u3d->clock_gating && u3d->active) {
1147                 dev_dbg(u3d->dev, "disable u3d\n");
1148                 if (pdata->phy_deinit)
1149                         pdata->phy_deinit(u3d->phy_regs);
1150                 clk_disable(u3d->clk);
1151                 u3d->active = 0;
1152         }
1153 }
1154 
1155 static int mv_u3d_vbus_session(struct usb_gadget *gadget, int is_active)
1156 {
1157         struct mv_u3d *u3d;
1158         unsigned long flags;
1159         int retval = 0;
1160 
1161         u3d = container_of(gadget, struct mv_u3d, gadget);
1162 
1163         spin_lock_irqsave(&u3d->lock, flags);
1164 
1165         u3d->vbus_active = (is_active != 0);
1166         dev_dbg(u3d->dev, "%s: softconnect %d, vbus_active %d\n",
1167                 __func__, u3d->softconnect, u3d->vbus_active);
1168         /*
1169          * 1. external VBUS detect: we can disable/enable clock on demand.
1170          * 2. UDC VBUS detect: we have to enable clock all the time.
1171          * 3. No VBUS detect: we have to enable clock all the time.
1172          */
1173         if (u3d->driver && u3d->softconnect && u3d->vbus_active) {
1174                 retval = mv_u3d_enable(u3d);
1175                 if (retval == 0) {
1176                         /*
1177                          * after the clock is disabled, we lose all register
1178                          * context, so we have to re-init the registers
1179                          */
1180                         mv_u3d_controller_reset(u3d);
1181                         mv_u3d_ep0_reset(u3d);
1182                         mv_u3d_controller_start(u3d);
1183                 }
1184         } else if (u3d->driver && u3d->softconnect) {
1185                 if (!u3d->active)
1186                         goto out;
1187 
1188                 /* stop all the transfers in the queue */
1189                 mv_u3d_stop_activity(u3d, u3d->driver);
1190                 mv_u3d_controller_stop(u3d);
1191                 mv_u3d_disable(u3d);
1192         }
1193 
1194 out:
1195         spin_unlock_irqrestore(&u3d->lock, flags);
1196         return retval;
1197 }
1198 
1199 /* constrain controller's VBUS power usage
1200  * This call is used by gadget drivers during SET_CONFIGURATION calls,
1201  * reporting how much power the device may consume.  For example, this
1202  * could affect how quickly batteries are recharged.
1203  *
1204  * Returns zero on success, else negative errno.
1205  */
1206 static int mv_u3d_vbus_draw(struct usb_gadget *gadget, unsigned mA)
1207 {
1208         struct mv_u3d *u3d = container_of(gadget, struct mv_u3d, gadget);
1209 
1210         u3d->power = mA;
1211 
1212         return 0;
1213 }
1214 
1215 static int mv_u3d_pullup(struct usb_gadget *gadget, int is_on)
1216 {
1217         struct mv_u3d *u3d = container_of(gadget, struct mv_u3d, gadget);
1218         unsigned long flags;
1219         int retval = 0;
1220 
1221         spin_lock_irqsave(&u3d->lock, flags);
1222 
1223         dev_dbg(u3d->dev, "%s: softconnect %d, vbus_active %d\n",
1224                 __func__, u3d->softconnect, u3d->vbus_active);
1225         u3d->softconnect = (is_on != 0);
1226         if (u3d->driver && u3d->softconnect && u3d->vbus_active) {
1227                 retval = mv_u3d_enable(u3d);
1228                 if (retval == 0) {
1229                         /*
1230                          * after the clock is disabled, we lose all register
1231                          * context, so we have to re-init the registers
1232                          */
1233                         mv_u3d_controller_reset(u3d);
1234                         mv_u3d_ep0_reset(u3d);
1235                         mv_u3d_controller_start(u3d);
1236                 }
1237         } else if (u3d->driver && u3d->vbus_active) {
1238                 /* stop all the transfers in the queue */
1239                 mv_u3d_stop_activity(u3d, u3d->driver);
1240                 mv_u3d_controller_stop(u3d);
1241                 mv_u3d_disable(u3d);
1242         }
1243 
1244         spin_unlock_irqrestore(&u3d->lock, flags);
1245 
1246         return retval;
1247 }
1248 
1249 static int mv_u3d_start(struct usb_gadget *g,
1250                 struct usb_gadget_driver *driver)
1251 {
1252         struct mv_u3d *u3d = container_of(g, struct mv_u3d, gadget);
1253         struct mv_usb_platform_data *pdata = dev_get_platdata(u3d->dev);
1254         unsigned long flags;
1255 
1256         if (u3d->driver)
1257                 return -EBUSY;
1258 
1259         spin_lock_irqsave(&u3d->lock, flags);
1260 
1261         if (!u3d->clock_gating) {
1262                 clk_enable(u3d->clk);
1263                 if (pdata->phy_init)
1264                         pdata->phy_init(u3d->phy_regs);
1265         }
1266 
1267         /* hook up the driver ... */
1268         driver->driver.bus = NULL;
1269         u3d->driver = driver;
1270 
1271         u3d->ep0_dir = USB_DIR_OUT;
1272 
1273         spin_unlock_irqrestore(&u3d->lock, flags);
1274 
1275         u3d->vbus_valid_detect = 1;
1276 
1277         return 0;
1278 }
1279 
1280 static int mv_u3d_stop(struct usb_gadget *g,
1281                 struct usb_gadget_driver *driver)
1282 {
1283         struct mv_u3d *u3d = container_of(g, struct mv_u3d, gadget);
1284         struct mv_usb_platform_data *pdata = dev_get_platdata(u3d->dev);
1285         unsigned long flags;
1286 
1287         u3d->vbus_valid_detect = 0;
1288         spin_lock_irqsave(&u3d->lock, flags);
1289 
1290         /* enable the clock to access the controller registers */
1291         clk_enable(u3d->clk);
1292         if (pdata->phy_init)
1293                 pdata->phy_init(u3d->phy_regs);
1294 
1295         mv_u3d_controller_stop(u3d);
1296         /* stop all usb activities */
1297         u3d->gadget.speed = USB_SPEED_UNKNOWN;
1298         mv_u3d_stop_activity(u3d, driver);
1299         mv_u3d_disable(u3d);
1300 
1301         if (pdata->phy_deinit)
1302                 pdata->phy_deinit(u3d->phy_regs);
1303         clk_disable(u3d->clk);
1304 
1305         spin_unlock_irqrestore(&u3d->lock, flags);
1306 
1307         u3d->driver = NULL;
1308 
1309         return 0;
1310 }
1311 
1312 /* device controller usb_gadget_ops structure */
1313 static const struct usb_gadget_ops mv_u3d_ops = {
1314         /* notify controller that VBUS is powered or not */
1315         .vbus_session   = mv_u3d_vbus_session,
1316 
1317         /* constrain controller's VBUS power usage */
1318         .vbus_draw      = mv_u3d_vbus_draw,
1319 
1320         .pullup         = mv_u3d_pullup,
1321         .udc_start      = mv_u3d_start,
1322         .udc_stop       = mv_u3d_stop,
1323 };
1324 
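/*
 * Endpoint array layout: eps[0]/eps[1] are reserved for ep0 (both
 * directions share ep_context[1]); for i >= 2, even indices are the OUT
 * direction and odd indices the IN direction of endpoint i / 2.
 */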
1325 static int mv_u3d_eps_init(struct mv_u3d *u3d)
1326 {
1327         struct mv_u3d_ep        *ep;
1328         char name[14];
1329         int i;
1330 
1331         /* initialize ep0, ep0 in/out use eps[1] */
1332         ep = &u3d->eps[1];
1333         ep->u3d = u3d;
1334         strncpy(ep->name, "ep0", sizeof(ep->name));
1335         ep->ep.name = ep->name;
1336         ep->ep.ops = &mv_u3d_ep_ops;
1337         ep->wedge = 0;
1338         usb_ep_set_maxpacket_limit(&ep->ep, MV_U3D_EP0_MAX_PKT_SIZE);
1339         ep->ep_num = 0;
1340         ep->ep.desc = &mv_u3d_ep0_desc;
1341         INIT_LIST_HEAD(&ep->queue);
1342         INIT_LIST_HEAD(&ep->req_list);
1343         ep->ep_type = USB_ENDPOINT_XFER_CONTROL;
1344 
1345         /* add ep0 ep_context */
1346         ep->ep_context = &u3d->ep_context[1];
1347 
1348         /* initialize other endpoints */
1349         for (i = 2; i < u3d->max_eps * 2; i++) {
1350                 ep = &u3d->eps[i];
1351                 if (i & 1) {
1352                         snprintf(name, sizeof(name), "ep%din", i >> 1);
1353                         ep->direction = MV_U3D_EP_DIR_IN;
1354                 } else {
1355                         snprintf(name, sizeof(name), "ep%dout", i >> 1);
1356                         ep->direction = MV_U3D_EP_DIR_OUT;
1357                 }
1358                 ep->u3d = u3d;
1359                 strncpy(ep->name, name, sizeof(ep->name));
1360                 ep->ep.name = ep->name;
1361 
1362                 ep->ep.ops = &mv_u3d_ep_ops;
1363                 usb_ep_set_maxpacket_limit(&ep->ep, (unsigned short) ~0);
1364                 ep->ep_num = i / 2;
1365 
1366                 INIT_LIST_HEAD(&ep->queue);
1367                 list_add_tail(&ep->ep.ep_list, &u3d->gadget.ep_list);
1368 
1369                 INIT_LIST_HEAD(&ep->req_list);
1370                 spin_lock_init(&ep->req_lock);
1371                 ep->ep_context = &u3d->ep_context[i];
1372         }
1373 
1374         return 0;
1375 }
1376 
1377 /* delete all endpoint requests, called with spinlock held */
1378 static void mv_u3d_nuke(struct mv_u3d_ep *ep, int status)
1379 {
1380         /* endpoint fifo flush */
1381         mv_u3d_ep_fifo_flush(&ep->ep);
1382 
1383         while (!list_empty(&ep->queue)) {
1384                 struct mv_u3d_req *req = NULL;
1385                 req = list_entry(ep->queue.next, struct mv_u3d_req, queue);
1386                 mv_u3d_done(ep, req, status);
1387         }
1388 }
1389 
1390 /* stop all USB activities */
1391 static
1392 void mv_u3d_stop_activity(struct mv_u3d *u3d, struct usb_gadget_driver *driver)
1393 {
1394         struct mv_u3d_ep        *ep;
1395 
1396         mv_u3d_nuke(&u3d->eps[1], -ESHUTDOWN);
1397 
1398         list_for_each_entry(ep, &u3d->gadget.ep_list, ep.ep_list) {
1399                 mv_u3d_nuke(ep, -ESHUTDOWN);
1400         }
1401 
1402         /* report disconnect; the driver is already quiesced */
1403         if (driver) {
1404                 spin_unlock(&u3d->lock);
1405                 driver->disconnect(&u3d->gadget);
1406                 spin_lock(&u3d->lock);
1407         }
1408 }
1409 
1410 static void mv_u3d_irq_process_error(struct mv_u3d *u3d)
1411 {
1412         /* Increment the error count */
1413         u3d->errors++;
1414         dev_err(u3d->dev, "%s\n", __func__);
1415 }
1416 
1417 static void mv_u3d_irq_process_link_change(struct mv_u3d *u3d)
1418 {
1419         u32 linkchange;
1420 
1421         linkchange = ioread32(&u3d->vuc_regs->linkchange);
1422         iowrite32(linkchange, &u3d->vuc_regs->linkchange);
1423 
1424         dev_dbg(u3d->dev, "linkchange: 0x%x\n", linkchange);
1425 
1426         if (linkchange & MV_U3D_LINK_CHANGE_LINK_UP) {
1427                 dev_dbg(u3d->dev, "link up: ltssm state: 0x%x\n",
1428                         ioread32(&u3d->vuc_regs->ltssmstate));
1429 
1430                 u3d->usb_state = USB_STATE_DEFAULT;
1431                 u3d->ep0_dir = MV_U3D_EP_DIR_OUT;
1432                 u3d->ep0_state = MV_U3D_WAIT_FOR_SETUP;
1433 
1434                 /* set speed */
1435                 u3d->gadget.speed = USB_SPEED_SUPER;
1436         }
1437 
1438         if (linkchange & MV_U3D_LINK_CHANGE_SUSPEND) {
1439                 dev_dbg(u3d->dev, "link suspend\n");
1440                 u3d->resume_state = u3d->usb_state;
1441                 u3d->usb_state = USB_STATE_SUSPENDED;
1442         }
1443 
1444         if (linkchange & MV_U3D_LINK_CHANGE_RESUME) {
1445                 dev_dbg(u3d->dev, "link resume\n");
1446                 u3d->usb_state = u3d->resume_state;
1447                 u3d->resume_state = 0;
1448         }
1449 
1450         if (linkchange & MV_U3D_LINK_CHANGE_WRESET) {
1451                 dev_dbg(u3d->dev, "warm reset\n");
1452                 u3d->usb_state = USB_STATE_POWERED;
1453         }
1454 
1455         if (linkchange & MV_U3D_LINK_CHANGE_HRESET) {
1456                 dev_dbg(u3d->dev, "hot reset\n");
1457                 u3d->usb_state = USB_STATE_DEFAULT;
1458         }
1459 
1460         if (linkchange & MV_U3D_LINK_CHANGE_INACT)
1461                 dev_dbg(u3d->dev, "inactive\n");
1462 
1463         if (linkchange & MV_U3D_LINK_CHANGE_DISABLE_AFTER_U0)
1464                 dev_dbg(u3d->dev, "ss.disabled\n");
1465 
1466         if (linkchange & MV_U3D_LINK_CHANGE_VBUS_INVALID) {
1467                 dev_dbg(u3d->dev, "vbus invalid\n");
1468                 u3d->usb_state = USB_STATE_ATTACHED;
1469                 u3d->vbus_valid_detect = 1;
1470                 /* if external vbus detect is not supported,
1471                  * we handle it here.
1472                  */
1473                 if (!u3d->vbus) {
1474                         spin_unlock(&u3d->lock);
1475                         mv_u3d_vbus_session(&u3d->gadget, 0);
1476                         spin_lock(&u3d->lock);
1477                 }
1478         }
1479 }
1480 
1481 static void mv_u3d_ch9setaddress(struct mv_u3d *u3d,
1482                                 struct usb_ctrlrequest *setup)
1483 {
1484         u32 tmp;
1485 
1486         if (u3d->usb_state != USB_STATE_DEFAULT) {
1487                 dev_err(u3d->dev,
1488                         "%s, cannot setaddr in this state (%d)\n",
1489                         __func__, u3d->usb_state);
1490                 goto err;
1491         }
1492 
1493         u3d->dev_addr = (u8)setup->wValue;
1494 
1495         dev_dbg(u3d->dev, "%s: 0x%x\n", __func__, u3d->dev_addr);
1496 
1497         if (u3d->dev_addr > 127) {
1498                 dev_err(u3d->dev,
1499                         "%s, device address out of range\n", __func__);
1500                 u3d->dev_addr = 0;
1501                 goto err;
1502         }
1503 
1504         /* update usb state */
1505         u3d->usb_state = USB_STATE_ADDRESS;
1506 
1507         /* set the new address */
1508         tmp = ioread32(&u3d->vuc_regs->devaddrtiebrkr);
1509         tmp &= ~0x7F;
1510         tmp |= (u32)u3d->dev_addr;
1511         iowrite32(tmp, &u3d->vuc_regs->devaddrtiebrkr);
1512 
1513         return;
1514 err:
1515         mv_u3d_ep0_stall(u3d);
1516 }
1517 
1518 static int mv_u3d_is_set_configuration(struct usb_ctrlrequest *setup)
1519 {
1520         if ((setup->bRequestType & USB_TYPE_MASK) == USB_TYPE_STANDARD)
1521                 if (setup->bRequest == USB_REQ_SET_CONFIGURATION)
1522                         return 1;
1523 
1524         return 0;
1525 }
1526 
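     /*
      * Decode a SETUP packet on ep0: SET_ADDRESS is handled locally,
      * everything else is delegated to the gadget driver's setup()
      * callback with the controller lock temporarily released.
      */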
1527 static void mv_u3d_handle_setup_packet(struct mv_u3d *u3d, u8 ep_num,
1528         struct usb_ctrlrequest *setup)
1529         __releases(&u3d->lock)
1530         __acquires(&u3d->lock)
1531 {
1532         bool delegate = false;
1533 
1534         mv_u3d_nuke(&u3d->eps[ep_num * 2 + MV_U3D_EP_DIR_IN], -ESHUTDOWN);
1535 
1536         dev_dbg(u3d->dev, "SETUP %02x.%02x v%04x i%04x l%04x\n",
1537                         setup->bRequestType, setup->bRequest,
1538                         setup->wValue, setup->wIndex, setup->wLength);
1539 
1540         /* We process some standard setup requests here */
1541         if ((setup->bRequestType & USB_TYPE_MASK) == USB_TYPE_STANDARD) {
1542                 switch (setup->bRequest) {
1543                 case USB_REQ_GET_STATUS:
1544                         delegate = true;
1545                         break;
1546 
1547                 case USB_REQ_SET_ADDRESS:
1548                         mv_u3d_ch9setaddress(u3d, setup);
1549                         break;
1550 
1551                 case USB_REQ_CLEAR_FEATURE:
1552                         delegate = true;
1553                         break;
1554 
1555                 case USB_REQ_SET_FEATURE:
1556                         delegate = true;
1557                         break;
1558 
1559                 default:
1560                         delegate = true;
1561                 }
1562         } else
1563                 delegate = true;
1564 
1565         /* delegate USB standard requests to the gadget driver */
1566         if (delegate) {
1567                 /* USB requests handled by gadget */
1568                 if (setup->wLength) {
1569                         /* DATA phase from gadget, STATUS phase from u3d */
1570                         u3d->ep0_dir = (setup->bRequestType & USB_DIR_IN)
1571                                         ? MV_U3D_EP_DIR_IN : MV_U3D_EP_DIR_OUT;
1572                         spin_unlock(&u3d->lock);
1573                         if (u3d->driver->setup(&u3d->gadget,
1574                                 &u3d->local_setup_buff) < 0) {
1575                                 dev_err(u3d->dev, "setup error!\n");
1576                                 mv_u3d_ep0_stall(u3d);
1577                         }
1578                         spin_lock(&u3d->lock);
1579                 } else {
1580                         /* no DATA phase, STATUS phase from gadget */
1581                         u3d->ep0_dir = MV_U3D_EP_DIR_IN;
1582                         u3d->ep0_state = MV_U3D_STATUS_STAGE;
1583                         spin_unlock(&u3d->lock);
1584                         if (u3d->driver->setup(&u3d->gadget,
1585                                 &u3d->local_setup_buff) < 0)
1586                                 mv_u3d_ep0_stall(u3d);
1587                         spin_lock(&u3d->lock);
1588                 }
1589 
1590                 if (mv_u3d_is_set_configuration(setup)) {
1591                         dev_dbg(u3d->dev, "u3d configured\n");
1592                         u3d->usb_state = USB_STATE_CONFIGURED;
1593                 }
1594         }
1595 }
1596 
1597 static void mv_u3d_get_setup_data(struct mv_u3d *u3d, u8 ep_num, u8 *buffer_ptr)
1598 {
1599         struct mv_u3d_ep_context *epcontext;
1600 
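             /*
              * The SETUP packet is read from the IN-direction ep context
              * of the endpoint that received it.
              */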
1601         epcontext = &u3d->ep_context[ep_num * 2 + MV_U3D_EP_DIR_IN];
1602 
1603         /* Copy the setup packet to local buffer */
1604         memcpy(buffer_ptr, (u8 *) &epcontext->setup_buffer, 8);
1605 }
1606 
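     /*
      * Each set bit in the setuplock register marks an endpoint with a
      * pending SETUP packet; the handled bits are written back when done.
      */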
1607 static void mv_u3d_irq_process_setup(struct mv_u3d *u3d)
1608 {
1609         u32 tmp, i;
1610         /* Process all Setup packet received interrupts */
1611         tmp = ioread32(&u3d->vuc_regs->setuplock);
1612         if (tmp) {
1613                 for (i = 0; i < u3d->max_eps; i++) {
1614                         if (tmp & (1 << i)) {
1615                                 mv_u3d_get_setup_data(u3d, i,
1616                                         (u8 *)(&u3d->local_setup_buff));
1617                                 mv_u3d_handle_setup_packet(u3d, i,
1618                                         &u3d->local_setup_buff);
1619                         }
1620                 }
1621         }
1622 
1623         iowrite32(tmp, &u3d->vuc_regs->setuplock);
1624 }
1625 
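     /*
      * Transfer completion: the endcomplete register flags endpoints whose
      * TRBs have finished; complete their requests and restart the queues.
      */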
1626 static void mv_u3d_irq_process_tr_complete(struct mv_u3d *u3d)
1627 {
1628         u32 tmp, bit_pos;
1629         int i, ep_num = 0, direction = 0;
1630         struct mv_u3d_ep        *curr_ep;
1631         struct mv_u3d_req *curr_req, *temp_req;
1632         int status;
1633 
1634         tmp = ioread32(&u3d->vuc_regs->endcomplete);
1635 
1636         dev_dbg(u3d->dev, "tr_complete: ep: 0x%x\n", tmp);
1637         if (!tmp)
1638                 return;
1639         iowrite32(tmp, &u3d->vuc_regs->endcomplete);
1640 
1641         for (i = 0; i < u3d->max_eps * 2; i++) {
1642                 ep_num = i >> 1;
1643                 direction = i % 2;
1644 
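                     /* bits 0-15 flag OUT completions, bits 16-31 flag IN completions */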
1645                 bit_pos = 1 << (ep_num + 16 * direction);
1646 
1647                 if (!(bit_pos & tmp))
1648                         continue;
1649 
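                     /* ep0 is represented by eps[1] for both directions */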
1650                 if (i == 0)
1651                         curr_ep = &u3d->eps[1];
1652                 else
1653                         curr_ep = &u3d->eps[i];
1654 
1655                 /* remove req out of ep request list after completion */
1656                 dev_dbg(u3d->dev, "tr comp: check req_list\n");
1657                 spin_lock(&curr_ep->req_lock);
1658                 if (!list_empty(&curr_ep->req_list)) {
1659                         struct mv_u3d_req *req;
1660                         req = list_entry(curr_ep->req_list.next,
1661                                                 struct mv_u3d_req, list);
1662                         list_del_init(&req->list);
1663                         curr_ep->processing = 0;
1664                 }
1665                 spin_unlock(&curr_ep->req_lock);
1666 
1667                 /* process the req queue until an incomplete request */
1668                 list_for_each_entry_safe(curr_req, temp_req,
1669                         &curr_ep->queue, queue) {
1670                         status = mv_u3d_process_ep_req(u3d, i, curr_req);
1671                         if (status)
1672                                 break;
1673                         /* write back status to req */
1674                         curr_req->req.status = status;
1675 
1676                         /* ep0 request completion */
1677                         if (ep_num == 0) {
1678                                 mv_u3d_done(curr_ep, curr_req, 0);
1679                                 break;
1680                         } else {
1681                                 mv_u3d_done(curr_ep, curr_req, status);
1682                         }
1683                 }
1684 
1685                 dev_dbg(u3d->dev, "call mv_u3d_start_queue from ep complete\n");
1686                 mv_u3d_start_queue(curr_ep);
1687         }
1688 }
1689 
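     /*
      * Top-level interrupt handler: dispatches vbus-valid, error, link
      * change, transfer-complete and SETUP events under the device lock.
      */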
1690 static irqreturn_t mv_u3d_irq(int irq, void *dev)
1691 {
1692         struct mv_u3d *u3d = (struct mv_u3d *)dev;
1693         u32 status, intr;
1694         u32 bridgesetting;
1695         u32 trbunderrun;
1696 
1697         spin_lock(&u3d->lock);
1698 
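             /* only service interrupt causes that are currently enabled */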
1699         status = ioread32(&u3d->vuc_regs->intrcause);
1700         intr = ioread32(&u3d->vuc_regs->intrenable);
1701         status &= intr;
1702 
1703         if (status == 0) {
1704                 spin_unlock(&u3d->lock);
1705                 dev_err(u3d->dev, "irq error!\n");
1706                 return IRQ_NONE;
1707         }
1708 
1709         if (status & MV_U3D_USBINT_VBUS_VALID) {
1710                 bridgesetting = ioread32(&u3d->vuc_regs->bridgesetting);
1711                 if (bridgesetting & MV_U3D_BRIDGE_SETTING_VBUS_VALID) {
1712                         /* write vbus valid bit of bridge setting to clear */
1713                         bridgesetting = MV_U3D_BRIDGE_SETTING_VBUS_VALID;
1714                         iowrite32(bridgesetting, &u3d->vuc_regs->bridgesetting);
1715                         dev_dbg(u3d->dev, "vbus valid\n");
1716 
1717                         u3d->usb_state = USB_STATE_POWERED;
1718                         u3d->vbus_valid_detect = 0;
1719                         /* if external vbus detect is not supported,
1720                          * we handle it here.
1721                          */
1722                         if (!u3d->vbus) {
1723                                 spin_unlock(&u3d->lock);
1724                                 mv_u3d_vbus_session(&u3d->gadget, 1);
1725                                 spin_lock(&u3d->lock);
1726                         }
1727                 } else
1728                         dev_err(u3d->dev, "vbus bit is not set\n");
1729         }
1730 
1731         /* RX data is already in the 16KB FIFO. */
1732         if (status & MV_U3D_USBINT_UNDER_RUN) {
1733                 trbunderrun = ioread32(&u3d->vuc_regs->trbunderrun);
1734                 dev_err(u3d->dev, "under run, ep%d\n", trbunderrun);
1735                 iowrite32(trbunderrun, &u3d->vuc_regs->trbunderrun);
1736                 mv_u3d_irq_process_error(u3d);
1737         }
1738 
1739         if (status & (MV_U3D_USBINT_RXDESC_ERR | MV_U3D_USBINT_TXDESC_ERR)) {
1740                 /* write one to clear */
1741                 iowrite32(status & (MV_U3D_USBINT_RXDESC_ERR
1742                         | MV_U3D_USBINT_TXDESC_ERR),
1743                         &u3d->vuc_regs->intrcause);
1744                 dev_err(u3d->dev, "desc err 0x%x\n", status);
1745                 mv_u3d_irq_process_error(u3d);
1746         }
1747 
1748         if (status & MV_U3D_USBINT_LINK_CHG)
1749                 mv_u3d_irq_process_link_change(u3d);
1750 
1751         if (status & MV_U3D_USBINT_TX_COMPLETE)
1752                 mv_u3d_irq_process_tr_complete(u3d);
1753 
1754         if (status & MV_U3D_USBINT_RX_COMPLETE)
1755                 mv_u3d_irq_process_tr_complete(u3d);
1756 
1757         if (status & MV_U3D_USBINT_SETUP)
1758                 mv_u3d_irq_process_setup(u3d);
1759 
1760         spin_unlock(&u3d->lock);
1761         return IRQ_HANDLED;
1762 }
1763 
1764 static int mv_u3d_remove(struct platform_device *dev)
1765 {
1766         struct mv_u3d *u3d = platform_get_drvdata(dev);
1767 
1768         BUG_ON(u3d == NULL);
1769 
1770         usb_del_gadget_udc(&u3d->gadget);
1771 
1772         /* free memory allocated in probe */
1773         if (u3d->trb_pool)
1774                 dma_pool_destroy(u3d->trb_pool);
1775 
1776         if (u3d->ep_context)
1777                 dma_free_coherent(&dev->dev, u3d->ep_context_size,
1778                         u3d->ep_context, u3d->ep_context_dma);
1779 
1780         kfree(u3d->eps);
1781 
1782         if (u3d->irq)
1783                 free_irq(u3d->irq, u3d);
1784 
1785         if (u3d->cap_regs)
1786                 iounmap(u3d->cap_regs);
1787         u3d->cap_regs = NULL;
1788 
1789         kfree(u3d->status_req);
1790 
1791         clk_put(u3d->clk);
1792 
1793         kfree(u3d);
1794 
1795         return 0;
1796 }
1797 
1798 static int mv_u3d_probe(struct platform_device *dev)
1799 {
1800         struct mv_u3d *u3d = NULL;
1801         struct mv_usb_platform_data *pdata = dev_get_platdata(&dev->dev);
1802         int retval = 0;
1803         struct resource *r;
1804         size_t size;
1805 
1806         if (!dev_get_platdata(&dev->dev)) {
1807                 dev_err(&dev->dev, "missing platform_data\n");
1808                 retval = -ENODEV;
1809                 goto err_pdata;
1810         }
1811 
1812         u3d = kzalloc(sizeof(*u3d), GFP_KERNEL);
1813         if (!u3d) {
1814                 dev_err(&dev->dev, "failed to allocate memory for u3d\n");
1815                 retval = -ENOMEM;
1816                 goto err_alloc_private;
1817         }
1818 
1819         spin_lock_init(&u3d->lock);
1820 
1821         platform_set_drvdata(dev, u3d);
1822 
1823         u3d->dev = &dev->dev;
1824         u3d->vbus = pdata->vbus;
1825 
1826         u3d->clk = clk_get(&dev->dev, NULL);
1827         if (IS_ERR(u3d->clk)) {
1828                 retval = PTR_ERR(u3d->clk);
1829                 goto err_get_clk;
1830         }
1831 
1832         r = platform_get_resource_byname(dev, IORESOURCE_MEM, "capregs");
1833         if (!r) {
1834                 dev_err(&dev->dev, "no I/O memory resource defined\n");
1835                 retval = -ENODEV;
1836                 goto err_get_cap_regs;
1837         }
1838 
1839         u3d->cap_regs = (struct mv_u3d_cap_regs __iomem *)
1840                 ioremap(r->start, resource_size(r));
1841         if (!u3d->cap_regs) {
1842                 dev_err(&dev->dev, "failed to map I/O memory\n");
1843                 retval = -EBUSY;
1844                 goto err_map_cap_regs;
1845         } else {
1846                 dev_dbg(&dev->dev, "cap_regs address: 0x%lx/0x%lx\n",
1847                         (unsigned long) r->start,
1848                         (unsigned long) u3d->cap_regs);
1849         }
1850 
1851         /* we will access controller register, so enable the u3d controller */
1852         clk_enable(u3d->clk);
1853 
1854         if (pdata->phy_init) {
1855                 retval = pdata->phy_init(u3d->phy_regs);
1856                 if (retval) {
1857                         dev_err(&dev->dev, "init phy error %d\n", retval);
1858                         goto err_u3d_enable;
1859                 }
1860         }
1861 
1862         u3d->op_regs = (struct mv_u3d_op_regs __iomem *)(u3d->cap_regs
1863                 + MV_U3D_USB3_OP_REGS_OFFSET);
1864 
1865         u3d->vuc_regs = (struct mv_u3d_vuc_regs __iomem *)(u3d->cap_regs
1866                 + ioread32(&u3d->cap_regs->vuoff));
1867 
1868         u3d->max_eps = 16;
1869 
1870         /*
1871          * Some platforms use usb to download an image and may not disconnect
1872          * the usb gadget before loading the kernel, so stop u3d here first.
1873          */
1874         mv_u3d_controller_stop(u3d);
1875         iowrite32(0xFFFFFFFF, &u3d->vuc_regs->intrcause);
1876 
1877         if (pdata->phy_deinit)
1878                 pdata->phy_deinit(u3d->phy_regs);
1879         clk_disable(u3d->clk);
1880 
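             /*
              * One ep context per direction per endpoint, rounded up to the
              * required ep-context alignment.
              */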
1881         size = u3d->max_eps * sizeof(struct mv_u3d_ep_context) * 2;
1882         size = (size + MV_U3D_EP_CONTEXT_ALIGNMENT - 1)
1883                 & ~(MV_U3D_EP_CONTEXT_ALIGNMENT - 1);
1884         u3d->ep_context = dma_alloc_coherent(&dev->dev, size,
1885                                         &u3d->ep_context_dma, GFP_KERNEL);
1886         if (!u3d->ep_context) {
1887                 dev_err(&dev->dev, "allocate ep context memory failed\n");
1888                 retval = -ENOMEM;
1889                 goto err_alloc_ep_context;
1890         }
1891         u3d->ep_context_size = size;
1892 
1893         /* create TRB dma_pool resource */
1894         u3d->trb_pool = dma_pool_create("u3d_trb",
1895                         &dev->dev,
1896                         sizeof(struct mv_u3d_trb_hw),
1897                         MV_U3D_TRB_ALIGNMENT,
1898                         MV_U3D_DMA_BOUNDARY);
1899 
1900         if (!u3d->trb_pool) {
1901                 retval = -ENOMEM;
1902                 goto err_alloc_trb_pool;
1903         }
1904 
1905         size = u3d->max_eps * sizeof(struct mv_u3d_ep) * 2;
1906         u3d->eps = kzalloc(size, GFP_KERNEL);
1907         if (!u3d->eps) {
1908                 dev_err(&dev->dev, "allocate ep memory failed\n");
1909                 retval = -ENOMEM;
1910                 goto err_alloc_eps;
1911         }
1912 
1913         /* initialize ep0 status request structure */
1914         u3d->status_req = kzalloc(sizeof(struct mv_u3d_req) + 8, GFP_KERNEL);
1915         if (!u3d->status_req) {
1916                 dev_err(&dev->dev, "allocate status_req memory failed\n");
1917                 retval = -ENOMEM;
1918                 goto err_alloc_status_req;
1919         }
1920         INIT_LIST_HEAD(&u3d->status_req->queue);
1921 
1922         /* use the 8 bytes appended to the request struct as the status buffer */
1923         u3d->status_req->req.buf = (char *)u3d->status_req
1924                                         + sizeof(struct mv_u3d_req);
1925         u3d->status_req->req.dma = virt_to_phys(u3d->status_req->req.buf);
1926 
1927         u3d->resume_state = USB_STATE_NOTATTACHED;
1928         u3d->usb_state = USB_STATE_ATTACHED;
1929         u3d->ep0_dir = MV_U3D_EP_DIR_OUT;
1930         u3d->remote_wakeup = 0;
1931 
1932         r = platform_get_resource(dev, IORESOURCE_IRQ, 0);
1933         if (!r) {
1934                 dev_err(&dev->dev, "no IRQ resource defined\n");
1935                 retval = -ENODEV;
1936                 goto err_get_irq;
1937         }
1938         u3d->irq = r->start;
1939         if (request_irq(u3d->irq, mv_u3d_irq,
1940                 IRQF_SHARED, driver_name, u3d)) {
1941                 dev_err(&dev->dev, "Request irq %d for u3d failed\n",
1942                         u3d->irq);
1943                 u3d->irq = 0;
1944                 retval = -ENODEV;
1945                 goto err_request_irq;
1946         }
1947 
1948         /* initialize gadget structure */
1949         u3d->gadget.ops = &mv_u3d_ops;  /* usb_gadget_ops */
1950         u3d->gadget.ep0 = &u3d->eps[1].ep;      /* gadget ep0 */
1951         INIT_LIST_HEAD(&u3d->gadget.ep_list);   /* ep_list */
1952         u3d->gadget.speed = USB_SPEED_UNKNOWN;  /* speed */
1953 
1954         /* the "gadget" abstracts/virtualizes the controller */
1955         u3d->gadget.name = driver_name;         /* gadget name */
1956 
1957         mv_u3d_eps_init(u3d);
1958 
1959         /* external vbus detection */
1960         if (u3d->vbus) {
1961                 u3d->clock_gating = 1;
1962                 dev_err(&dev->dev, "external vbus detection\n");
1963         }
1964 
1965         if (!u3d->clock_gating)
1966                 u3d->vbus_active = 1;
1967 
1968         /* enable usb3 controller vbus detection */
1969         u3d->vbus_valid_detect = 1;
1970 
1971         retval = usb_add_gadget_udc(&dev->dev, &u3d->gadget);
1972         if (retval)
1973                 goto err_unregister;
1974 
1975         dev_dbg(&dev->dev, "successfully probed usb3 device %s clock gating\n",
1976                 u3d->clock_gating ? "with" : "without");
1977 
1978         return 0;
1979 
1980 err_unregister:
1981         free_irq(u3d->irq, u3d);
1982 err_request_irq:
1983 err_get_irq:
1984         kfree(u3d->status_req);
1985 err_alloc_status_req:
1986         kfree(u3d->eps);
1987 err_alloc_eps:
1988         dma_pool_destroy(u3d->trb_pool);
1989 err_alloc_trb_pool:
1990         dma_free_coherent(&dev->dev, u3d->ep_context_size,
1991                 u3d->ep_context, u3d->ep_context_dma);
1992 err_alloc_ep_context:
1993         if (pdata->phy_deinit)
1994                 pdata->phy_deinit(u3d->phy_regs);
1995         clk_disable(u3d->clk);
1996 err_u3d_enable:
1997         iounmap(u3d->cap_regs);
1998 err_map_cap_regs:
1999 err_get_cap_regs:
2000 err_get_clk:
2001         clk_put(u3d->clk);
2002         kfree(u3d);
2003 err_alloc_private:
2004 err_pdata:
2005         return retval;
2006 }
2007 
2008 #ifdef CONFIG_PM_SLEEP
2009 static int mv_u3d_suspend(struct device *dev)
2010 {
2011         struct mv_u3d *u3d = dev_get_drvdata(dev);
2012 
2013         /*
2014          * USB can suspend only when the cable is unplugged.
2015          * The clock_gating == 1 case is handled by the vbus session,
2016          * so it is not handled here.
2017          */
2018         if (!u3d->clock_gating) {
2019                 mv_u3d_controller_stop(u3d);
2020 
2021                 spin_lock_irq(&u3d->lock);
2022                 /* stop all usb activities */
2023                 mv_u3d_stop_activity(u3d, u3d->driver);
2024                 spin_unlock_irq(&u3d->lock);
2025 
2026                 mv_u3d_disable(u3d);
2027         }
2028 
2029         return 0;
2030 }
2031 
2032 static int mv_u3d_resume(struct device *dev)
2033 {
2034         struct mv_u3d *u3d = dev_get_drvdata(dev);
2035         int retval;
2036 
2037         if (!u3d->clock_gating) {
2038                 retval = mv_u3d_enable(u3d);
2039                 if (retval)
2040                         return retval;
2041 
2042                 if (u3d->driver && u3d->softconnect) {
2043                         mv_u3d_controller_reset(u3d);
2044                         mv_u3d_ep0_reset(u3d);
2045                         mv_u3d_controller_start(u3d);
2046                 }
2047         }
2048 
2049         return 0;
2050 }
2051 #endif
2052 
2053 static SIMPLE_DEV_PM_OPS(mv_u3d_pm_ops, mv_u3d_suspend, mv_u3d_resume);
2054 
2055 static void mv_u3d_shutdown(struct platform_device *dev)
2056 {
2057         struct mv_u3d *u3d = platform_get_drvdata(dev);
2058         u32 tmp;
2059 
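             /* clear the run/stop bit so the controller halts on shutdown */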
2060         tmp = ioread32(&u3d->op_regs->usbcmd);
2061         tmp &= ~MV_U3D_CMD_RUN_STOP;
2062         iowrite32(tmp, &u3d->op_regs->usbcmd);
2063 }
2064 
2065 static struct platform_driver mv_u3d_driver = {
2066         .probe          = mv_u3d_probe,
2067         .remove         = mv_u3d_remove,
2068         .shutdown       = mv_u3d_shutdown,
2069         .driver         = {
2070                 .owner  = THIS_MODULE,
2071                 .name   = "mv-u3d",
2072                 .pm     = &mv_u3d_pm_ops,
2073         },
2074 };
2075 
2076 module_platform_driver(mv_u3d_driver);
2077 MODULE_ALIAS("platform:mv-u3d");
2078 MODULE_DESCRIPTION(DRIVER_DESC);
2079 MODULE_AUTHOR("Yu Xu <yuxu@marvell.com>");
2080 MODULE_LICENSE("GPL");
2081 
