Lines matching refs: drvdata
(References to drvdata in the Xilinx TFT framebuffer driver, xilinxfb.c. The numbers are the source line numbers of the matching lines; lines in between that do not mention drvdata are omitted.)

166 static void xilinx_fb_out32(struct xilinxfb_drvdata *drvdata, u32 offset,
169 if (drvdata->flags & BUS_ACCESS_FLAG) {
170 if (drvdata->flags & LITTLE_ENDIAN_ACCESS)
171 iowrite32(val, drvdata->regs + (offset << 2));
173 iowrite32be(val, drvdata->regs + (offset << 2));
177 dcr_write(drvdata->dcr_host, offset, val);
181 static u32 xilinx_fb_in32(struct xilinxfb_drvdata *drvdata, u32 offset)
183 if (drvdata->flags & BUS_ACCESS_FLAG) {
184 if (drvdata->flags & LITTLE_ENDIAN_ACCESS)
185 return ioread32(drvdata->regs + (offset << 2));
187 return ioread32be(drvdata->regs + (offset << 2));
191 return dcr_read(drvdata->dcr_host, offset);
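Because only the lines mentioning drvdata are printed, the bodies of the two register accessors are truncated above. Filled in from context, they plausibly read as below: the flags field selects between memory-mapped I/O (little- or big-endian, decided at probe time) and the PowerPC DCR bus. This is a hedged reconstruction, not a verbatim copy of the driver (in particular, the DCR branch is normally compiled only when CONFIG_PPC_DCR is set).

static void xilinx_fb_out32(struct xilinxfb_drvdata *drvdata, u32 offset,
			    u32 val)
{
	if (drvdata->flags & BUS_ACCESS_FLAG) {
		/* registers are 32-bit words, hence offset << 2 */
		if (drvdata->flags & LITTLE_ENDIAN_ACCESS)
			iowrite32(val, drvdata->regs + (offset << 2));
		else
			iowrite32be(val, drvdata->regs + (offset << 2));
	} else {
		/* core attached to the DCR bus (PowerPC systems) */
		dcr_write(drvdata->dcr_host, offset, val);
	}
}

static u32 xilinx_fb_in32(struct xilinxfb_drvdata *drvdata, u32 offset)
{
	if (drvdata->flags & BUS_ACCESS_FLAG) {
		if (drvdata->flags & LITTLE_ENDIAN_ACCESS)
			return ioread32(drvdata->regs + (offset << 2));
		return ioread32be(drvdata->regs + (offset << 2));
	}
	return dcr_read(drvdata->dcr_host, offset);
}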
229 struct xilinxfb_drvdata *drvdata = to_xilinxfb_drvdata(fbi);
234 xilinx_fb_out32(drvdata, REG_CTRL, drvdata->reg_ctrl_default);
242 xilinx_fb_out32(drvdata, REG_CTRL, 0);
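Lines 229-242 come from the fb_blank callback, which turns the panel on or off by writing REG_CTRL through the accessor above. A minimal sketch of the whole handler, assuming the standard FB_BLANK_* cases (only the drvdata lines are verbatim):

static int xilinx_fb_blank(int blank_mode, struct fb_info *fbi)
{
	struct xilinxfb_drvdata *drvdata = to_xilinxfb_drvdata(fbi);

	switch (blank_mode) {
	case FB_BLANK_UNBLANK:
		/* turn on panel */
		xilinx_fb_out32(drvdata, REG_CTRL, drvdata->reg_ctrl_default);
		break;
	case FB_BLANK_NORMAL:
	case FB_BLANK_VSYNC_SUSPEND:
	case FB_BLANK_HSYNC_SUSPEND:
	case FB_BLANK_POWERDOWN:
		/* turn off panel */
		xilinx_fb_out32(drvdata, REG_CTRL, 0);
		break;
	default:
		break;
	}
	return 0;
}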
263 struct xilinxfb_drvdata *drvdata,
270 if (drvdata->flags & BUS_ACCESS_FLAG) {
273 drvdata->regs = devm_platform_get_and_ioremap_resource(pdev, 0, &res);
274 if (IS_ERR(drvdata->regs))
275 return PTR_ERR(drvdata->regs);
277 drvdata->regs_phys = res->start;
282 drvdata->fb_phys = pdata->fb_phys;
283 drvdata->fb_virt = ioremap(pdata->fb_phys, fbsize);
285 drvdata->fb_alloced = 1;
286 drvdata->fb_virt = dma_alloc_coherent(dev, PAGE_ALIGN(fbsize),
287 &drvdata->fb_phys,
291 if (!drvdata->fb_virt) {
297 memset_io((void __iomem *)drvdata->fb_virt, 0, fbsize);
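Lines 282-297, inside xilinxfb_assign(), set up the framebuffer memory: if the platform data supplies a fixed fb_phys address the driver simply ioremap()s it, otherwise it allocates a DMA-coherent buffer and records ownership in fb_alloced so the release path knows how to free it. A hedged reconstruction of that block (the else branch and error handling are filled in from context):

	if (pdata->fb_phys) {
		drvdata->fb_phys = pdata->fb_phys;
		drvdata->fb_virt = ioremap(pdata->fb_phys, fbsize);
	} else {
		drvdata->fb_alloced = 1;
		drvdata->fb_virt = dma_alloc_coherent(dev, PAGE_ALIGN(fbsize),
						      &drvdata->fb_phys,
						      GFP_KERNEL);
	}
	if (!drvdata->fb_virt) {
		dev_err(dev, "Could not allocate frame buffer memory\n");
		return -ENOMEM;
	}

	/* clear the whole (not just the visible) frame buffer */
	memset_io((void __iomem *)drvdata->fb_virt, 0, fbsize);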
300 xilinx_fb_out32(drvdata, REG_FB_ADDR, drvdata->fb_phys);
301 rc = xilinx_fb_in32(drvdata, REG_FB_ADDR);
303 if (rc != drvdata->fb_phys) {
304 drvdata->flags |= LITTLE_ENDIAN_ACCESS;
305 xilinx_fb_out32(drvdata, REG_FB_ADDR, drvdata->fb_phys);
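Lines 300-305 are the endianness probe: the framebuffer base address is written to REG_FB_ADDR and read back; if the value does not round-trip, the core is assumed to be wired little-endian, LITTLE_ENDIAN_ACCESS is set, and the address is written again through the now-correct accessor. With only the if and braces added, the block is:

	/* tell the hardware where the frame buffer is */
	xilinx_fb_out32(drvdata, REG_FB_ADDR, drvdata->fb_phys);
	rc = xilinx_fb_in32(drvdata, REG_FB_ADDR);

	/* endianness detection: retry in little-endian mode on mismatch */
	if (rc != drvdata->fb_phys) {
		drvdata->flags |= LITTLE_ENDIAN_ACCESS;
		xilinx_fb_out32(drvdata, REG_FB_ADDR, drvdata->fb_phys);
	}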
309 drvdata->reg_ctrl_default = REG_CTRL_ENABLE;
311 drvdata->reg_ctrl_default |= REG_CTRL_ROTATE;
312 xilinx_fb_out32(drvdata, REG_CTRL, drvdata->reg_ctrl_default);
315 drvdata->info.device = dev;
316 drvdata->info.screen_base = (void __iomem *)drvdata->fb_virt;
317 drvdata->info.fbops = &xilinxfb_ops;
318 drvdata->info.fix = xilinx_fb_fix;
319 drvdata->info.fix.smem_start = drvdata->fb_phys;
320 drvdata->info.fix.smem_len = fbsize;
321 drvdata->info.fix.line_length = pdata->xvirt * BYTES_PER_PIXEL;
323 drvdata->info.pseudo_palette = drvdata->pseudo_palette;
324 drvdata->info.var = xilinx_fb_var;
325 drvdata->info.var.height = pdata->screen_height_mm;
326 drvdata->info.var.width = pdata->screen_width_mm;
327 drvdata->info.var.xres = pdata->xres;
328 drvdata->info.var.yres = pdata->yres;
329 drvdata->info.var.xres_virtual = pdata->xvirt;
330 drvdata->info.var.yres_virtual = pdata->yvirt;
333 rc = fb_alloc_cmap(&drvdata->info.cmap, PALETTE_ENTRIES_NO, 0);
341 rc = register_framebuffer(&drvdata->info);
347 if (drvdata->flags & BUS_ACCESS_FLAG) {
350 &drvdata->regs_phys, drvdata->regs);
354 (unsigned long long)drvdata->fb_phys, drvdata->fb_virt, fbsize);
359 fb_dealloc_cmap(&drvdata->info.cmap);
362 if (drvdata->fb_alloced)
363 dma_free_coherent(dev, PAGE_ALIGN(fbsize), drvdata->fb_virt,
364 drvdata->fb_phys);
366 iounmap(drvdata->fb_virt);
369 xilinx_fb_out32(drvdata, REG_CTRL, 0);
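Lines 359-369 are the error-unwind tail of xilinxfb_assign(): free the colormap allocated at line 333, release the framebuffer memory the same way it was obtained (dma_free_coherent vs. iounmap, keyed on fb_alloced), then disable the display. Sketched below as goto targets; the label names are illustrative, not taken from the source:

err_cmap:
	fb_dealloc_cmap(&drvdata->info.cmap);

err_fbmem:
	if (drvdata->fb_alloced)
		dma_free_coherent(dev, PAGE_ALIGN(fbsize), drvdata->fb_virt,
				  drvdata->fb_phys);
	else
		iounmap(drvdata->fb_virt);

	/* turn off the display */
	xilinx_fb_out32(drvdata, REG_CTRL, 0);

	return rc;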
376 struct xilinxfb_drvdata *drvdata = dev_get_drvdata(dev);
379 xilinx_fb_blank(VESA_POWERDOWN, &drvdata->info);
382 unregister_framebuffer(&drvdata->info);
384 fb_dealloc_cmap(&drvdata->info.cmap);
386 if (drvdata->fb_alloced)
387 dma_free_coherent(dev, PAGE_ALIGN(drvdata->info.fix.smem_len),
388 drvdata->fb_virt, drvdata->fb_phys);
390 iounmap(drvdata->fb_virt);
393 xilinx_fb_out32(drvdata, REG_CTRL, 0);
397 if (!(drvdata->flags & BUS_ACCESS_FLAG))
398 dcr_unmap(drvdata->dcr_host, drvdata->dcr_len);
412 struct xilinxfb_drvdata *drvdata;
418 drvdata = devm_kzalloc(&pdev->dev, sizeof(*drvdata), GFP_KERNEL);
419 if (!drvdata)
434 drvdata->flags |= BUS_ACCESS_FLAG;
440 drvdata->dcr_len = dcr_resource_len(pdev->dev.of_node, 0);
441 drvdata->dcr_host = dcr_map(pdev->dev.of_node, start, drvdata->dcr_len);
442 if (!DCR_MAP_OK(drvdata->dcr_host)) {
469 platform_set_drvdata(pdev, drvdata);
470 return xilinxfb_assign(pdev, drvdata, &pdata);
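Lines 434-442 in the probe function choose the register interface before handing off to xilinxfb_assign(): a device-tree property says whether the TFT core sits on the PLB/AXI bus (plain MMIO, BUS_ACCESS_FLAG) or behind the DCR bridge, in which case the DCR window is mapped here instead of the MMIO resource. A condensed sketch; the property name xlnx,dcr-splb-slave-if and the error message are assumptions filled in from the driver's bindings, not from the matched lines:

	u32 tft_access = 0;

	/* assumed DT property distinguishing bus-attached from DCR-attached cores */
	of_property_read_u32(pdev->dev.of_node, "xlnx,dcr-splb-slave-if",
			     &tft_access);

	if (tft_access) {
		drvdata->flags |= BUS_ACCESS_FLAG;	/* plain MMIO registers */
	} else {
		int start = dcr_resource_start(pdev->dev.of_node, 0);

		drvdata->dcr_len = dcr_resource_len(pdev->dev.of_node, 0);
		drvdata->dcr_host = dcr_map(pdev->dev.of_node, start,
					    drvdata->dcr_len);
		if (!DCR_MAP_OK(drvdata->dcr_host)) {
			dev_err(&pdev->dev, "invalid DCR address\n");
			return -ENODEV;
		}
	}

	platform_set_drvdata(pdev, drvdata);
	return xilinxfb_assign(pdev, drvdata, &pdata);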