gpio: davinci: Remove custom .xlate

With the current redesign of the driver it's not necessary to have a
custom .xlate(), as gpiolib will assign the default of_gpio_simple_xlate().

Suggested-by: Grygorii Strashko <grygorii.strashko@ti.com>
Signed-off-by: Keerthy <j-keerthy@ti.com>
Reviewed-by: Grygorii Strashko <grygorii.strashko@ti.com>
Signed-off-by: Linus Walleij <linus.walleij@linaro.org>
This commit is contained in:
Keerthy 2017-01-17 21:59:14 +05:30 committed by Linus Walleij
parent 8e11047b8f
commit 035a86b857

View File

@@ -163,27 +163,6 @@ of_err:
return NULL;
}
#ifdef CONFIG_OF_GPIO
/*
 * davinci_gpio_of_xlate() - translate a DT gpio-specifier to a chip-local
 * GPIO offset.
 *
 * @gc:       the gpio_chip the specifier was matched against
 * @gpiospec: two-cell DT specifier; args[0] is the controller-wide GPIO
 *            number, args[1] holds the GPIO flags
 * @flags:    optional out-parameter receiving args[1]
 *
 * Returns the offset of the GPIO within its 32-line bank, or -EINVAL if
 * the specifier is out of range or was matched against the wrong chip.
 */
static int davinci_gpio_of_xlate(struct gpio_chip *gc,
				 const struct of_phandle_args *gpiospec,
				 u32 *flags)
{
	struct davinci_gpio_controller *chips = dev_get_drvdata(gc->parent);
	struct davinci_gpio_platform_data *pdata = dev_get_platdata(gc->parent);

	/*
	 * Valid controller-wide GPIO numbers are 0..ngpio-1, so reject
	 * args[0] == ngpio as well ('>=', not '>'); this matches the range
	 * check done by gpiolib's default of_gpio_simple_xlate().
	 */
	if (gpiospec->args[0] >= pdata->ngpio)
		return -EINVAL;

	if (gc != &chips->chip)
		return -EINVAL;

	if (flags)
		*flags = gpiospec->args[1];

	/* Banks are 32 lines wide; fold down to a per-bank offset. */
	return gpiospec->args[0] % 32;
}
#endif
static int davinci_gpio_probe(struct platform_device *pdev)
{
static int ctrl_num, bank_base;
@@ -244,7 +223,6 @@ static int davinci_gpio_probe(struct platform_device *pdev)
#ifdef CONFIG_OF_GPIO
chips->chip.of_gpio_n_cells = 2;
chips->chip.of_xlate = davinci_gpio_of_xlate;
chips->chip.parent = dev;
chips->chip.of_node = dev->of_node;
#endif