Python – web scraping and design pattern advice

I would like to get some feedback on my code; the goal is to collect all agency addresses of banks. I wrote a pretty simple brute-force algorithm.
I was wondering if you have any advice to improve the code or design it differently (do I need an OOP approach here?), etc.

import requests
import pandas as pd
from lxml import html

# Scraping targets: bank-group URL slug(s) and French departement codes.
groupe = "credit-agricole"
dep = '21'
# Bug fix: ("credit-agricole") is just a parenthesised string, so iterating it
# yields single characters. The trailing comma makes it a one-element tuple.
groupes = ("credit-agricole",)
deps = ('53', '44', '56', '35', '22', '49', '72', '29', '85')

def get_nb_pages(groupe, dep):
    """Return the number of result pages for one bank group in one departement.

    Args:
        groupe (str): bank group slug (e.g. "credit-agricole").
        dep (str): departement code (e.g. "01").

    Returns:
        int: number of pages containing bank agency information.
    """
    # NOTE(review): the URL has no scheme/host — the base URL appears to have
    # been lost from the snippet; prepend it here once confirmed.
    url = "{groupe}/{dep}".format(groupe=groupe, dep=dep)
    req = requests.get(url)
    # Fail loudly on HTTP errors instead of silently parsing an error page.
    req.raise_for_status()
    tree = html.fromstring(req.text)
    # The pagination <nav> holds one <a> per additional page; page 1 itself
    # has no link, hence the +1.
    xpath = "/html/body/div[2]/article/div/div/div[3]/div[2]/nav/a"
    return len(tree.xpath(xpath)) + 1

def get_agencies(groupe, dep, page_num):
    """Return the agencies scraped from one result page of the target site.

    Args:
        groupe (str): bank group slug (e.g. "credit-agricole").
        dep (str): departement code (e.g. "01").
        page_num (int): target page number (1-based).

    Returns:
        list[tuple]: one (agence, rue, code_postale) tuple per agency found.
    """
    # NOTE(review): the URL has no scheme/host — confirm the base URL.
    url = "{groupe}/{dep}/{page_num}".format(groupe=groupe, dep=dep,
                                             page_num=page_num)
    req = requests.get(url)
    # Fail loudly on HTTP errors instead of silently parsing an error page.
    req.raise_for_status()
    tree = html.fromstring(req.text)
    blocs_agencies = tree.xpath('//div[@class="lh-bloc-agence like-text"]')
    agencies = []
    for bloc in blocs_agencies:
        # Each bloc holds the agency name (h4) and two address lines (p).
        agence = bloc.xpath("div/div[1]/h4")[0].text
        rue = bloc.xpath("div/div[1]/p[1]")[0].text
        code_postale = bloc.xpath("div/div[1]/p[2]")[0].text
        # Bug fix: the original loop discarded every row, so the function
        # always returned an empty container.
        agencies.append((agence, rue, code_postale))
    return agencies
def get_all(groupes, deps):
    """Scrape every agency for every (groupe, departement) pair.

    Args:
        groupes (iterable[str]): target bank group slugs.
        deps (iterable[str]): target departement codes.

    Returns:
        pandas.DataFrame: one row per agency, columns
        ('agence', 'rue', 'code_postale').
    """
    all_agencies = []
    for groupe in groupes:
        for dep in deps:
            nb_pages = get_nb_pages(groupe, dep)
            for page_num in range(1, nb_pages + 1):
                # Bug fix: the original rebound a local `agencies` each
                # iteration and never accumulated, so every page's results
                # were dropped.
                all_agencies.extend(get_agencies(groupe, dep, page_num))
    # Requires `import pandas as pd` at the top of the file.
    return pd.DataFrame(all_agencies,
                        columns=('agence', 'rue', 'code_postale'))

if __name__ == "__main__":
    # Run the scrape only when executed as a script, not when imported.
    df_agencies = get_all(groupes, deps)