performance – fastest way to export scraped data to CSV in my web-scraping Python code

Below is my code, in which I am writing row by row (there are around 900 pages with 10 rows and 5 fields in each row). Is there any way to make this faster? Currently it takes 80 minutes to export the data to CSV.

import requests
from urllib3.exceptions import InsecureRequestWarning
import csv

from bs4 import BeautifulSoup as bs

# Open the output file once at module level and keep the handle so it can be
# flushed/closed; the original wrapped open() directly in csv.writer(), which
# made the file object unreachable and left buffered rows at risk of loss.
# Explicit UTF-8 avoids platform-dependent default encodings.
_csv_file = open('GEM.csv', 'w', newline='', encoding='utf-8')
f = csv.writer(_csv_file)
# NOTE(review): 'Quantitiy' is misspelled, kept as-is in case downstream
# consumers already depend on this header name.
f.writerow(('Bidnumber', 'Items', 'Quantitiy', 'Department', 'Enddate'))
_csv_file.flush()  # make the header visible even if the script dies mid-scrape

def scrap_bid_data():
    """Scrape bid listings from pages 1..909 and write the rows to the CSV writer `f`.

    Each page's `#pagi_content` div holds alternating separator/record children;
    odd-indexed children carry the bid text, which is split on newlines and
    parsed positionally into (bidno, items, qnty, dept, edate).

    Performance notes vs. the original:
    - a single `requests.Session` reuses TCP/TLS connections across ~900 requests
      (the dominant cost here is the network, not the CSV writing);
    - rows are accumulated and emitted with one `writerows` call instead of
      ~9000 tiny `writerow` calls.
    """
    # The site apparently has a bad certificate (verify=False in the original);
    # suppress the warning the top-of-file import was clearly intended for.
    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
    session = requests.Session()
    session.verify = False

    rows = []
    for page_no in range(1, 910):
        # TODO(review): the base URL was redacted in the paste — restore it here.
        url = '' + str(page_no)
        print('Fetching page: ' + url)
        scraped_data = session.get(url)
        soup_data = bs(scraped_data.text, 'lxml')
        extracted_data = soup_data.find('div', {'id': 'pagi_content'})
        # find() returns None when the div is absent; the original's
        # `len(...) == 0` guard was inverted and made the body unreachable.
        if extracted_data is None or len(extracted_data) == 0:
            continue
        for idx in range(len(extracted_data)):
            if idx % 2 == 1:
                # Fixed throughout: subscripts [..] instead of call syntax (..),
                # and split on a real newline, not the letter 'n'.
                bid_data = extracted_data.contents[idx].text.strip().split('\n')

                bidno = bid_data[0].split(":")[-1]
                items = bid_data[5].split(":")[-1]
                qnty = int(bid_data[6].split(':')[1].strip())
                dept = (bid_data[10] + bid_data[12].strip()).split(":")[-1]
                edate = bid_data[17].split("End Date:")[-1]
                rows.append((bidno, items, qnty, dept, edate))

    # One bulk write instead of one writerow per record.
    f.writerows(rows)