python – Calculating a function over a large array

I have written some code to determine an interpolation matrix over a grid of points. I originally wrote the code in MATLAB, where under the specified parameters it took just over 2 minutes. The equivalent code in Python, however, takes 40 minutes!

I was expecting Python to be slower, but not by such a large margin! Any feedback on the code is much appreciated.

import numpy as np
from numpy import linalg as LA

def g(x, y):
    """Evaluate the vector field g(x, y) = -(x, y) * (x**2 + y**2).

    Returns a length-2 NumPy array (gx, gy).
    """
    r2 = x ** 2 + y ** 2  # shared radial factor
    return np.array((-x * r2, -y * r2))

def wendland(r):
    """Compactly supported Wendland kernel: (1-r)^4 * (4r+1) for r < 1, else 0."""
    if r >= 1:
        return 0
    return (1 - r) ** 4 * (4 * r + 1)

def make_collocation_points(h, x1, x2, y1, y2):
    """Build collocation points (j*h, k*h) over the grid, skipping points
    that lie (approximately) on the chain recurrent set of g.

    Parameters:
        h: grid spacing scale applied to the integer/step indices.
        x1, x2: inclusive integer range for the x index j.
        y1, y2: range for the y index k, stepped by h via np.arange.

    Returns:
        (x, y, n): lists of x- and y-coordinates and the point count n.
    """
    # Bug fix: the originals were tuples (()), which have no .append and
    # raised AttributeError on the first accepted point. Use lists.
    x = []
    y = []

    for j in range(x1, x2 + 1):
        for k in np.arange(y1, y2, h):
            # Reject points that g maps (almost) onto themselves —
            # these lie on the chain recurrent set.
            if LA.norm(g(j * h, k * h) - (j * h, k * h)) > 0.00001:
                x.append(j * h)
                y.append(k * h)

    # The manual counter i always equaled len(x); compute it directly.
    return x, y, len(x)

def get_aij(x, y, n):
    """Assemble the n-by-n interpolation matrix A with entries

        a[j, k] = wendland(|g_j - g_k|) - wendland(|g_j - p_k|)
                - wendland(|p_j - g_k|) + wendland(|p_j - p_k|)

    where p_j = (x[j], y[j]) and g_j = g(x[j], y[j]).

    Parameters:
        x, y: sequences of point coordinates of length >= n.
        n: number of collocation points.

    Returns:
        An (n, n) NumPy array.
    """
    a = np.zeros((n, n))

    # Bug fixes vs. the original: a(j, k) = ... is invalid syntax
    # (assignment to a call) — must be a[j, k]; likewise x(j)/y(j)
    # called the sequences instead of indexing them — must be x[j]/y[j].
    #
    # Performance: g(x[j], y[j]) was recomputed inside the O(n^2) inner
    # loop; precompute all points and their images once (O(n) calls).
    pts = [np.array((x[j], y[j])) for j in range(n)]
    gs = [g(x[j], y[j]) for j in range(n)]

    for j in range(n):
        for k in range(n):
            a[j, k] = (
                wendland(LA.norm(gs[j] - gs[k]))
                - wendland(LA.norm(gs[j] - pts[k]))
                - wendland(LA.norm(pts[j] - gs[k]))
                + wendland(LA.norm(pts[j] - pts[k]))
            )
    return a

if __name__ == "__main__":
    # Collocation grid: spacing h over the square [-15, 15] x [-15, 15].
    h = 0.11
    x1, y1 = -15, -15
    x2, y2 = 15, 15

    # Build the admissible grid points, then assemble the matrix.
    x, y, n = make_collocation_points(h, x1, x2, y1, y2)
    a = get_aij(x, y, n)