How do I calculate error on each node?

I am running the heat equation code below. I am wondering how to calculate the error at each node, and how to save the solution in XDMF format?

from __future__ import print_function
from fenics import *
import numpy as np

T = 2.0            # final time
num_steps = 10     # number of time steps
dt = T / num_steps # time step size
alpha = 3          # parameter alpha
beta = 1.2         # parameter beta

# Create mesh and define function space
nx = ny = 8
mesh = UnitSquareMesh(nx, ny)
V = FunctionSpace(mesh, 'P', 1)
dof_coordinates = V.tabulate_dof_coordinates()
# Define boundary condition
u_D = Expression('1 + x[0]*x[0] + alpha*x[1]*x[1] + beta*t',
                 degree=2, alpha=alpha, beta=beta, t=0)

def boundary(x, on_boundary):
    """Mark the whole outer boundary for the Dirichlet condition.

    FEniCS passes ``on_boundary=True`` exactly when the point ``x``
    lies on the domain boundary, so the flag is simply forwarded.
    """
    return on_boundary

bc = DirichletBC(V, u_D, boundary)

# Initial condition: interpolate the exact solution at t = 0
u_n = interpolate(u_D, V)

# Variational problem for one backward-Euler step:
#   (u - u_n)/dt = laplace(u) + f
u = TrialFunction(V)
v = TestFunction(V)
f = Constant(beta - 2 - 2*alpha)

F = u*v*dx + dt*dot(grad(u), grad(v))*dx - (u_n + dt*f)*v*dx
a, L = lhs(F), rhs(F)

# XDMF output: the mesh is stored once and one field is appended per
# time step, so the whole time series can be reloaded or opened in ParaView.
xdmf_file = XDMFFile('heat_solution.xdmf')

# Time-stepping
u = Function(V)
t = 0
for n in range(num_steps):

    # Update current time (the Expression reads .t when evaluated)
    t += dt
    u_D.t = t

    # Compute solution
    solve(a == L, u, bc)

    # Save the current solution, tagged with the simulation time
    xdmf_file.write(u, t)

    # Plot solution
    plot(u, title='Solution to Heat Equation')

    # Error at each node: for P1 elements the dofs coincide with the
    # mesh vertices, so entry i of the error vector belongs to the
    # node at dof_coordinates[i].
    u_e = interpolate(u_D, V)
    nodal_error = np.abs(u_e.vector().get_local() - u.vector().get_local())
    for coord, err in zip(dof_coordinates, nodal_error):
        print('node %s: error = %.3g' % (coord, err))
    print('t = %.2f: max error = %.3g' % (t, nodal_error.max()))

    # Update previous solution
    u_n.assign(u)

# Flush and close the XDMF time series
xdmf_file.close()

# Hold plot
import matplotlib.pyplot as plt
plt.show()

Please see the attached tutorial:
https://fenicsproject.org/pub/tutorial/html/._ftut1004.html#___sec35

For writing XDMF files, see for instance: Loading xdmf data back in - #4 by dokken