Skip to content

Commit

Permalink
Custom operation first implementation
Browse files Browse the repository at this point in the history
  • Loading branch information
niccololaurora committed Nov 8, 2024
1 parent b5af92d commit bba581d
Show file tree
Hide file tree
Showing 2 changed files with 98 additions and 2 deletions.
35 changes: 35 additions & 0 deletions prova.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@

import tensorflow as tf
import numpy as np
from qibo import Circuit, gates


@tf.custom_gradient
def custom_operation():
    """Minimal ``tf.custom_gradient`` example.

    Returns the ``(output, grad_fn)`` pair required by the decorator.
    The original scratch version was syntactically invalid (empty
    assignment, ``def grad_fn()`` missing a colon and body); this is the
    smallest well-formed completion.

    Returns:
        A constant forward value and its gradient function.
    """
    # Placeholder forward result; the function takes no inputs.
    output = tf.constant(0.0)

    def grad_fn(upstream):
        # No differentiable inputs, so there are no gradients to return.
        return ()

    return output, grad_fn



class MyLayer(tf.keras.layers.Layer):
    """Keras layer wrapping a small two-qubit qibo circuit with one
    trainable weight vector of shape (4,)."""

    def __init__(self):
        # Bug fix: original had a stray ':' after super().__init__().
        super().__init__()
        # Build the circuit once and store it; the original
        # `self.circuit = self.circuit()` clobbered the method, so the
        # builder is renamed to avoid the name collision.
        self.circuit = self.build_circuit()
        # Bug fixes: the Keras API is `add_weight` (singular), and
        # `Layer.weights` is a read-only property that must not be
        # assigned to — store the variable under a distinct name.
        self.w = self.add_weight(
            name="w", shape=(4,), initializer="random_normal"
        )

    def build_circuit(self):
        """Return a 2-qubit circuit: X on qubit 0, RX(theta=0.5) on qubit 1."""
        c = Circuit(2)
        c.add(gates.X(0))
        c.add(gates.RX(1, theta=0.5))
        # Bug fix: the original builder never returned the circuit.
        return c

    def call(self, x):
        # NOTE(review): executes the stored circuit; the original body was
        # incomplete (a dangling `def` followed) — presumably the input `x`
        # should feed the circuit. TODO confirm intended forward pass.
        return self.circuit()
65 changes: 63 additions & 2 deletions src/qiboml/models/keras.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,58 @@
}


@tf.custom_gradient
def custom_operation(
    encoding, circuit, decoding, differentiation, backend, parameters, x
):
    """Run encoding + circuit + decoding outside the TensorFlow graph and
    attach a custom gradient computed via ``differentiation.evaluate``.

    The datapoint ``x`` and the trainable ``parameters`` are detached from
    the TensorFlow graph, moved to CPU, converted to numpy, and cast to the
    backend's precision before being handed to the qibo objects.

    Args:
        encoding: callable mapping a datapoint to an encoding circuit.
        circuit: the trainable qibo circuit, composed after the encoding.
        decoding: callable mapping the composed circuit to a prediction.
        differentiation: object whose ``evaluate`` yields the gradients.
        backend: qibo backend used for casting and numpy conversion.
        parameters: TensorFlow variables/tensors of circuit parameters.
        x: the input datapoint tensor.

    Returns:
        A pair ``(output, custom_grad)`` as required by
        ``tf.custom_gradient``, where ``output`` is the decoded prediction
        with a leading batch axis of size 1.
    """
    # Detach the datapoint from the TF graph and force it onto the host
    # so .numpy() is safe regardless of the active device.
    x_clone = tf.stop_gradient(tf.identity(x))
    with tf.device("CPU:0"):
        x_clone = tf.identity(x_clone)
    x_clone = backend.cast(x_clone.numpy(), dtype=backend.precision)

    # Detach each trainable parameter the same way and cast it to a
    # backend-compatible value.
    parameters = tf.identity(parameters)
    params = []
    for w in parameters:
        w_clone = tf.stop_gradient(tf.identity(w))
        with tf.device("CPU:0"):
            w_clone = tf.identity(w_clone)
        params.append(backend.cast(w_clone.numpy(), dtype=backend.precision))

    # Forward pass: compose the encoding circuit with the model circuit,
    # load the detached parameters, and decode to the final prediction.
    output = encoding(x_clone) + circuit
    output.set_parameters(params)
    output = decoding(output)
    output = tf.expand_dims(output, axis=0)

    def custom_grad(upstream):
        # Bug fix: the tensor factory is ``tf.constant`` — ``tf.Constant``
        # does not exist and raised AttributeError at gradient time.
        grad_input, *gradients = (
            tf.constant(backend.to_numpy(grad).tolist())
            for grad in differentiation.evaluate(
                x_clone, encoding, circuit, decoding, backend, *parameters
            )
        )
        # NOTE(review): only the gradient w.r.t. the input is propagated;
        # ``gradients`` (w.r.t. the parameters) is computed and discarded.
        # Confirm this matches the intended tf.custom_gradient contract,
        # which expects one gradient per differentiable input.
        return upstream * grad_input

    return output, custom_grad


@dataclass(eq=False)
class QuantumModel(keras.Model): # pylint: disable=no-member

Expand Down Expand Up @@ -47,8 +99,17 @@ def call(self, x: tf.Tensor) -> tf.Tensor:
or self.differentiation is not None
or not self.decoding.analytic
):



"""devo chiamare una funzione che mi ritorna la prediction voluta e
la funzione custom grad"""
return custom_operation(
self.encoding,
self.circuit,
self.decoding,
self.differentiation,
self.circuit_parameters,
x,
)

else:

Expand Down

0 comments on commit bba581d

Please sign in to comment.