from numpy import array, eye, matrix, ndarray

from dynamic_graph import plug
from dynamic_graph.sot.core.feature_generic import FeatureGeneric
from dynamic_graph.sot.core.flags import Flags
from dynamic_graph.sot.core.gain_adaptive import GainAdaptive
from dynamic_graph.sot.core.matrix_util import rpy2tr
from dynamic_graph.sot.core.task import Task
16 self.
feature = FeatureGeneric(
"feature" + name)
18 self.
gain = GainAdaptive(
"gain" + name)
20 plug(dyn.com, self.
feature.errorIN)
21 plug(dyn.Jcom, self.
feature.jacobianIN)
25 self.task.add(self.
feature.name)
26 plug(self.task.error, self.
gain.error)
27 plug(self.
gain.gain, self.task.controlGain)
def setGain(gain, val):
    """Configure a gain entity from a flexible value specification.

    :param gain: entity exposing ``setConstant``, ``set`` and
        ``setByPoint`` (e.g. a GainAdaptive).
    :param val: one of
        - None: leave the gain unchanged;
        - an int or float: constant gain;
        - a 1-tuple: constant gain ``val[0]``;
        - a 3-tuple: forwarded positionally to ``gain.set``;
        - a 4-tuple: forwarded positionally to ``gain.setByPoint``.
    """
    if val is None:
        return
    if isinstance(val, (int, float)):
        gain.setConstant(val)
    elif len(val) == 1:
        gain.setConstant(val[0])
    elif len(val) == 3:
        gain.set(val[0], val[1], val[2])
    elif len(val) == 4:
        gain.setByPoint(val[0], val[1], val[2], val[3])
def generic6dReference(p):
    """Build a 4x4 homogeneous matrix from a flexible position spec.

    Accepted forms for *p*:
      - matrix/ndarray of size 3, or 3-tuple: translation only, identity
        rotation;
      - matrix/ndarray of shape (4, 4): used as-is (no copy);
      - 4x4 nested tuple (or matrix of 4 rows of 4): converted with
        ``matrix``;
      - length-6 sequence (x, y, z, roll, pitch, yaw): rotation from
        ``rpy2tr``, translation overwritten from the first three entries.
    Any other form leaves the identity and prints a warning.

    :return: a 4x4 homogeneous transformation (matrix or ndarray).
    """
    M = eye(4)
    if isinstance(p, (matrix, ndarray)) and p.size == 3:
        M[0:3, 3] = p
    elif isinstance(p, tuple) and len(p) == 3:
        M[0:3, 3] = p
    elif isinstance(p, (matrix, ndarray)) and p.shape == (4, 4):
        M = p
    elif isinstance(p, (matrix, tuple)) and len(p) == 4 == len(p[0]) == len(
            p[1]) == len(p[2]) == len(p[3]):
        M = matrix(p)
    elif isinstance(p, (matrix, ndarray, tuple)) and len(p) == 6:
        # (x, y, z, r, p, y): orientation first, then overwrite translation.
        M = rpy2tr(*p[3:7])
        M[0:3, 3] = p[0:3]
    else:
        print("Position with other parameters ... todo")
    return M
def goto6d(task, position, gain=None, resetJacobian=True):
    """Drive a 6D meta-task toward *position*, controlling all 6 dof.

    :param task: meta-task object exposing ``feature``, ``featureDes``,
        ``gain`` and ``task`` members.
    :param position: any form accepted by ``generic6dReference``.
    :param gain: forwarded to ``setGain`` (None leaves the gain unchanged).
    :param resetJacobian: when True and the underlying task supports it,
        reset the Jacobian derivative.
    """
    M = generic6dReference(position)
    task.featureDes.position.value = array(M)
    # Select every dof: translation + rotation.
    task.feature.selec.value = Flags("111111")
    setGain(task.gain, gain)
    # Only task classes that define resetJacobianDerivative support this.
    if ("resetJacobianDerivative" in task.task.__class__.__dict__.keys()
            and resetJacobian):
        task.task.resetJacobianDerivative()
def gotoNd(task, position, selec=None, gain=None, resetJacobian=True):
    """Drive a meta-task toward *position* with a dof-selection mask.

    :param task: meta-task object exposing ``feature``, ``featureDes``,
        ``gain`` and ``task`` members.
    :param position: any form accepted by ``generic6dReference``.
    :param selec: None to keep the current selection, a ``Flags``
        instance, or anything ``Flags`` accepts (e.g. the string "000111").
    :param gain: forwarded to ``setGain`` (None leaves the gain unchanged).
    :param resetJacobian: when True and the underlying task supports it,
        reset the Jacobian derivative.
    """
    M = generic6dReference(position)
    if selec is not None:
        if not isinstance(selec, Flags):
            selec = Flags(selec)
        task.feature.selec.value = selec
    task.featureDes.position.value = array(M)
    setGain(task.gain, gain)
    # Only task classes that define resetJacobianDerivative support this.
    if ("resetJacobianDerivative" in task.task.__class__.__dict__.keys()
            and resetJacobian):
        task.task.resetJacobianDerivative()