/*
* This file is part of ArmarX.
*
* ArmarX is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* ArmarX is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* @package RobotComponents::ViewSelection
* @author David Schiebener (schiebener at kit dot edu)
* @date 2015
* @copyright http://www.gnu.org/licenses/gpl-2.0.txt
* GNU General Public License
*/
#include "ViewSelection.h"
#include <ArmarXCore/core/system/ArmarXDataPath.h>
#include <RobotAPI/libraries/core/remoterobot/RemoteRobot.h>
#include <MemoryX/core/MemoryXCoreObjectFactories.h>
#include <MemoryX/libraries/memorytypes/MemoryXTypesObjectFactories.h>
#include <VirtualRobot/Robot.h>
#include <VirtualRobot/Nodes/RobotNode.h>
#include <MemoryX/libraries/helpers/ObjectRecognitionHelpers/ObjectRecognitionWrapper.h>
using namespace armarx;
using namespace Eigen;
void ViewSelection::onInitComponent()
{
usingProxy(getProperty<std::string>("RobotStateComponentName").getValue());
usingProxy(getProperty<std::string>("HeadIKUnitName").getValue());
usingProxy(getProperty<std::string>("WorkingMemoryName").getValue());
usingProxy(getProperty<std::string>("PriorKnowledgeName").getValue());
offeringTopic("DebugDrawerUpdates");
headIKKinematicChainName = getProperty<std::string>("HeadIKKinematicChainName").getValue();
headFrameName = getProperty<std::string>("HeadFrameName").getValue();
cameraFrameName = getProperty<std::string>("CameraFrameName").getValue();
drawViewDirection = getProperty<bool>("VisualizeViewDirection").getValue();
randomNoiseLevel = getProperty<float>("RandomNoiseLevel").getValue();
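// Load the precomputed egosphere graph that discretizes the possible view directions around
// the head; each node later accumulates the saliency of its view direction.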
std::string graphFileName;
if (ArmarXDataPath::getAbsolutePath("RobotComponents/ViewSelection/graph40k.gra", graphFileName))
{
saliencyEgosphereGraph = new CIntensityGraph(graphFileName);
ARMARX_VERBOSE << "Created egosphere graph with " << saliencyEgosphereGraph->getNodes()->size() << "nodes";
graphLookupTable = new CGraphPyramidLookupTable(9, 18);
graphLookupTable->buildLookupTable(saliencyEgosphereGraph);
nodeVisitedForObject.resize(saliencyEgosphereGraph->getNodes()->size());
// create visibility mask
visibilityMaskGraph = new CIntensityGraph(*saliencyEgosphereGraph);
TNodeList* nodes = visibilityMaskGraph->getNodes();
const float maxOverallHeadTiltAngle = getProperty<float>("MaxOverallHeadTiltAngle").getValue();
const float borderAreaAngle = 10.0f;
const float centralAngle = getProperty<float>("CentralHeadTiltAngle").getValue();
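// Nodes within the comfortable head tilt range around the central angle get full visibility (1.0);
// in the border area the intensity ramps down linearly to 0, and nodes beyond the maximum tilt get 0.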
for (size_t i = 0; i < nodes->size(); i++)
{
CIntensityNode* node = (CIntensityNode*) nodes->at(i);
if (fabs(node->getPosition().fPhi - centralAngle) < maxOverallHeadTiltAngle - borderAreaAngle)
{
node->setIntensity(1.0f);
}
else if (fabs(node->getPosition().fPhi - centralAngle) < maxOverallHeadTiltAngle)
{
node->setIntensity(1.0f - (fabs(node->getPosition().fPhi - centralAngle) - (maxOverallHeadTiltAngle - borderAreaAngle)) / borderAreaAngle);
}
else
{
node->setIntensity(0.0f);
}
}
randomNoiseGraph = new CIntensityGraph(*saliencyEgosphereGraph);
randomNoiseGraph->set(0);
}
else
{
ARMARX_ERROR << "Could not find required graph file";
}
halfCameraOpeningAngle = getProperty<float>("HalfCameraOpeningAngle").getValue();
sleepingTimeBetweenViewDirectionChanges = getProperty<int>("SleepingTimeBetweenViewDirectionChanges").getValue();
doAutomaticViewSelection = getProperty<bool>("ActiveAtStartup").getValue();
deviationFromCameraCenterFactor = 0.5f;
timeOfLastViewChange = IceUtil::Time::now();
// this is robot model specific: offset from the used head coordinate system to the actual
// head center where the eyes are assumed to be located. Here it is correct for ARMAR-III
offsetToHeadCenter << 0, 0, 150;
}
void ViewSelection::onConnectComponent()
{
robotStateComponent = getProxy<RobotStateComponentInterfacePrx>(getProperty<std::string>("RobotStateComponentName").getValue());
memoryProxy = getProxy<memoryx::WorkingMemoryInterfacePrx>(getProperty<std::string>("WorkingMemoryName").getValue());
priorKnowledge = getProxy<memoryx::PriorKnowledgeInterfacePrx>(getProperty<std::string>("PriorKnowledgeName").getValue());
objectInstancesProxy = memoryx::ObjectInstanceMemorySegmentBasePrx::uncheckedCast(memoryProxy->getSegment("objectInstances"));
objectClassesProxy = memoryx::ObjectClassMemorySegmentBasePrx::uncheckedCast(memoryProxy->getSegment("objectClasses"));
headIKUnitProxy = getProxy<HeadIKUnitInterfacePrx>(getProperty<std::string>("HeadIKUnitName").getValue());
drawer = getTopic<DebugDrawerInterfacePrx>("DebugDrawerUpdates");
headIKUnitProxy->request();
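// Cache all object classes from prior knowledge that provide a recognition method,
// so that only localizable objects are considered during view selection.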
auto objClassSegment = priorKnowledge->getObjectClassesSegment();
auto ids = objClassSegment->getAllEntityIds();
for (const std::string& id : ids)
{
memoryx::ObjectClassPtr objClass = memoryx::ObjectClassPtr::dynamicCast(objClassSegment->getEntityById(id));
memoryx::EntityWrappers::ObjectRecognitionWrapperPtr wrapper = objClass->addWrapper(new memoryx::EntityWrappers::ObjectRecognitionWrapper());
if (!wrapper->getRecognitionMethod().empty() && wrapper->getRecognitionMethod() != "<none>")
{
recognizableObjClasses[objClass->getName()] = objClass;
ARMARX_INFO << objClass->getName() << " has " << wrapper->getRecognitionMethod();
}
}
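// Run process() periodically (30 ms cycle); the delay warning tolerance accounts for
// the sleep that follows each view direction change.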
processorTask = new PeriodicTask<ViewSelection>(this, &ViewSelection::process, 30);
processorTask->setDelayWarningTolerance(sleepingTimeBetweenViewDirectionChanges + 100);
processorTask->start();
}
void ViewSelection::onDisconnectComponent()
{
try
{
if (drawer)
drawer->removeArrowDebugLayerVisu("HeadRealViewDirection");
processorTask->stop();
headIKUnitProxy->release();
}
catch (...)
{
ARMARX_WARNING_S << "An exception occured during onDisconnectComponent()";
handleExceptions();
}
}
void ViewSelection::onExitComponent()
{
delete visibilityMaskGraph;
delete graphLookupTable;
delete saliencyEgosphereGraph;
delete randomNoiseGraph;
}
void ViewSelection::process()
{
try
{
if (manualViewTargets.size() > 0)
{
// look to the manually set target
SharedRobotInterfacePrx robotPrx = robotStateComponent->getSynchronizedRobot();
FramedPositionPtr target;
{
ScopedLock lock(manualViewTargetsMutex);
target = new FramedPosition(FramedPositionPtr::dynamicCast(manualViewTargets.at(0))->toEigen(), manualViewTargets.at(0)->frame, robotPrx->getName());
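// Remove the processed target from the front of the queue (FIFO order).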
for (size_t i = 0; i < manualViewTargets.size() - 1; i++)
manualViewTargets.at(i) = manualViewTargets.at(i + 1);
manualViewTargets.pop_back();
}
ARMARX_INFO << "Looking to manual head target " << *target;
headIKUnitProxy->setHeadTarget(headIKKinematicChainName, target);
boost::this_thread::sleep(boost::posix_time::milliseconds(sleepingTimeBetweenViewDirectionChanges));
}
else if (doAutomaticViewSelection)
{
// reset saliency and visited nodes
saliencyEgosphereGraph->set(0);
for (size_t i = 0; i < saliencyEgosphereGraph->getNodes()->size(); i++)
{
nodeVisitedForObject.at(i) = -1;
}
// generate new random noise
if (randomNoiseLevel > 0)
{
setRandomNoise();
}
// collect localizable objects
std::vector<memoryx::ObjectInstancePtr> localizableObjects;
std::vector<FramedPositionPtr> localizableObjectPositions;
float maxObjectDistance = getProperty<float>("MaxObjectDistance").getValue();
SharedRobotInterfacePrx robotPrx = robotStateComponent->getSynchronizedRobot();
int numberOfLostObjects = 0;
memoryx::EntityIdList objectIds = objectInstancesProxy->getAllEntityIds();
for (size_t i = 0; i < objectIds.size(); i++)
{
const std::string objectId = objectIds.at(i);
const memoryx::EntityBasePtr entityBase = objectInstancesProxy->getEntityById(objectId);
const memoryx::ObjectInstancePtr object = memoryx::ObjectInstancePtr::dynamicCast(entityBase);
if (object)
{
if (recognizableObjClasses.count(object->getName()) > 0 && !object->getPosition()->getFrame().empty())
{
if (objectClassesProxy->hasEntityByName(object->getName())) // should be true if and only if object has been requested
{
FramedPositionPtr position = object->getPosition();
position->changeFrame(robotPrx, headFrameName);
float objDist = position->toEigen().norm();
if (objDist <= maxObjectDistance)
{
localizableObjects.push_back(object);
localizableObjectPositions.push_back(position);
if (object->getExistenceCertainty() < 0.5)
{
numberOfLostObjects++;
}
}
else
{
ARMARX_DEBUG << "Discarding " << object->getName() << " which is at " << position->output() << " ObjectDistance " << objDist;
}
}
}
}
}
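// Objects whose existence certainty has dropped below 0.5 are considered lost; each of them is
// only added to the saliency sphere with a certain probability, so the head does not spend all
// of its time searching for lost objects.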
const float probabilityToAddALostObject = (numberOfLostObjects > 0) ? getProperty<float>("ProbabilityToLookForALostObject").getValue() / numberOfLostObjects : 0;
// add localization necessities to sphere of possible view directions
for (size_t i = 0; i < localizableObjects.size(); i++)
{
const memoryx::ObjectInstancePtr object = localizableObjects.at(i);
if ((object->getExistenceCertainty() >= 0.5) || (rand() % 100000 < 100000 * probabilityToAddALostObject))
{
const FramedPositionPtr position = localizableObjectPositions.at(i);
memoryx::MultivariateNormalDistributionPtr uncertaintyGaussian = memoryx::MultivariateNormalDistributionPtr::dynamicCast(object->getPositionUncertainty());
if (!uncertaintyGaussian)
{
uncertaintyGaussian = memoryx::MultivariateNormalDistribution::CreateDefaultDistribution(1000000);
}
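// The saliency of an object grows with its localization uncertainty (the variance of its
// position distribution); the logarithm damps the influence of extremely uncertain objects.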
float saliency = log(1.0 + uncertaintyGaussian->getVarianceScalar());
if (saliency > 0)
{
// close objects get a smaller visibility angle to avoid problems caused by an unfavorable head position
float modifiedHalfCameraOpeningAngle = halfCameraOpeningAngle;
float objDist = position->toEigen().norm();
float shortDistance = 0.33f * maxObjectDistance;
if (objDist < shortDistance)
{
modifiedHalfCameraOpeningAngle = objDist / shortDistance * halfCameraOpeningAngle;
}
TSphereCoord positionInSphereCoordinates;
MathTools::convert(position->toEigen() - offsetToHeadCenter, positionInSphereCoordinates);
int closestNodeIndex = graphLookupTable->getClosestNode(positionInSphereCoordinates);
addSaliencyRecursive(closestNodeIndex, saliency, positionInSphereCoordinates, i, modifiedHalfCameraOpeningAngle);
}
}
}
// find the direction with highest saliency
TNodeList* visibilityMaskGraphNodes = visibilityMaskGraph->getNodes();
TNodeList* saliencyEgosphereGraphNodes = saliencyEgosphereGraph->getNodes();
TNodeList* randomNoiseGraphNodes = randomNoiseGraph->getNodes();
int maxIndex = -1;
float maxSaliency = 0;
for (size_t i = 0; i < saliencyEgosphereGraphNodes->size(); i++)
{
CIntensityNode* visibilityNode = (CIntensityNode*) visibilityMaskGraphNodes->at(i);
CIntensityNode* saliencyNode = (CIntensityNode*) saliencyEgosphereGraphNodes->at(i);
CIntensityNode* randomNoiseNode = (CIntensityNode*) randomNoiseGraphNodes->at(i);
float saliency = visibilityNode->getIntensity() * (saliencyNode->getIntensity() + randomNoiseNode->getIntensity());
if (saliency > maxSaliency)
{
maxSaliency = saliency;
maxIndex = i;
}
}
ARMARX_DEBUG << "Highest saliency: " << maxSaliency;
// select a new view if saliency is bigger than threshold, which converges towards 0 over time
int timeSinceLastViewChange = (IceUtil::Time::now() - timeOfLastViewChange).toMilliSeconds();
float saliencyThreshold = 0;
if (timeSinceLastViewChange > 0)
{
saliencyThreshold = 2.0f * sleepingTimeBetweenViewDirectionChanges / timeSinceLastViewChange;
}
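// Example (assuming a sleep time of 1000 ms): 1 s after the last view change the threshold is
// 2.0, after 10 s it is 0.2, so even a weak saliency eventually triggers a new view direction.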
if (maxSaliency > saliencyThreshold)
{
Eigen::Vector3f target;
MathTools::convert(saliencyEgosphereGraphNodes->at(maxIndex)->getPosition(), target);
const float distanceFactor = 1500;
target = distanceFactor * target + offsetToHeadCenter;
FramedPositionPtr viewTargetPositionPtr = new FramedPosition(target, headFrameName, robotPrx->getName());
if (drawer && robotPrx->hasRobotNode("Cameras") && drawViewDirection)
{
Vector3Ptr startPos = new Vector3(FramedPosePtr::dynamicCast(robotPrx->getRobotNode("Cameras")->getGlobalPose())->toEigen());
FramedDirectionPtr direction = new FramedDirection(Eigen::Vector3f(1, 0, 0), "Cameras", robotPrx->getName());
drawer->setArrowDebugLayerVisu("HeadRealViewDirection", startPos, direction->toGlobal(robotPrx), DrawColor {0, 1, 0, 0.2}, maxObjectDistance, 5);
}
headIKUnitProxy->setHeadTarget(headIKKinematicChainName, viewTargetPositionPtr);
timeOfLastViewChange = IceUtil::Time::now();
boost::this_thread::sleep(boost::posix_time::milliseconds(sleepingTimeBetweenViewDirectionChanges));
}
}
else
{
boost::this_thread::sleep(boost::posix_time::milliseconds(5));
}
}
catch (...)
{
ARMARX_WARNING_S << "An exception occured during process()";
handleExceptions();
}
}
void ViewSelection::addSaliencyRecursive(int currentNodeIndex, float saliency, TSphereCoord objectSphereCoord, int objectIndex, float maxDistanceOnArc)
{
// distance on arc between object projection center and node,
// normalized by the maximal viewing angle of the camera (=> in [0,1])
float normalizedDistance = MathTools::getDistanceOnArc(objectSphereCoord, saliencyEgosphereGraph->getNodes()->at(currentNodeIndex)->getPosition()) / maxDistanceOnArc;
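// The contribution falls off quadratically with the normalized distance from the projection
// center: full saliency at the center, (1 - deviationFromCameraCenterFactor) = 0.5 of it at the
// edge of the visibility cone.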
// increase value of node
float newValue = ((CIntensityNode*)saliencyEgosphereGraph->getNodes()->at(currentNodeIndex))->getIntensity()
+ (1.0f - deviationFromCameraCenterFactor * normalizedDistance * normalizedDistance) * saliency;
((CIntensityNode*)saliencyEgosphereGraph->getNodes()->at(currentNodeIndex))->setIntensity(newValue);
// mark node as visited for this object
nodeVisitedForObject.at(currentNodeIndex) = objectIndex;
// recurse on neighbours if they were not yet visited and close enough to the object projection center
int neighbourIndex;
for (int i = 0; i < (int)saliencyEgosphereGraph->getNodeAdjacency(currentNodeIndex)->size(); i++)
{
neighbourIndex = saliencyEgosphereGraph->getNodeAdjacency(currentNodeIndex)->at(i);
if (nodeVisitedForObject.at(neighbourIndex) != objectIndex)
{
if (MathTools::getDistanceOnArc(objectSphereCoord, saliencyEgosphereGraph->getNodes()->at(neighbourIndex)->getPosition()) <= maxDistanceOnArc)
addSaliencyRecursive(neighbourIndex, saliency, objectSphereCoord, objectIndex, maxDistanceOnArc);
}
else
{
nodeVisitedForObject.at(neighbourIndex) = objectIndex;
}
}
}
void ViewSelection::setRandomNoise()
{
if (!robotStateComponent)
{
return;
}
Eigen::Vector3f currentViewTargetEigen(1000, 0, 0); // virtual central gaze goes into x direction
FramedPosition currentViewTarget(currentViewTargetEigen, cameraFrameName, robotStateComponent->getSynchronizedRobot()->getName());
currentViewTarget.changeFrame(robotStateComponent->getSynchronizedRobot(), headFrameName);
currentViewTargetEigen = currentViewTarget.toEigen() - offsetToHeadCenter;
TSphereCoord currentViewDirection;
MathTools::convert(currentViewTargetEigen, currentViewDirection);
const float centralAngle = getProperty<float>("CentralHeadTiltAngle").getValue();
TNodeList* nodes = randomNoiseGraph->getNodes();
const int randomFactor = 100000;
const float noiseFactor = randomNoiseLevel / randomFactor;
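// Each node receives uniform random noise in [0, randomNoiseLevel), attenuated for directions
// far from the current view direction and far from the preferred (central) head tilt angle.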
for (size_t i = 0; i < nodes->size(); i++)
{
CIntensityNode* node = (CIntensityNode*) nodes->at(i);
TSphereCoord nodeCoord = node->getPosition();
float distanceOnSphere = MathTools::getDistanceOnArc(currentViewDirection, nodeCoord);
float distanceFactor = 1.0f - 0.4f * distanceOnSphere / M_PI; // prefer directions close to current direction
float heightFactor = 1.0f - 0.15f * fabs(nodeCoord.fPhi - centralAngle) / 90.0f; // prefer directions close to the central head tilt angle (looking slightly downwards)
node->setIntensity(heightFactor * distanceFactor * noiseFactor * (rand() % randomFactor));
}
}
void ViewSelection::addManualViewTarget(const FramedPositionBasePtr& target, const Ice::Current& c)
{
ScopedLock lock(manualViewTargetsMutex);
manualViewTargets.push_back(target);
}
PropertyDefinitionsPtr ViewSelection::createPropertyDefinitions()
{
return PropertyDefinitionsPtr(new ViewSelectionPropertyDefinitions(getConfigIdentifier()));
}