Skip to content
Snippets Groups Projects

Draft: Feature/mongo db

Open Joana Plewnia requested to merge feature/MongoDB into master
1 unresolved thread
Compare and
11 files
+ 601
234
Compare changes
  • Side-by-side
  • Inline
Files
11
@@ -39,13 +39,56 @@ namespace armarx::armem::server::ltm::util::mongodb
return ret;
}
//Only for MongoCXX Version < 3.5.0 required.
/// Lists the names of all databases known to \p client.
/// Fallback for driver versions lacking client.list_database_names():
/// iterates the list_databases() cursor and collects each "name" field.
std::vector<std::string>
list_database_names(mongocxx::client& client)
{
std::vector<std::string> names;
for (auto&& doc : client.list_databases())
{
names.push_back(std::string(doc["name"].get_utf8().value));
}
return names;
}
//Only for MongoCXX Version < 3.4.0 required.
/// Lists the names of all collections in \p db.
/// Fallback for driver versions lacking db.list_collection_names():
/// iterates the list_collections() cursor and collects each "name" field.
std::vector<std::string>
list_collection_names(mongocxx::database& db)
{
std::vector<std::string> collection_names;
auto collections = db.list_collections();
for (auto&& view : collections)
{
collection_names.push_back(std::string(view["name"].get_utf8().value));
}
return collection_names;
}
/// Inserts \p value as a BSON document into \p coll.
///
/// MongoDB rejects documents larger than 16 MB. The size is checked BEFORE
/// the insert attempt; if the document is too large, the per-instance data
/// payloads (e.g. images) are stripped and the slimmed document is inserted
/// instead.
///
/// @return true if the (single) insert succeeded.
bool
insert(mongocxx::collection& coll, const nlohmann::json& value)
{
nlohmann::json json = value;
auto q = bsoncxx::from_json(json.dump());
// 16 MB MongoDB document size limit.
constexpr size_t maxDocumentSize = size_t(16) * 1024 * 1024;
size_t document_size = q.view().length();
if (document_size > maxDocumentSize)
{
ARMARX_INFO << deactivateSpam()
<< "Document size exceeds the 16MB limit: " << document_size
<< " bytes. Delete image in VisonMemory.";
// Drop the bulky instance payloads and re-serialize the slimmed document.
for (auto& instance : json["_data"]["_entity_instances"]) instance["_data"] = nullptr;
q = bsoncxx::from_json(json.dump());
}
// Single insert attempt; res is empty if the write failed.
// (Previously `res` was declared twice and the oversized document was
// inserted once before the size check.)
auto res = coll.insert_one(q.view());
return (bool)res;
}
@@ -77,17 +120,57 @@ namespace armarx::armem::server::ltm::util::mongodb
return (bool)res;
}
/// Returns the part of \p collectionName after the last '.'
/// (e.g. "Memory.Core.Provider" -> "Provider"). If the name contains
/// no '.', it is returned unchanged.
std::string
extractLastElementFromCollectionName(const std::string& collectionName)
{
const size_t dotPos = collectionName.find_last_of('.');
return (dotPos == std::string::npos) ? collectionName
: collectionName.substr(dotPos + 1);
}
/// Strips the last '.'-separated element from \p collectionName
/// (e.g. "Memory.Core.Provider" -> "Memory.Core"). If the name contains
/// no '.', it is returned unchanged.
std::string
removeLastElementFromCollectionName(const std::string& collectionName)
{
if (const size_t dotPos = collectionName.rfind('.'); dotPos != std::string::npos)
{
return collectionName.substr(0, dotPos);
}
return collectionName;
}
} // namespace detail
/// Checks whether a database named \p databaseName exists on the server.
///
/// Iterates the list_databases() cursor (compatible with MongoCXX < 3.5,
/// which lacks client.list_database_names()) and compares each "name" field.
///
/// @return the database handle if found, std::nullopt otherwise
///         (also on any driver exception, which is logged).
std::optional<mongocxx::database>
databaseExists(mongocxx::client& client, const std::string& databaseName)
{
// NOTE: a stray unconditional `return std::nullopt;` previously made the
// whole lookup below unreachable, so this function always reported
// "not found". Removed.
try
{
mongocxx::cursor dbs = client.list_databases();
bsoncxx::stdx::string_view databaseNameView{databaseName};
for (auto& view : dbs)
{
if (view["name"].get_utf8().value == databaseNameView)
{
return client[databaseName];
}
}
return std::nullopt;
}
catch (const std::exception& e)
{
ARMARX_ERROR << "Exception while checking database existence: " << e.what();
return std::nullopt;
}
catch (...)
{
ARMARX_ERROR << "Exception while checking database existence";
return std::nullopt;
}
}
mongocxx::database
@@ -104,12 +187,17 @@ namespace armarx::armem::server::ltm::util::mongodb
databaseName);
}
}
//Workaround to create an empty Database client[databaseName] does not create a database if nothing is added
//db = client[databaseName];
//db.create_collection("empty");
return client[databaseName];
}
std::optional<mongocxx::collection>
collectionExists(mongocxx::database& db, const std::string& collectionName)
{
//ARMARX_INFO << "CollectionName: " << collectionName;
//return db[collectionName];
if (db.has_collection(collectionName))
{
return db[collectionName];
@@ -131,6 +219,7 @@ namespace armarx::armem::server::ltm::util::mongodb
collectionName);
}
}
//Workaround to create an empty Collection db[collectionName] does not create a Collection if no document is added
return db[collectionName];
}
@@ -141,7 +230,6 @@ namespace armarx::armem::server::ltm::util::mongodb
{
return id.timestampStr();
}
// fallback
throw armarx::LocalException("Called toDocumentName() on non-snapshot id: " + id.str());
}
@@ -153,7 +241,7 @@ namespace armarx::armem::server::ltm::util::mongodb
std::stringstream ss;
if (id.hasMemoryName())
{
ss << detail::escapeName(id.memoryName);
ss << (detail::escapeName(id.memoryName));
}
if (id.hasCoreSegmentName())
{
@@ -168,9 +256,75 @@ namespace armarx::armem::server::ltm::util::mongodb
ss << "." << detail::escapeName(id.entityName);
}
//ARMARX_INFO << deactivateSpam() << ss.str();
return ss.str();
}
/// Builds the GridFS file name for \p id by joining all defined ID
/// components with '.' in hierarchical order: memory, core segment,
/// provider segment, entity, timestamp, instance index.
/// Unlike toCollectionName(), the components are NOT escaped here.
std::string
toGridFSFileName(const armem::MemoryID& id)
{
ARMARX_CHECK(id.isWellDefined());
std::string name;
if (id.hasMemoryName())
{
name += id.memoryName;
}
if (id.hasCoreSegmentName())
{
name += "." + id.coreSegmentName;
}
if (id.hasProviderSegmentName())
{
name += "." + id.providerSegmentName;
}
if (id.hasEntityName())
{
name += "." + id.entityName;
}
if (id.hasTimestamp())
{
name += "." + id.timestampStr();
}
if (id.hasInstanceIndex())
{
name += "." + id.instanceIndexStr();
}
//ARMARX_INFO << deactivateSpam() << name;
return name;
}
/*unused
std::vector<std::string>
getDirectSuccessors(const std::vector<std::string>& collection_names, const std::string& prefix) {
std::vector<std::string> direct_successors;
for (const auto& coll_name : collection_names) {
auto segments = split(coll_name, ".");
if (segments.size() > collectionNameDepth) {
std::string current_prefix;
for (size_t i = 0; i < collectionNameDepth; ++i) {
if (i > 0) {
current_prefix += ".";
}
current_prefix += segments[i];
}
if (current_prefix == prefix) {
direct_successors.push_back(segments[collectionNameDepth]);
}
}
}
// Removing duplicates
std::set<std::string> unique_successors(direct_successors.begin(), direct_successors.end());
direct_successors.assign(unique_successors.begin(), unique_successors.end());
return direct_successors;
}
std::vector<std::string>
getSubCollections(mongocxx::database& db, const armem::MemoryID& id) {
return getDirectSuccessors(detail::list_collection_names(db), toCollectionName(id));
}
*/
std::optional<nlohmann::json>
documentExists(mongocxx::collection& collection, const nlohmann::json& json)
{
@@ -217,6 +371,33 @@ namespace armarx::armem::server::ltm::util::mongodb
return *doc;
}
/// Downloads the full content of the GridFS file whose ObjectId is given
/// by the hex string \p oidStr and returns it as a (binary-safe) std::string.
/// NOTE(review): no error handling — bsoncxx::oid throws on a malformed hex
/// string, and open_download_stream throws if the file does not exist.
std::string
readSpecialDataFromGridFS(mongocxx::gridfs::bucket &bucket, const std::string &oidStr)
{
// Newer MongoCXX Versions tested on 3.10.2
//auto downloader = bucket.open_download_stream(bsoncxx::types::bson_value::value((bsoncxx::oid(oidStr))).view());
// OR
//bsoncxx::oid oid(oidStr);
//bsoncxx::types::bson_value::value oid_value(oid);
//auto downloader = bucket.open_download_stream(oid_value.view());
// Workaround for older drivers: open_download_stream() needs a
// bson_value::view of the ObjectId. Wrap the oid in a temporary document
// and extract its value view — `filter` must stay alive while the view
// is used.
bsoncxx::oid oid(oidStr);
bsoncxx::document::value filter = bsoncxx::builder::stream::document{}
<< "oid" << oid << bsoncxx::builder::stream::finalize;
auto downloader = bucket.open_download_stream(filter.view()["oid"].get_value());
// Read the whole file in one go into a byte buffer.
std::int64_t file_length = downloader.file_length();
std::vector<std::uint8_t> download_buffer(file_length);
downloader.read(download_buffer.data(), file_length);
downloader.close();
// std::string is used as a byte container; the data may be binary.
return std::string(download_buffer.begin(), download_buffer.end());
}
void
writeDataToDocument(mongocxx::collection& collection,
const nlohmann::json& query,
@@ -224,17 +405,37 @@ namespace armarx::armem::server::ltm::util::mongodb
{
if (documentExists(collection, query))
{
detail::update(collection, query, update);
if (!update.empty())
{
detail::update(collection, query, update);
}
}
else
{
nlohmann::json full;
full.update(query);
full.update(update);
if (!update.empty())
{
full.update(update);
}
detail::insert(collection, full);
}
}
/// Uploads \p data (treated as raw bytes) to GridFS under \p filename.
/// @return the ObjectId of the newly created GridFS file as a hex string.
std::string
writeSpecialDataToGridFS(mongocxx::gridfs::bucket &bucket,
const bsoncxx::stdx::string_view& filename,
const std::string& data)
{
auto stream = bucket.open_upload_stream(filename);
const auto* bytes = reinterpret_cast<const std::uint8_t*>(data.data());
stream.write(bytes, data.size());
const auto uploadResult = stream.close();
return uploadResult.id().get_oid().value.to_string();
}
std::vector<nlohmann::json>
getAllDocuments(mongocxx::collection& collection)
{
@@ -247,5 +448,4 @@ namespace armarx::armem::server::ltm::util::mongodb
}
return ret;
}
} // namespace armarx::armem::server::ltm::util::mongodb
Loading