Commit 4c073264 authored by Giorgio Azzinnaro

working top-level normalization

parent a6af9821
@@ -40,7 +40,7 @@ profanedb::protobuf::PutResp profanedb::storage::Db::Put(const profanedb::protob
     auto map = normalizer.NormalizeMessage(request.serializable());
     for (auto const & obj: map) {
-        std::cout << obj.first << ":" << std::endl << obj.second.DebugString() << std::endl;
+        std::cout << obj.first << ":" << std::endl << obj.second->DebugString() << std::endl;
     }
 }
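The only change needed at this call site follows from the new map value type: the values are now `std::shared_ptr<const google::protobuf::Message>`, so the loop dereferences with `->`. Below is a minimal, hypothetical caller in the same spirit; `PutExample`, `KeyedMessage`, and the include path are illustrative stand-ins rather than code from the repository, and the `Normalizer` API is the one whose declaration changes further down.

```cpp
#include <iostream>

#include <google/protobuf/any.pb.h>
#include "profanedb/storage/normalizer.h"  // assumed path to the Normalizer declared in the header hunk below

// Hypothetical caller mirroring the Db::Put hunk above. KeyedMessage stands in
// for any concrete protobuf type known to the normalizer's schema pool.
template <typename KeyedMessage>
void PutExample(profanedb::storage::Normalizer & normalizer, const KeyedMessage & payload)
{
    google::protobuf::Any serializable;
    serializable.PackFrom(payload);

    // The map values are shared_ptr<const Message>, hence obj.second->DebugString()
    // rather than obj.second.DebugString().
    auto map = normalizer.NormalizeMessage(serializable);
    for (auto const & obj : map) {
        std::cout << obj.first << ":" << std::endl
                  << obj.second->DebugString() << std::endl;
    }
}
```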
@@ -28,7 +28,7 @@ profanedb::storage::Normalizer::Normalizer(Parser & parser)
 {
 }
-std::map<std::string, const google::protobuf::Message> profanedb::storage::Normalizer::NormalizeMessage(
+std::map<std::string, std::shared_ptr<const google::protobuf::Message>> profanedb::storage::Normalizer::NormalizeMessage(
     const google::protobuf::Any & serializable)
 {
     // Any messages have a type url beginning with `type.googleapis.com/`, this is stripped
@@ -36,23 +36,23 @@ std::map<std::string, const google::protobuf::Message> profanedb::storage::Norma
     auto definition = std::unique_ptr<const google::protobuf::Descriptor>(schemaPool->FindMessageTypeByName(type.substr(type.rfind('/')+1, std::string::npos)));
     // Having the definition our message factory can simply generate a container,
-    auto container = std::unique_ptr<google::protobuf::Message>(messageFactory.GetPrototype(definition.get())->New());
+    auto container = std::shared_ptr<google::protobuf::Message>(messageFactory.GetPrototype(definition.get())->New());
     // and convert the bytes coming from Any into it
     serializable.UnpackTo(container.get());
     // The method getting a Message as parameter does the actual normalization of data
-    return this->NormalizeMessage(*container);
+    return this->NormalizeMessage(container);
 }
-std::map<std::string, const google::protobuf::Message> profanedb::storage::Normalizer::NormalizeMessage(
-    const google::protobuf::Message & message) const
+std::map<std::string, std::shared_ptr<const google::protobuf::Message>> profanedb::storage::Normalizer::NormalizeMessage(
+    std::shared_ptr<const google::protobuf::Message> message) const
 {
-    auto dependencies = std::map<std::string, const google::protobuf::Message>();
-    Parser::NormalizedDescriptor & normalizedDesc = parser.normalizedDescriptors.at(message.GetDescriptor()->full_name());
+    auto dependencies = std::map<std::string, std::shared_ptr<const google::protobuf::Message>>();
+    Parser::NormalizedDescriptor & normalizedDesc = parser.normalizedDescriptors.at(message->GetDescriptor()->full_name());
-    dependencies.insert(std::pair<std::string, const google::protobuf::Message>(
-        FieldToKey(message, normalizedDesc.GetKey()),
+    dependencies.insert(std::pair<std::string, std::shared_ptr<const google::protobuf::Message>>(
+        FieldToKey(*message, normalizedDesc.GetKey()),
         message));
     return dependencies;
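The switch to `std::shared_ptr` is what makes this map workable: `google::protobuf::Message` is an abstract class, so a `std::map<std::string, const google::protobuf::Message>` cannot hold values at all, whereas a map of `shared_ptr<const Message>` can, and the same dynamically created container can then be both stored and handed to the second overload. The sketch below isolates that unpack-then-store pattern using only protobuf calls that exist as written; the function name, the choice of key (the type name rather than FieldToKey), and the absence of recursion are simplifications of what the commit actually does.

```cpp
#include <map>
#include <memory>
#include <string>

#include <google/protobuf/any.pb.h>
#include <google/protobuf/descriptor.h>
#include <google/protobuf/dynamic_message.h>

using google::protobuf::Any;
using google::protobuf::Descriptor;
using google::protobuf::DescriptorPool;
using google::protobuf::DynamicMessageFactory;
using google::protobuf::Message;

// Illustrative helper: resolve the Any's type URL against a descriptor pool,
// build an instance through a dynamic message factory, unpack the payload into
// it, and return it keyed by a string (here simply the type name).
std::map<std::string, std::shared_ptr<const Message>> UnpackToMap(
    const Any & serializable,
    const DescriptorPool & pool,
    DynamicMessageFactory & factory)
{
    // "type.googleapis.com/foo.Bar" -> "foo.Bar"
    const std::string & url = serializable.type_url();
    const std::string typeName = url.substr(url.rfind('/') + 1);

    // The descriptor is owned by the pool; we only borrow it here.
    const Descriptor * descriptor = pool.FindMessageTypeByName(typeName);
    if (descriptor == nullptr) {
        return {};  // unknown type: nothing to normalize
    }

    // GetPrototype() returns a factory-owned prototype; New() hands us an
    // instance we own, so wrapping it in a shared_ptr is safe.
    auto container = std::shared_ptr<Message>(factory.GetPrototype(descriptor)->New());
    serializable.UnpackTo(container.get());

    std::map<std::string, std::shared_ptr<const Message>> result;
    result.emplace(typeName, container);  // shared_ptr<Message> converts to shared_ptr<const Message>
    return result;
}
```

Whether `shared_ptr` or `unique_ptr` should own the message is a design choice; `shared_ptr` is the simpler fit here because the same pointer ends up both as a map value and as the argument to the second NormalizeMessage overload.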
@@ -41,12 +41,12 @@ public:
     Normalizer(Parser & parser);
     // This is just a layer on top of NormalizeMessage(Message) to parse an Any message
-    std::map<std::string, const google::protobuf::Message> NormalizeMessage(
+    std::map<std::string, std::shared_ptr<const google::protobuf::Message>> NormalizeMessage(
         const google::protobuf::Any & serializable);
     // Unnest keyable messages and assign their key to their parent object
-    std::map<std::string, const google::protobuf::Message> NormalizeMessage(
-        const google::protobuf::Message & message) const;
+    std::map<std::string, std::shared_ptr<const google::protobuf::Message>> NormalizeMessage(
+        std::shared_ptr<const google::protobuf::Message> message) const;
 private:
     Parser & parser;