blob: 3324c6b9b839d085752790ebd83cd27c1277b3a3 (
plain) (
blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
|
// Copyright Vespa.ai. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
#include "distribution_config_util.h"
#include <vespa/vespalib/text/stringtokenizer.h>
#include <boost/lexical_cast.hpp>
namespace storage::lib {

/**
 * Split a '.'-separated group path string (e.g. "1.2.0") into its
 * numeric components, in order.
 *
 * Each token is converted with boost::lexical_cast, so a token that is
 * not a valid uint16_t (non-numeric, negative, or out of range) throws
 * boost::bad_lexical_cast. An empty input yields an empty vector.
 */
std::vector<uint16_t> DistributionConfigUtil::getGroupPath(vespalib::stringref path) {
    vespalib::StringTokenizer tokenizer(path, ".", "");
    std::vector<uint16_t> indices;
    indices.reserve(tokenizer.size());
    for (uint32_t idx = 0; idx < tokenizer.size(); ++idx) {
        indices.push_back(boost::lexical_cast<uint16_t>(tokenizer[idx]));
    }
    return indices;
}

}
|