Zhuji Mahjong: add redis

2672 lines · 95 KiB

  1. // Protocol Buffers - Google's data interchange format
  2. // Copyright 2008 Google Inc. All rights reserved.
  3. // https://developers.google.com/protocol-buffers/
  4. //
  5. // Redistribution and use in source and binary forms, with or without
  6. // modification, are permitted provided that the following conditions are
  7. // met:
  8. //
  9. // * Redistributions of source code must retain the above copyright
  10. // notice, this list of conditions and the following disclaimer.
  11. // * Redistributions in binary form must reproduce the above
  12. // copyright notice, this list of conditions and the following disclaimer
  13. // in the documentation and/or other materials provided with the
  14. // distribution.
  15. // * Neither the name of Google Inc. nor the names of its
  16. // contributors may be used to endorse or promote products derived from
  17. // this software without specific prior written permission.
  18. //
  19. // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
  20. // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
  21. // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
  22. // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
  23. // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
  24. // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
  25. // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
  26. // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
  27. // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  28. // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  29. // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  30. // Author: kenton@google.com (Kenton Varda)
  31. // Based on original Protocol Buffers design by
  32. // Sanjay Ghemawat, Jeff Dean, and others.
  33. //
  34. // RepeatedField and RepeatedPtrField are used by generated protocol message
  35. // classes to manipulate repeated fields. These classes are very similar to
  36. // STL's vector, but include a number of optimizations found to be useful
  37. // specifically in the case of Protocol Buffers. RepeatedPtrField is
  38. // particularly different from STL vector as it manages ownership of the
  39. // pointers that it contains.
  40. //
  41. // Typically, clients should not need to access RepeatedField objects directly,
  42. // but should instead use the accessor functions generated automatically by the
  43. // protocol compiler.
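//
// Illustrative sketch (not part of the original header): how the generated
// accessors map onto these classes. The message type "Foo" and its fields are
// hypothetical.
//
//   // Given: message Foo { repeated int32 ids = 1; repeated string names = 2; }
//   Foo foo;
//   foo.add_ids(7);                      // backed by a RepeatedField<int32>
//   foo.add_names("alice");              // backed by a RepeatedPtrField<std::string>
//   for (int id : foo.ids()) { /* ... */ }  // accessors expose the repeated field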
  44. #ifndef GOOGLE_PROTOBUF_REPEATED_FIELD_H__
  45. #define GOOGLE_PROTOBUF_REPEATED_FIELD_H__
  46. #include <utility>
  47. #ifdef _MSC_VER
  48. // This is required for min/max on VS2013 only.
  49. #include <algorithm>
  50. #endif
  51. #include <iterator>
  52. #include <limits>
  53. #include <string>
  54. #include <type_traits>
  55. #include <google/protobuf/stubs/logging.h>
  56. #include <google/protobuf/stubs/common.h>
  57. #include <google/protobuf/arena.h>
  58. #include <google/protobuf/message_lite.h>
  59. #include <google/protobuf/port.h>
  60. #include <google/protobuf/stubs/casts.h>
  61. #include <type_traits>
  62. #include <google/protobuf/port_def.inc>
  63. #ifdef SWIG
  64. #error "You cannot SWIG proto headers"
  65. #endif
  66. namespace google {
  67. namespace protobuf {
  68. class Message;
  69. class Reflection;
  70. template <typename T>
  71. struct WeakRepeatedPtrField;
  72. namespace internal {
  73. class MergePartialFromCodedStreamHelper;
  74. static const int kMinRepeatedFieldAllocationSize = 4;
  75. // A utility function for logging that doesn't need any template types.
  76. void LogIndexOutOfBounds(int index, int size);
  77. template <typename Iter>
  78. inline int CalculateReserve(Iter begin, Iter end, std::forward_iterator_tag) {
  79. return static_cast<int>(std::distance(begin, end));
  80. }
  81. template <typename Iter>
  82. inline int CalculateReserve(Iter /*begin*/, Iter /*end*/,
  83. std::input_iterator_tag /*unused*/) {
  84. return -1;
  85. }
  86. template <typename Iter>
  87. inline int CalculateReserve(Iter begin, Iter end) {
  88. typedef typename std::iterator_traits<Iter>::iterator_category Category;
  89. return CalculateReserve(begin, end, Category());
  90. }
  91. } // namespace internal
  92. // RepeatedField is used to represent repeated fields of a primitive type (in
  93. // other words, everything except strings and nested Messages). Most users will
  94. // not ever use a RepeatedField directly; they will use the get-by-index,
  95. // set-by-index, and add accessors that are generated for all repeated fields.
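//
// Illustrative sketch (not part of the original header) of direct use, for the
// rare cases where code works with the repeated field itself:
//
//   RepeatedField<int> values;
//   values.Add(1);
//   values.Add(2);
//   values.Set(0, 10);                  // overwrite by index
//   int first = values.Get(0);          // first == 10
//   for (int v : values) { /* ... */ }  // STL-style iteration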
  96. template <typename Element>
  97. class RepeatedField final {
  98. static_assert(
  99. alignof(Arena) >= alignof(Element),
  100. "We only support types that have an alignment smaller than Arena");
  101. public:
  102. RepeatedField();
  103. explicit RepeatedField(Arena* arena);
  104. RepeatedField(const RepeatedField& other);
  105. template <typename Iter>
  106. RepeatedField(Iter begin, const Iter& end);
  107. ~RepeatedField();
  108. RepeatedField& operator=(const RepeatedField& other);
  109. RepeatedField(RepeatedField&& other) noexcept;
  110. RepeatedField& operator=(RepeatedField&& other) noexcept;
  111. bool empty() const;
  112. int size() const;
  113. const Element& Get(int index) const;
  114. Element* Mutable(int index);
  115. const Element& operator[](int index) const { return Get(index); }
  116. Element& operator[](int index) { return *Mutable(index); }
  117. const Element& at(int index) const;
  118. Element& at(int index);
  119. void Set(int index, const Element& value);
  120. void Add(const Element& value);
  121. // Appends a new element and returns a pointer to it.
  122. // The new element is uninitialized if |Element| is a POD type.
  123. Element* Add();
  124. // Append elements in the range [begin, end) after reserving
  125. // the appropriate number of elements.
  126. template <typename Iter>
  127. void Add(Iter begin, Iter end);
  128. // Remove the last element in the array.
  129. void RemoveLast();
  130. // Extract elements with indices in "[start .. start+num-1]".
  131. // Copy them into "elements[0 .. num-1]" if "elements" is not NULL.
  132. // Caution: implementation also moves elements with indices [start+num ..].
  133. // Calling this routine inside a loop can cause quadratic behavior.
  134. void ExtractSubrange(int start, int num, Element* elements);
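// Illustrative sketch (not part of the original header):
//
//   RepeatedField<int> f;              // suppose f holds {1, 2, 3, 4, 5}
//   int removed[2];
//   f.ExtractSubrange(1, 2, removed);  // removed == {2, 3}; f == {1, 4, 5}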
  135. void Clear();
  136. void MergeFrom(const RepeatedField& other);
  137. void CopyFrom(const RepeatedField& other);
  138. // Reserve space to expand the field to at least the given size. If the
  139. // array is grown, it will always be at least doubled in size.
  140. void Reserve(int new_size);
  141. // Resize the RepeatedField to a new, smaller size. This is O(1).
  142. void Truncate(int new_size);
  143. void AddAlreadyReserved(const Element& value);
  144. // Appends a new element and returns a pointer to it.
  145. // The new element is uninitialized if |Element| is a POD type.
  146. // Should be called only if Capacity() > Size().
  147. Element* AddAlreadyReserved();
  148. Element* AddNAlreadyReserved(int elements);
  149. int Capacity() const;
  150. // Like STL resize. Uses value to fill appended elements.
  151. // Like Truncate() if new_size <= size(), otherwise this is
  152. // O(new_size - size()).
  153. void Resize(int new_size, const Element& value);
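// Illustrative sketch (not part of the original header):
//
//   RepeatedField<int> f;  // size() == 0
//   f.Resize(3, 7);        // grows: f == {7, 7, 7}
//   f.Resize(1, 0);        // shrinks in O(1), like Truncate(): f == {7}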
  154. // Gets the underlying array. This pointer is possibly invalidated by
  155. // any add or remove operation.
  156. Element* mutable_data();
  157. const Element* data() const;
  158. // Swap entire contents with "other". If they are on separate arenas, the
  159. // data is copied between them.
  160. void Swap(RepeatedField* other);
  161. // Swap entire contents with "other". Should be called only if the caller can
  162. // guarantee that both repeated fields are on the same arena or are on the
  163. // heap. Swapping between different arenas is disallowed and caught by a
  164. // GOOGLE_DCHECK (see API docs for details).
  165. void UnsafeArenaSwap(RepeatedField* other);
  166. // Swap two elements.
  167. void SwapElements(int index1, int index2);
  168. // STL-like iterator support
  169. typedef Element* iterator;
  170. typedef const Element* const_iterator;
  171. typedef Element value_type;
  172. typedef value_type& reference;
  173. typedef const value_type& const_reference;
  174. typedef value_type* pointer;
  175. typedef const value_type* const_pointer;
  176. typedef int size_type;
  177. typedef ptrdiff_t difference_type;
  178. iterator begin();
  179. const_iterator begin() const;
  180. const_iterator cbegin() const;
  181. iterator end();
  182. const_iterator end() const;
  183. const_iterator cend() const;
  184. // Reverse iterator support
  185. typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
  186. typedef std::reverse_iterator<iterator> reverse_iterator;
  187. reverse_iterator rbegin() { return reverse_iterator(end()); }
  188. const_reverse_iterator rbegin() const {
  189. return const_reverse_iterator(end());
  190. }
  191. reverse_iterator rend() { return reverse_iterator(begin()); }
  192. const_reverse_iterator rend() const {
  193. return const_reverse_iterator(begin());
  194. }
  195. // Returns the number of bytes used by the repeated field, excluding
  196. // sizeof(*this)
  197. size_t SpaceUsedExcludingSelfLong() const;
  198. int SpaceUsedExcludingSelf() const {
  199. return internal::ToIntSize(SpaceUsedExcludingSelfLong());
  200. }
  201. // Removes the element referenced by position.
  202. //
  203. // Returns an iterator to the element immediately following the removed
  204. // element.
  205. //
  206. // Invalidates all iterators at or after the removed element, including end().
  207. iterator erase(const_iterator position);
  208. // Removes the elements in the range [first, last).
  209. //
  210. // Returns an iterator to the element immediately following the removed range.
  211. //
  212. // Invalidates all iterators at or after the removed range, including end().
  213. iterator erase(const_iterator first, const_iterator last);
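// Illustrative sketch (not part of the original header): the usual STL
// erase-remove idiom works here (assumes <algorithm> for std::remove).
//
//   RepeatedField<int> f;  // suppose f holds {1, 2, 2, 3}
//   f.erase(std::remove(f.begin(), f.end(), 2), f.end());  // f == {1, 3}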
  214. // Get the Arena on which this RepeatedField stores its elements.
  215. Arena* GetArena() const { return GetArenaNoVirtual(); }
  216. // For internal use only.
  217. //
  218. // This is public due to it being called by generated code.
  219. inline void InternalSwap(RepeatedField* other);
  220. private:
  221. static const int kInitialSize = 0;
  222. // A note on the representation here (see also comment below for
  223. // RepeatedPtrFieldBase's struct Rep):
  224. //
  225. // We maintain the same sizeof(RepeatedField) as before we added arena support
  226. // so that we do not degrade performance by bloating memory usage. Directly
  227. // adding an arena_ element to RepeatedField is quite costly. By using
  228. // indirection in this way, we keep the same size when the RepeatedField is
  229. // empty (common case), and add only an 8-byte header to the elements array
  230. // when non-empty. We make sure to place the size fields directly in the
  231. // RepeatedField class to avoid costly cache misses due to the indirection.
  232. int current_size_;
  233. int total_size_;
  234. struct Rep {
  235. Arena* arena;
  236. Element elements[1];
  237. };
  238. // We cannot use sizeof(Rep) - sizeof(Element) due to the trailing padding on
  239. // the struct. Nor can we use sizeof(Arena*), because there might be
  240. // a "gap" after the field arena and before the field elements (e.g., when
  241. // Element is double and the pointer is 32-bit).
  242. static const size_t kRepHeaderSize;
  243. // If total_size_ == 0 this points to an Arena otherwise it points to the
  244. // elements member of a Rep struct. Using this invariant allows the storage of
  245. // the arena pointer without an extra allocation in the constructor.
  246. void* arena_or_elements_;
  247. // Return pointer to elements array.
  248. // pre-condition: the array must have been allocated.
  249. Element* elements() const {
  250. GOOGLE_DCHECK_GT(total_size_, 0);
  251. // Because of above pre-condition this cast is safe.
  252. return unsafe_elements();
  253. }
  254. // Return pointer to elements array if it exists; otherwise either null or
  255. // an invalid pointer is returned. This only happens for empty repeated fields,
  256. // where you can't dereference this pointer anyway (it's empty).
  257. Element* unsafe_elements() const {
  258. return static_cast<Element*>(arena_or_elements_);
  259. }
  260. // Return pointer to the Rep struct.
  261. // pre-condition: the Rep must have been allocated, i.e. elements() is safe.
  262. Rep* rep() const {
  263. char* addr = reinterpret_cast<char*>(elements()) - offsetof(Rep, elements);
  264. return reinterpret_cast<Rep*>(addr);
  265. }
  266. friend class Arena;
  267. typedef void InternalArenaConstructable_;
  268. // Move the contents of |from| into |to|, possibly clobbering |from| in the
  269. // process. For primitive types this is just a memcpy(), but it could be
  270. // specialized for non-primitive types to, say, swap each element instead.
  271. void MoveArray(Element* to, Element* from, int size);
  272. // Copy the elements of |from| into |to|.
  273. void CopyArray(Element* to, const Element* from, int size);
  274. // Internal helper expected by Arena methods.
  275. inline Arena* GetArenaNoVirtual() const {
  276. return (total_size_ == 0) ? static_cast<Arena*>(arena_or_elements_)
  277. : rep()->arena;
  278. }
  279. // Internal helper to delete all elements and deallocate the storage.
  280. // If Element has a trivial destructor (for example, if it's a fundamental
  281. // type, like int32), the loop will be removed by the optimizer.
  282. void InternalDeallocate(Rep* rep, int size) {
  283. if (rep != NULL) {
  284. Element* e = &rep->elements[0];
  285. Element* limit = &rep->elements[size];
  286. for (; e < limit; e++) {
  287. e->~Element();
  288. }
  289. if (rep->arena == NULL) {
  290. #if defined(__GXX_DELETE_WITH_SIZE__) || defined(__cpp_sized_deallocation)
  291. const size_t bytes = size * sizeof(*e) + kRepHeaderSize;
  292. ::operator delete(static_cast<void*>(rep), bytes);
  293. #else
  294. ::operator delete(static_cast<void*>(rep));
  295. #endif
  296. }
  297. }
  298. }
  299. };
  300. template <typename Element>
  301. const size_t RepeatedField<Element>::kRepHeaderSize =
  302. reinterpret_cast<size_t>(&reinterpret_cast<Rep*>(16)->elements[0]) - 16;
  303. namespace internal {
  304. template <typename It>
  305. class RepeatedPtrIterator;
  306. template <typename It, typename VoidPtr>
  307. class RepeatedPtrOverPtrsIterator;
  308. } // namespace internal
  309. namespace internal {
  310. // This is a helper template to copy an array of elements efficiently when they
  311. // have a trivial copy constructor, and correctly otherwise. This really
  312. // shouldn't be necessary, but our compiler doesn't optimize std::copy very
  313. // effectively.
  314. template <typename Element,
  315. bool HasTrivialCopy =
  316. std::is_pod<Element>::value>
  317. struct ElementCopier {
  318. void operator()(Element* to, const Element* from, int array_size);
  319. };
  320. } // namespace internal
  321. namespace internal {
  322. // type-traits helper for RepeatedPtrFieldBase: we only want to invoke
  323. // arena-related "copy if on different arena" behavior if the necessary methods
  324. // exist on the contained type. In particular, we rely on MergeFrom() existing
  325. // as a general proxy for the fact that a copy will work, and we also provide a
  326. // specific override for std::string*.
  327. template <typename T>
  328. struct TypeImplementsMergeBehaviorProbeForMergeFrom {
  329. typedef char HasMerge;
  330. typedef long HasNoMerge;
  331. // We accept either of:
  332. // - void MergeFrom(const T& other)
  333. // - bool MergeFrom(const T& other)
  334. //
  335. // We mangle these names a bit to avoid compatibility issues in 'unclean'
  336. // include environments that may have, e.g., "#define test ..." (yes, this
  337. // exists).
  338. template <typename U, typename RetType, RetType (U::*)(const U& arg)>
  339. struct CheckType;
  340. template <typename U>
  341. static HasMerge Check(CheckType<U, void, &U::MergeFrom>*);
  342. template <typename U>
  343. static HasMerge Check(CheckType<U, bool, &U::MergeFrom>*);
  344. template <typename U>
  345. static HasNoMerge Check(...);
  346. // Resolves to either std::true_type or std::false_type.
  347. typedef std::integral_constant<bool,
  348. (sizeof(Check<T>(0)) == sizeof(HasMerge))>
  349. type;
  350. };
  351. template <typename T, typename = void>
  352. struct TypeImplementsMergeBehavior
  353. : TypeImplementsMergeBehaviorProbeForMergeFrom<T> {};
  354. template <>
  355. struct TypeImplementsMergeBehavior<std::string> {
  356. typedef std::true_type type;
  357. };
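// Illustrative sketch (not part of the original header) of how the trait
// resolves; "MyMessage" stands for any type that declares MergeFrom():
//
//   static_assert(TypeImplementsMergeBehavior<std::string>::type::value,
//                 "std::string is explicitly opted in above");
//   static_assert(TypeImplementsMergeBehavior<MyMessage>::type::value,
//                 "generated messages declare void MergeFrom(const MyMessage&)");
//   // A type with no MergeFrom() member resolves to std::false_type instead.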
  358. template <typename T>
  359. struct IsMovable
  360. : std::integral_constant<bool, std::is_move_constructible<T>::value &&
  361. std::is_move_assignable<T>::value> {};
  362. // This is the common base class for RepeatedPtrFields. It deals only in void*
  363. // pointers. Users should not use this interface directly.
  364. //
  365. // The methods of this interface correspond to the methods of RepeatedPtrField,
  366. // but may have a template argument called TypeHandler. Its signature is:
  367. // class TypeHandler {
  368. // public:
  369. // typedef MyType Type;
  370. // static Type* New();
  371. // static Type* NewFromPrototype(const Type* prototype,
  372. // Arena* arena);
  373. // static void Delete(Type*);
  374. // static void Clear(Type*);
  375. // static void Merge(const Type& from, Type* to);
  376. //
  377. // // Only needs to be implemented if SpaceUsedExcludingSelf() is called.
  378. // static int SpaceUsedLong(const Type&);
  379. // };
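// Illustrative sketch (not part of the original header): a handler for a
// hypothetical type "MyType", written in the spirit of GenericTypeHandler
// below (the real handlers take an Arena* in New()/Delete()):
//
//   class MyTypeHandler {
//    public:
//     typedef MyType Type;
//     using Movable = IsMovable<Type>;
//     static Type* New(Arena* arena) { return Arena::Create<MyType>(arena); }
//     static Type* NewFromPrototype(const Type* /*prototype*/, Arena* arena) {
//       return New(arena);
//     }
//     static void Delete(Type* value, Arena* arena) {
//       if (arena == NULL) delete value;
//     }
//     static void Clear(Type* value) { value->Clear(); }
//     static void Merge(const Type& from, Type* to) { to->MergeFrom(from); }
//     static size_t SpaceUsedLong(const Type& value) {
//       return value.SpaceUsedLong();
//     }
//   };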
  380. class PROTOBUF_EXPORT RepeatedPtrFieldBase {
  381. protected:
  382. RepeatedPtrFieldBase();
  383. explicit RepeatedPtrFieldBase(Arena* arena);
  384. ~RepeatedPtrFieldBase() {
  385. #ifndef NDEBUG
  386. // Try to trigger a segfault / ASan failure in non-opt builds if arena_'s
  387. // lifetime has ended before the destructor runs.
  388. if (arena_) (void)arena_->SpaceAllocated();
  389. #endif
  390. }
  391. public:
  392. // Must be called from destructor.
  393. template <typename TypeHandler>
  394. void Destroy();
  395. protected:
  396. bool empty() const;
  397. int size() const;
  398. template <typename TypeHandler>
  399. const typename TypeHandler::Type& at(int index) const;
  400. template <typename TypeHandler>
  401. typename TypeHandler::Type& at(int index);
  402. template <typename TypeHandler>
  403. typename TypeHandler::Type* Mutable(int index);
  404. template <typename TypeHandler>
  405. void Delete(int index);
  406. template <typename TypeHandler>
  407. typename TypeHandler::Type* Add(typename TypeHandler::Type* prototype = NULL);
  408. public:
  409. // The next few methods are public so that they can be called from generated
  410. // code when implicit weak fields are used, but they should never be called by
  411. // application code.
  412. template <typename TypeHandler>
  413. const typename TypeHandler::Type& Get(int index) const;
  414. // Creates and adds an element using the given prototype, without introducing
  415. // a link-time dependency on the concrete message type. This method is used to
  416. // implement implicit weak fields. The prototype may be NULL, in which case an
  417. // ImplicitWeakMessage will be used as a placeholder.
  418. MessageLite* AddWeak(const MessageLite* prototype);
  419. template <typename TypeHandler>
  420. void Clear();
  421. template <typename TypeHandler>
  422. void MergeFrom(const RepeatedPtrFieldBase& other);
  423. inline void InternalSwap(RepeatedPtrFieldBase* other);
  424. protected:
  425. template <
  426. typename TypeHandler,
  427. typename std::enable_if<TypeHandler::Movable::value>::type* = nullptr>
  428. void Add(typename TypeHandler::Type&& value);
  429. template <typename TypeHandler>
  430. void RemoveLast();
  431. template <typename TypeHandler>
  432. void CopyFrom(const RepeatedPtrFieldBase& other);
  433. void CloseGap(int start, int num);
  434. void Reserve(int new_size);
  435. int Capacity() const;
  436. // Used for constructing iterators.
  437. void* const* raw_data() const;
  438. void** raw_mutable_data() const;
  439. template <typename TypeHandler>
  440. typename TypeHandler::Type** mutable_data();
  441. template <typename TypeHandler>
  442. const typename TypeHandler::Type* const* data() const;
  443. template <typename TypeHandler>
  444. PROTOBUF_ALWAYS_INLINE void Swap(RepeatedPtrFieldBase* other);
  445. void SwapElements(int index1, int index2);
  446. template <typename TypeHandler>
  447. size_t SpaceUsedExcludingSelfLong() const;
  448. // Advanced memory management --------------------------------------
  449. // Like Add(), but if there are no cleared objects to use, returns NULL.
  450. template <typename TypeHandler>
  451. typename TypeHandler::Type* AddFromCleared();
  452. template <typename TypeHandler>
  453. void AddAllocated(typename TypeHandler::Type* value) {
  454. typename TypeImplementsMergeBehavior<typename TypeHandler::Type>::type t;
  455. AddAllocatedInternal<TypeHandler>(value, t);
  456. }
  457. template <typename TypeHandler>
  458. void UnsafeArenaAddAllocated(typename TypeHandler::Type* value);
  459. template <typename TypeHandler>
  460. typename TypeHandler::Type* ReleaseLast() {
  461. typename TypeImplementsMergeBehavior<typename TypeHandler::Type>::type t;
  462. return ReleaseLastInternal<TypeHandler>(t);
  463. }
  464. // Releases the last element and returns it, but does not do an out-of-arena
  465. // copy; it just returns the raw pointer to the contained element in the arena.
  466. template <typename TypeHandler>
  467. typename TypeHandler::Type* UnsafeArenaReleaseLast();
  468. int ClearedCount() const;
  469. template <typename TypeHandler>
  470. void AddCleared(typename TypeHandler::Type* value);
  471. template <typename TypeHandler>
  472. typename TypeHandler::Type* ReleaseCleared();
  473. template <typename TypeHandler>
  474. void AddAllocatedInternal(typename TypeHandler::Type* value, std::true_type);
  475. template <typename TypeHandler>
  476. void AddAllocatedInternal(typename TypeHandler::Type* value, std::false_type);
  477. template <typename TypeHandler>
  478. PROTOBUF_NOINLINE void AddAllocatedSlowWithCopy(
  479. typename TypeHandler::Type* value, Arena* value_arena, Arena* my_arena);
  480. template <typename TypeHandler>
  481. PROTOBUF_NOINLINE void AddAllocatedSlowWithoutCopy(
  482. typename TypeHandler::Type* value);
  483. template <typename TypeHandler>
  484. typename TypeHandler::Type* ReleaseLastInternal(std::true_type);
  485. template <typename TypeHandler>
  486. typename TypeHandler::Type* ReleaseLastInternal(std::false_type);
  487. template <typename TypeHandler>
  488. PROTOBUF_NOINLINE void SwapFallback(RepeatedPtrFieldBase* other);
  489. inline Arena* GetArenaNoVirtual() const { return arena_; }
  490. private:
  491. static const int kInitialSize = 0;
  492. // A few notes on internal representation:
  493. //
  494. // We use an indirected approach, with struct Rep, to keep
  495. // sizeof(RepeatedPtrFieldBase) equivalent to what it was before arena support
  496. // was added, namely, 3 8-byte machine words on x86-64. An instance of Rep is
  497. // allocated only when the repeated field is non-empty, and it is a
  498. // dynamically-sized struct (the header is directly followed by elements[]).
  499. // We place arena_ and current_size_ directly in the object to avoid cache
  500. // misses due to the indirection, because these fields are checked frequently.
  501. // Placing all fields directly in the RepeatedPtrFieldBase instance costs
  502. // significant performance for memory-sensitive workloads.
  503. Arena* arena_;
  504. int current_size_;
  505. int total_size_;
  506. struct Rep {
  507. int allocated_size;
  508. void* elements[1];
  509. };
  510. static const size_t kRepHeaderSize = sizeof(Rep) - sizeof(void*);
  511. Rep* rep_;
  512. template <typename TypeHandler>
  513. static inline typename TypeHandler::Type* cast(void* element) {
  514. return reinterpret_cast<typename TypeHandler::Type*>(element);
  515. }
  516. template <typename TypeHandler>
  517. static inline const typename TypeHandler::Type* cast(const void* element) {
  518. return reinterpret_cast<const typename TypeHandler::Type*>(element);
  519. }
  520. // Non-templated inner function to avoid code duplication. Takes a function
  521. // pointer to the type-specific (templated) inner allocate/merge loop.
  522. void MergeFromInternal(const RepeatedPtrFieldBase& other,
  523. void (RepeatedPtrFieldBase::*inner_loop)(void**,
  524. void**, int,
  525. int));
  526. template <typename TypeHandler>
  527. void MergeFromInnerLoop(void** our_elems, void** other_elems, int length,
  528. int already_allocated);
  529. // Internal helper: extend array space if necessary to contain |extend_amount|
  530. // more elements, and return a pointer to the element immediately following
  531. // the old list of elements. This interface factors out common behavior from
  532. // Reserve() and MergeFrom() to reduce code size. |extend_amount| must be > 0.
  533. void** InternalExtend(int extend_amount);
  534. // The reflection implementation needs to call protected methods directly,
  535. // reinterpreting pointers as being to Message instead of a specific Message
  536. // subclass.
  537. friend class ::PROTOBUF_NAMESPACE_ID::Reflection;
  538. // ExtensionSet stores repeated message extensions as
  539. // RepeatedPtrField<MessageLite>, but non-lite ExtensionSets need to implement
  540. // SpaceUsedLong(), and thus need to call SpaceUsedExcludingSelfLong()
  541. // reinterpreting MessageLite as Message. ExtensionSet also needs to make use
  542. // of AddFromCleared(), which is not part of the public interface.
  543. friend class ExtensionSet;
  544. // The MapFieldBase implementation needs to call protected methods directly,
  545. // reinterpreting pointers as being to Message instead of a specific Message
  546. // subclass.
  547. friend class MapFieldBase;
  548. // The table-driven MergePartialFromCodedStream implementation needs to
  549. // operate on RepeatedPtrField<MessageLite>.
  550. friend class MergePartialFromCodedStreamHelper;
  551. friend class AccessorHelper;
  552. template <typename T>
  553. friend struct google::protobuf::WeakRepeatedPtrField;
  554. GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(RepeatedPtrFieldBase);
  555. };
  556. template <typename GenericType>
  557. class GenericTypeHandler {
  558. public:
  559. typedef GenericType Type;
  560. using Movable = IsMovable<GenericType>;
  561. static inline GenericType* New(Arena* arena) {
  562. return Arena::CreateMaybeMessage<Type>(arena);
  563. }
  564. static inline GenericType* New(Arena* arena, GenericType&& value) {
  565. return Arena::Create<GenericType>(arena, std::move(value));
  566. }
  567. static inline GenericType* NewFromPrototype(const GenericType* prototype,
  568. Arena* arena = NULL);
  569. static inline void Delete(GenericType* value, Arena* arena) {
  570. if (arena == NULL) {
  571. delete value;
  572. }
  573. }
  574. static inline Arena* GetArena(GenericType* value) {
  575. return Arena::GetArena<Type>(value);
  576. }
  577. static inline void* GetMaybeArenaPointer(GenericType* value) {
  578. return Arena::GetArena<Type>(value);
  579. }
  580. static inline void Clear(GenericType* value) { value->Clear(); }
  581. PROTOBUF_NOINLINE
  582. static void Merge(const GenericType& from, GenericType* to);
  583. static inline size_t SpaceUsedLong(const GenericType& value) {
  584. return value.SpaceUsedLong();
  585. }
  586. };
  587. template <typename GenericType>
  588. GenericType* GenericTypeHandler<GenericType>::NewFromPrototype(
  589. const GenericType* /* prototype */, Arena* arena) {
  590. return New(arena);
  591. }
  592. template <typename GenericType>
  593. void GenericTypeHandler<GenericType>::Merge(const GenericType& from,
  594. GenericType* to) {
  595. to->MergeFrom(from);
  596. }
  597. // NewFromPrototype() and Merge() are not defined inline here, as we will need
  598. // to do a virtual function dispatch anyway to go from Message* to call
  599. // New/Merge.
  600. template <>
  601. MessageLite* GenericTypeHandler<MessageLite>::NewFromPrototype(
  602. const MessageLite* prototype, Arena* arena);
  603. template <>
  604. inline Arena* GenericTypeHandler<MessageLite>::GetArena(MessageLite* value) {
  605. return value->GetArena();
  606. }
  607. template <>
  608. inline void* GenericTypeHandler<MessageLite>::GetMaybeArenaPointer(
  609. MessageLite* value) {
  610. return value->GetMaybeArenaPointer();
  611. }
  612. template <>
  613. void GenericTypeHandler<MessageLite>::Merge(const MessageLite& from,
  614. MessageLite* to);
  615. template <>
  616. inline void GenericTypeHandler<std::string>::Clear(std::string* value) {
  617. value->clear();
  618. }
  619. template <>
  620. void GenericTypeHandler<std::string>::Merge(const std::string& from,
  621. std::string* to);
  622. // Message specialization bodies defined in message.cc. This split is necessary
  623. // to allow proto2-lite (which includes this header) to be independent of
  624. // Message.
  625. template <>
  626. PROTOBUF_EXPORT Message* GenericTypeHandler<Message>::NewFromPrototype(
  627. const Message* prototype, Arena* arena);
  628. template <>
  629. PROTOBUF_EXPORT Arena* GenericTypeHandler<Message>::GetArena(Message* value);
  630. template <>
  631. PROTOBUF_EXPORT void* GenericTypeHandler<Message>::GetMaybeArenaPointer(
  632. Message* value);
  633. class StringTypeHandler {
  634. public:
  635. typedef std::string Type;
  636. using Movable = IsMovable<Type>;
  637. static inline std::string* New(Arena* arena) {
  638. return Arena::Create<std::string>(arena);
  639. }
  640. static inline std::string* New(Arena* arena, std::string&& value) {
  641. return Arena::Create<std::string>(arena, std::move(value));
  642. }
  643. static inline std::string* NewFromPrototype(const std::string*,
  644. Arena* arena) {
  645. return New(arena);
  646. }
  647. static inline Arena* GetArena(std::string*) { return NULL; }
  648. static inline void* GetMaybeArenaPointer(std::string* /* value */) {
  649. return NULL;
  650. }
  651. static inline void Delete(std::string* value, Arena* arena) {
  652. if (arena == NULL) {
  653. delete value;
  654. }
  655. }
  656. static inline void Clear(std::string* value) { value->clear(); }
  657. static inline void Merge(const std::string& from, std::string* to) {
  658. *to = from;
  659. }
  660. static size_t SpaceUsedLong(const std::string& value) {
  661. return sizeof(value) + StringSpaceUsedExcludingSelfLong(value);
  662. }
  663. };
  664. } // namespace internal
  665. // RepeatedPtrField is like RepeatedField, but used for repeated strings or
  666. // Messages.
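// Illustrative sketch (not part of the original header):
//
//   RepeatedPtrField<std::string> names;
//   names.Add()->assign("alice");        // Add() returns a mutable element
//   *names.Add() = "bob";
//   const std::string& first = names.Get(0);
//   names.RemoveLast();                  // element is retained for reuse, not freed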
  667. template <typename Element>
  668. class RepeatedPtrField final : private internal::RepeatedPtrFieldBase {
  669. public:
  670. RepeatedPtrField();
  671. explicit RepeatedPtrField(Arena* arena);
  672. RepeatedPtrField(const RepeatedPtrField& other);
  673. template <typename Iter>
  674. RepeatedPtrField(Iter begin, const Iter& end);
  675. ~RepeatedPtrField();
  676. RepeatedPtrField& operator=(const RepeatedPtrField& other);
  677. RepeatedPtrField(RepeatedPtrField&& other) noexcept;
  678. RepeatedPtrField& operator=(RepeatedPtrField&& other) noexcept;
  679. bool empty() const;
  680. int size() const;
  681. const Element& Get(int index) const;
  682. Element* Mutable(int index);
  683. Element* Add();
  684. void Add(Element&& value);
  685. const Element& operator[](int index) const { return Get(index); }
  686. Element& operator[](int index) { return *Mutable(index); }
  687. const Element& at(int index) const;
  688. Element& at(int index);
  689. // Remove the last element in the array.
  690. // Ownership of the element is retained by the array.
  691. void RemoveLast();
  692. // Delete elements with indices in the range [start .. start+num-1].
  693. // Caution: implementation moves all elements with indices [start+num .. ].
  694. // Calling this routine inside a loop can cause quadratic behavior.
  695. void DeleteSubrange(int start, int num);
  696. void Clear();
  697. void MergeFrom(const RepeatedPtrField& other);
  698. void CopyFrom(const RepeatedPtrField& other);
  699. // Reserve space to expand the field to at least the given size. This only
  700. // resizes the pointer array; it doesn't allocate any objects. If the
  701. // array is grown, it will always be at least doubled in size.
  702. void Reserve(int new_size);
  703. int Capacity() const;
  704. // Gets the underlying array. This pointer is possibly invalidated by
  705. // any add or remove operation.
  706. Element** mutable_data();
  707. const Element* const* data() const;
  708. // Swap entire contents with "other". If they are on separate arenas, the
  709. // data is copied.
  710. void Swap(RepeatedPtrField* other);
  711. // Swap entire contents with "other". Caller should guarantee that either both
  712. // fields are on the same arena or both are on the heap. Swapping between
  713. // different arenas with this function is disallowed and is caught via
  714. // GOOGLE_DCHECK.
  715. void UnsafeArenaSwap(RepeatedPtrField* other);
  716. // Swap two elements.
  717. void SwapElements(int index1, int index2);
  718. // STL-like iterator support
  719. typedef internal::RepeatedPtrIterator<Element> iterator;
  720. typedef internal::RepeatedPtrIterator<const Element> const_iterator;
  721. typedef Element value_type;
  722. typedef value_type& reference;
  723. typedef const value_type& const_reference;
  724. typedef value_type* pointer;
  725. typedef const value_type* const_pointer;
  726. typedef int size_type;
  727. typedef ptrdiff_t difference_type;
  728. iterator begin();
  729. const_iterator begin() const;
  730. const_iterator cbegin() const;
  731. iterator end();
  732. const_iterator end() const;
  733. const_iterator cend() const;
  734. // Reverse iterator support
  735. typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
  736. typedef std::reverse_iterator<iterator> reverse_iterator;
  737. reverse_iterator rbegin() { return reverse_iterator(end()); }
  738. const_reverse_iterator rbegin() const {
  739. return const_reverse_iterator(end());
  740. }
  741. reverse_iterator rend() { return reverse_iterator(begin()); }
  742. const_reverse_iterator rend() const {
  743. return const_reverse_iterator(begin());
  744. }
  745. // Custom STL-like iterator that iterates over and returns the underlying
  746. // pointers to Element rather than Element itself.
  747. typedef internal::RepeatedPtrOverPtrsIterator<Element*, void*>
  748. pointer_iterator;
  749. typedef internal::RepeatedPtrOverPtrsIterator<const Element* const,
  750. const void* const>
  751. const_pointer_iterator;
  752. pointer_iterator pointer_begin();
  753. const_pointer_iterator pointer_begin() const;
  754. pointer_iterator pointer_end();
  755. const_pointer_iterator pointer_end() const;
  756. // Returns (an estimate of) the number of bytes used by the repeated field,
  757. // excluding sizeof(*this).
  758. size_t SpaceUsedExcludingSelfLong() const;
  759. int SpaceUsedExcludingSelf() const {
  760. return internal::ToIntSize(SpaceUsedExcludingSelfLong());
  761. }
  762. // Advanced memory management --------------------------------------
  763. // When hardcore memory management becomes necessary -- as it sometimes
  764. // does here at Google -- the following methods may be useful.
  765. // Add an already-allocated object, passing ownership to the
  766. // RepeatedPtrField.
  767. //
  768. // Note that some special behavior occurs with respect to arenas:
  769. //
  770. // (i) if this field holds submessages, the new submessage will be copied if
  771. // the original is in an arena and this RepeatedPtrField is either in a
  772. // different arena, or on the heap.
  773. // (ii) if this field holds strings, the passed-in string *must* be
  774. // heap-allocated, not arena-allocated. There is no way to dynamically check
  775. // this at runtime, so User Beware.
  776. void AddAllocated(Element* value);
  777. // Remove the last element and return it, passing ownership to the caller.
  778. // Requires: size() > 0
  779. //
  780. // If this RepeatedPtrField is on an arena, an object copy is required to pass
  781. // ownership back to the user (for compatible semantics). Use
  782. // UnsafeArenaReleaseLast() if this behavior is undesired.
  783. Element* ReleaseLast();
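// Illustrative sketch (not part of the original header) of the ownership
// round trip on the heap (no arena); "MyMessage" is a hypothetical message:
//
//   RepeatedPtrField<MyMessage> field;
//   field.AddAllocated(new MyMessage);       // field now owns the object
//   MyMessage* taken = field.ReleaseLast();  // ownership returns to the caller
//   delete taken;                            // caller is responsible for freeing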
  784. // Add an already-allocated object, skipping arena-ownership checks. The user
  785. // must guarantee that the given object is in the same arena as this
  786. // RepeatedPtrField.
  787. // It is also useful in legacy code that uses temporary ownership to avoid
  788. // copies. Example:
  789. // RepeatedPtrField<T> temp_field;
  790. // temp_field.AddAllocated(new T);
  791. // ... // Do something with temp_field
  792. // temp_field.ExtractSubrange(0, temp_field.size(), nullptr);
  793. // If you put temp_field on an arena this fails, because ownership
  794. // transfers to the arena at the "AddAllocated" call and is not released
  795. // anymore, causing a double delete. UnsafeArenaAddAllocated prevents this.
  796. void UnsafeArenaAddAllocated(Element* value);
  797. // Remove the last element and return it. Works only when operating on an
  798. // arena. The returned pointer is to the original object in the arena, hence
  799. // has the arena's lifetime.
  800. // Requires: current_size_ > 0
  801. Element* UnsafeArenaReleaseLast();
  802. // Extract elements with indices in the range "[start .. start+num-1]".
  803. // The caller assumes ownership of the extracted elements and is responsible
  804. // for deleting them when they are no longer needed.
  805. // If "elements" is non-NULL, then pointers to the extracted elements
  806. // are stored in "elements[0 .. num-1]" for the convenience of the caller.
  807. // If "elements" is NULL, then the caller must use some other mechanism
  808. // to perform any further operations (like deletion) on these elements.
  809. // Caution: implementation also moves elements with indices [start+num ..].
  810. // Calling this routine inside a loop can cause quadratic behavior.
  811. //
  812. // Memory copying behavior is identical to ReleaseLast(), described above: if
  813. // this RepeatedPtrField is on an arena, an object copy is performed for each
  814. // returned element, so that all returned element pointers are to
  815. // heap-allocated copies. If this copy is not desired, the user should call
  816. // UnsafeArenaExtractSubrange().
  817. void ExtractSubrange(int start, int num, Element** elements);
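// Illustrative sketch (not part of the original header), heap case;
// "MyMessage" is a hypothetical message type:
//
//   RepeatedPtrField<MyMessage> field;       // suppose it holds 5 elements
//   MyMessage* extracted[2];
//   field.ExtractSubrange(1, 2, extracted);  // field.size() drops to 3
//   delete extracted[0];                     // caller now owns the extracted
//   delete extracted[1];                     // elements and must free them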
  818. // Identical to ExtractSubrange() described above, except that when this
  819. // repeated field is on an arena, no object copies are performed. Instead, the
  820. // raw object pointers are returned. Thus, if on an arena, the returned
  821. // objects must not be freed, because they will not be heap-allocated objects.
  822. void UnsafeArenaExtractSubrange(int start, int num, Element** elements);
  823. // When elements are removed by calls to RemoveLast() or Clear(), they
  824. // are not actually freed. Instead, they are cleared and kept so that
  825. // they can be reused later. This can save lots of CPU time when
  826. // repeatedly reusing a protocol message for similar purposes.
  827. //
  828. // Hardcore programs may choose to manipulate these cleared objects
  829. // to better optimize memory management using the following routines.
  830. // Get the number of cleared objects that are currently being kept
  831. // around for reuse.
  832. int ClearedCount() const;
  833. // Add an element to the pool of cleared objects, passing ownership to
  834. // the RepeatedPtrField. The element must be cleared prior to calling
  835. // this method.
  836. //
  837. // This method cannot be called when the repeated field is on an arena or when
  838. // |value| is; both cases will trigger a GOOGLE_DCHECK-failure.
  839. void AddCleared(Element* value);
  840. // Remove a single element from the cleared pool and return it, passing
  841. // ownership to the caller. The element is guaranteed to be cleared.
  842. // Requires: ClearedCount() > 0
  843. //
  844. //
  845. // This method cannot be called when the repeated field is on an arena; doing
  846. // so will trigger a GOOGLE_DCHECK-failure.
  847. Element* ReleaseCleared();
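// Illustrative sketch (not part of the original header): element reuse on the
// heap (no arena); "MyMessage" is a hypothetical message type:
//
//   RepeatedPtrField<MyMessage> field;
//   field.Add();                        // allocates a new element
//   field.Clear();                      // element is cleared and kept around
//   // field.ClearedCount() == 1
//   field.Add();                        // reuses the cleared element, no allocation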
  848. // Removes the element referenced by position.
  849. //
  850. // Returns an iterator to the element immediately following the removed
  851. // element.
  852. //
  853. // Invalidates all iterators at or after the removed element, including end().
  854. iterator erase(const_iterator position);
  855. // Removes the elements in the range [first, last).
  856. //
  857. // Returns an iterator to the element immediately following the removed range.
  858. //
  859. // Invalidates all iterators at or after the removed range, including end().
  860. iterator erase(const_iterator first, const_iterator last);
  861. // Gets the arena on which this RepeatedPtrField stores its elements.
  862. Arena* GetArena() const { return GetArenaNoVirtual(); }
  863. // For internal use only.
  864. //
  865. // This is public due to it being called by generated code.
  866. void InternalSwap(RepeatedPtrField* other) {
  867. internal::RepeatedPtrFieldBase::InternalSwap(other);
  868. }
  869. private:
  870. // Note: RepeatedPtrField SHOULD NOT be subclassed by users.
  871. class TypeHandler;
  872. // Internal arena accessor expected by helpers in Arena.
  873. inline Arena* GetArenaNoVirtual() const;
  874. // Implementations for ExtractSubrange(). The copying behavior must be
  875. // included only if the type supports the necessary operations (e.g.,
  876. // MergeFrom()), so we must resolve this at compile time. ExtractSubrange()
  877. // uses SFINAE to choose one of the below implementations.
  878. void ExtractSubrangeInternal(int start, int num, Element** elements,
  879. std::true_type);
  880. void ExtractSubrangeInternal(int start, int num, Element** elements,
  881. std::false_type);
  882. friend class Arena;
  883. template <typename T>
  884. friend struct WeakRepeatedPtrField;
  885. typedef void InternalArenaConstructable_;
  886. };
  887. // implementation ====================================================
  888. template <typename Element>
  889. inline RepeatedField<Element>::RepeatedField()
  890. : current_size_(0), total_size_(0), arena_or_elements_(nullptr) {}
  891. template <typename Element>
  892. inline RepeatedField<Element>::RepeatedField(Arena* arena)
  893. : current_size_(0), total_size_(0), arena_or_elements_(arena) {}
  894. template <typename Element>
  895. inline RepeatedField<Element>::RepeatedField(const RepeatedField& other)
  896. : current_size_(0), total_size_(0), arena_or_elements_(nullptr) {
  897. if (other.current_size_ != 0) {
  898. Reserve(other.size());
  899. AddNAlreadyReserved(other.size());
  900. CopyArray(Mutable(0), &other.Get(0), other.size());
  901. }
  902. }
  903. template <typename Element>
  904. template <typename Iter>
  905. RepeatedField<Element>::RepeatedField(Iter begin, const Iter& end)
  906. : current_size_(0), total_size_(0), arena_or_elements_(nullptr) {
  907. Add(begin, end);
  908. }
  909. template <typename Element>
  910. RepeatedField<Element>::~RepeatedField() {
  911. if (total_size_ > 0) {
  912. InternalDeallocate(rep(), total_size_);
  913. }
  914. }
  915. template <typename Element>
  916. inline RepeatedField<Element>& RepeatedField<Element>::operator=(
  917. const RepeatedField& other) {
  918. if (this != &other) CopyFrom(other);
  919. return *this;
  920. }
  921. template <typename Element>
  922. inline RepeatedField<Element>::RepeatedField(RepeatedField&& other) noexcept
  923. : RepeatedField() {
  924. // We don't just call Swap(&other) here because it would perform 3 copies if
  925. // other is on an arena. This field can't be on an arena because arena
  926. // construction always uses the Arena* accepting constructor.
  927. if (other.GetArenaNoVirtual()) {
  928. CopyFrom(other);
  929. } else {
  930. InternalSwap(&other);
  931. }
  932. }
  933. template <typename Element>
  934. inline RepeatedField<Element>& RepeatedField<Element>::operator=(
  935. RepeatedField&& other) noexcept {
  936. // We don't just call Swap(&other) here because it would perform 3 copies if
  937. // the two fields are on different arenas.
  938. if (this != &other) {
  939. if (this->GetArenaNoVirtual() != other.GetArenaNoVirtual()) {
  940. CopyFrom(other);
  941. } else {
  942. InternalSwap(&other);
  943. }
  944. }
  945. return *this;
  946. }
  947. template <typename Element>
  948. inline bool RepeatedField<Element>::empty() const {
  949. return current_size_ == 0;
  950. }
  951. template <typename Element>
  952. inline int RepeatedField<Element>::size() const {
  953. return current_size_;
  954. }
  955. template <typename Element>
  956. inline int RepeatedField<Element>::Capacity() const {
  957. return total_size_;
  958. }
  959. template <typename Element>
  960. inline void RepeatedField<Element>::AddAlreadyReserved(const Element& value) {
  961. GOOGLE_DCHECK_LT(current_size_, total_size_);
  962. elements()[current_size_++] = value;
  963. }
  964. template <typename Element>
  965. inline Element* RepeatedField<Element>::AddAlreadyReserved() {
  966. GOOGLE_DCHECK_LT(current_size_, total_size_);
  967. return &elements()[current_size_++];
  968. }
  969. template <typename Element>
  970. inline Element* RepeatedField<Element>::AddNAlreadyReserved(int n) {
  971. GOOGLE_DCHECK_GE(total_size_ - current_size_, n)
  972. << total_size_ << ", " << current_size_;
  973. // Warning: sometimes people call this when n == 0 and total_size_ == 0. In
  974. // this case the return pointer points to a zero size array (n == 0). Hence
  975. // we can just use unsafe_elements(), because the user cannot dereference the
  976. // pointer anyway.
  977. Element* ret = unsafe_elements() + current_size_;
  978. current_size_ += n;
  979. return ret;
  980. }
  981. template <typename Element>
  982. inline void RepeatedField<Element>::Resize(int new_size, const Element& value) {
  983. GOOGLE_DCHECK_GE(new_size, 0);
  984. if (new_size > current_size_) {
  985. Reserve(new_size);
  986. std::fill(&elements()[current_size_], &elements()[new_size], value);
  987. }
  988. current_size_ = new_size;
  989. }
  990. template <typename Element>
  991. inline const Element& RepeatedField<Element>::Get(int index) const {
  992. GOOGLE_DCHECK_GE(index, 0);
  993. GOOGLE_DCHECK_LT(index, current_size_);
  994. return elements()[index];
  995. }
  996. template <typename Element>
  997. inline const Element& RepeatedField<Element>::at(int index) const {
  998. GOOGLE_CHECK_GE(index, 0);
  999. GOOGLE_CHECK_LT(index, current_size_);
  1000. return elements()[index];
  1001. }
  1002. template <typename Element>
  1003. inline Element& RepeatedField<Element>::at(int index) {
  1004. GOOGLE_CHECK_GE(index, 0);
  1005. GOOGLE_CHECK_LT(index, current_size_);
  1006. return elements()[index];
  1007. }
  1008. template <typename Element>
  1009. inline Element* RepeatedField<Element>::Mutable(int index) {
  1010. GOOGLE_DCHECK_GE(index, 0);
  1011. GOOGLE_DCHECK_LT(index, current_size_);
  1012. return &elements()[index];
  1013. }
  1014. template <typename Element>
  1015. inline void RepeatedField<Element>::Set(int index, const Element& value) {
  1016. GOOGLE_DCHECK_GE(index, 0);
  1017. GOOGLE_DCHECK_LT(index, current_size_);
  1018. elements()[index] = value;
  1019. }
  1020. template <typename Element>
  1021. inline void RepeatedField<Element>::Add(const Element& value) {
  1022. if (current_size_ == total_size_) Reserve(total_size_ + 1);
  1023. elements()[current_size_++] = value;
  1024. }
  1025. template <typename Element>
  1026. inline Element* RepeatedField<Element>::Add() {
  1027. if (current_size_ == total_size_) Reserve(total_size_ + 1);
  1028. return &elements()[current_size_++];
  1029. }
  1030. template <typename Element>
  1031. template <typename Iter>
  1032. inline void RepeatedField<Element>::Add(Iter begin, Iter end) {
  1033. int reserve = internal::CalculateReserve(begin, end);
  1034. if (reserve != -1) {
  1035. if (reserve == 0) {
  1036. return;
  1037. }
  1038. Reserve(reserve + size());
  1039. // TODO(ckennelly): The compiler loses track of the buffer freshly
  1040. // allocated by Reserve() by the time we call elements, so it cannot
  1041. // guarantee that elements does not alias [begin(), end()).
  1042. //
  1043. // If restrict is available, annotating the pointer obtained from elements()
  1044. // causes this to lower to memcpy instead of memmove.
  1045. std::copy(begin, end, elements() + size());
  1046. current_size_ = reserve + size();
  1047. } else {
  1048. for (; begin != end; ++begin) {
  1049. Add(*begin);
  1050. }
  1051. }
  1052. }
  1053. template <typename Element>
  1054. inline void RepeatedField<Element>::RemoveLast() {
  1055. GOOGLE_DCHECK_GT(current_size_, 0);
  1056. current_size_--;
  1057. }
  1058. template <typename Element>
  1059. void RepeatedField<Element>::ExtractSubrange(int start, int num,
  1060. Element* elements) {
  1061. GOOGLE_DCHECK_GE(start, 0);
  1062. GOOGLE_DCHECK_GE(num, 0);
  1063. GOOGLE_DCHECK_LE(start + num, this->current_size_);
  1064. // Save the values of the removed elements if requested.
  1065. if (elements != NULL) {
  1066. for (int i = 0; i < num; ++i) elements[i] = this->Get(i + start);
  1067. }
  1068. // Slide remaining elements down to fill the gap.
  1069. if (num > 0) {
  1070. for (int i = start + num; i < this->current_size_; ++i)
  1071. this->Set(i - num, this->Get(i));
  1072. this->Truncate(this->current_size_ - num);
  1073. }
  1074. }
  1075. template <typename Element>
  1076. inline void RepeatedField<Element>::Clear() {
  1077. current_size_ = 0;
  1078. }
  1079. template <typename Element>
  1080. inline void RepeatedField<Element>::MergeFrom(const RepeatedField& other) {
  1081. GOOGLE_DCHECK_NE(&other, this);
  1082. if (other.current_size_ != 0) {
  1083. int existing_size = size();
  1084. Reserve(existing_size + other.size());
  1085. AddNAlreadyReserved(other.size());
  1086. CopyArray(Mutable(existing_size), &other.Get(0), other.size());
  1087. }
  1088. }
  1089. template <typename Element>
  1090. inline void RepeatedField<Element>::CopyFrom(const RepeatedField& other) {
  1091. if (&other == this) return;
  1092. Clear();
  1093. MergeFrom(other);
  1094. }
  1095. template <typename Element>
  1096. inline typename RepeatedField<Element>::iterator RepeatedField<Element>::erase(
  1097. const_iterator position) {
  1098. return erase(position, position + 1);
  1099. }
  1100. template <typename Element>
  1101. inline typename RepeatedField<Element>::iterator RepeatedField<Element>::erase(
  1102. const_iterator first, const_iterator last) {
  1103. size_type first_offset = first - cbegin();
  1104. if (first != last) {
  1105. Truncate(std::copy(last, cend(), begin() + first_offset) - cbegin());
  1106. }
  1107. return begin() + first_offset;
  1108. }
  1109. template <typename Element>
  1110. inline Element* RepeatedField<Element>::mutable_data() {
  1111. return unsafe_elements();
  1112. }
  1113. template <typename Element>
  1114. inline const Element* RepeatedField<Element>::data() const {
  1115. return unsafe_elements();
  1116. }
  1117. template <typename Element>
  1118. inline void RepeatedField<Element>::InternalSwap(RepeatedField* other) {
  1119. GOOGLE_DCHECK(this != other);
  1120. GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
  1121. std::swap(arena_or_elements_, other->arena_or_elements_);
  1122. std::swap(current_size_, other->current_size_);
  1123. std::swap(total_size_, other->total_size_);
  1124. }
  1125. template <typename Element>
  1126. void RepeatedField<Element>::Swap(RepeatedField* other) {
  1127. if (this == other) return;
  1128. if (GetArenaNoVirtual() == other->GetArenaNoVirtual()) {
  1129. InternalSwap(other);
  1130. } else {
  1131. RepeatedField<Element> temp(other->GetArenaNoVirtual());
  1132. temp.MergeFrom(*this);
  1133. CopyFrom(*other);
  1134. other->UnsafeArenaSwap(&temp);
  1135. }
  1136. }
  1137. template <typename Element>
  1138. void RepeatedField<Element>::UnsafeArenaSwap(RepeatedField* other) {
  1139. if (this == other) return;
  1140. InternalSwap(other);
  1141. }
  1142. template <typename Element>
  1143. void RepeatedField<Element>::SwapElements(int index1, int index2) {
  1144. using std::swap; // enable ADL with fallback
  1145. swap(elements()[index1], elements()[index2]);
  1146. }
  1147. template <typename Element>
  1148. inline typename RepeatedField<Element>::iterator
  1149. RepeatedField<Element>::begin() {
  1150. return unsafe_elements();
  1151. }
  1152. template <typename Element>
  1153. inline typename RepeatedField<Element>::const_iterator
  1154. RepeatedField<Element>::begin() const {
  1155. return unsafe_elements();
  1156. }
  1157. template <typename Element>
  1158. inline typename RepeatedField<Element>::const_iterator
  1159. RepeatedField<Element>::cbegin() const {
  1160. return unsafe_elements();
  1161. }
  1162. template <typename Element>
  1163. inline typename RepeatedField<Element>::iterator RepeatedField<Element>::end() {
  1164. return unsafe_elements() + current_size_;
  1165. }
  1166. template <typename Element>
  1167. inline typename RepeatedField<Element>::const_iterator
  1168. RepeatedField<Element>::end() const {
  1169. return unsafe_elements() + current_size_;
  1170. }
  1171. template <typename Element>
  1172. inline typename RepeatedField<Element>::const_iterator
  1173. RepeatedField<Element>::cend() const {
  1174. return unsafe_elements() + current_size_;
  1175. }
  1176. template <typename Element>
  1177. inline size_t RepeatedField<Element>::SpaceUsedExcludingSelfLong() const {
  1178. return total_size_ > 0 ? (total_size_ * sizeof(Element) + kRepHeaderSize) : 0;
  1179. }
  1180. // Avoid inlining of Reserve(): new, copy, and delete[] lead to a significant
  1181. // amount of code bloat.
  1182. template <typename Element>
  1183. void RepeatedField<Element>::Reserve(int new_size) {
  1184. if (total_size_ >= new_size) return;
  1185. Rep* old_rep = total_size_ > 0 ? rep() : NULL;
  1186. Rep* new_rep;
  1187. Arena* arena = GetArenaNoVirtual();
  1188. new_size = std::max(internal::kMinRepeatedFieldAllocationSize,
  1189. std::max(total_size_ * 2, new_size));
  1190. GOOGLE_DCHECK_LE(
  1191. static_cast<size_t>(new_size),
  1192. (std::numeric_limits<size_t>::max() - kRepHeaderSize) / sizeof(Element))
  1193. << "Requested size is too large to fit into size_t.";
  1194. size_t bytes =
  1195. kRepHeaderSize + sizeof(Element) * static_cast<size_t>(new_size);
  1196. if (arena == NULL) {
  1197. new_rep = static_cast<Rep*>(::operator new(bytes));
  1198. } else {
  1199. new_rep = reinterpret_cast<Rep*>(Arena::CreateArray<char>(arena, bytes));
  1200. }
  1201. new_rep->arena = arena;
  1202. int old_total_size = total_size_;
  1203. total_size_ = new_size;
  1204. arena_or_elements_ = new_rep->elements;
  1205. // Invoke placement-new on newly allocated elements. We shouldn't have to do
  1206. // this, since Element is supposed to be POD, but a previous version of this
  1207. // code allocated storage with "new Element[size]" and some code uses
  1208. // RepeatedField with non-POD types, relying on constructor invocation. If
  1209. // Element has a trivial constructor (e.g., int32), gcc (tested with -O2)
  1210. // completely removes this loop because the loop body is empty, so this has no
  1211. // effect unless its side-effects are required for correctness.
  1212. // Note that we do this before MoveArray() below because Element's copy
  1213. // assignment implementation will want an initialized instance first.
  1214. Element* e = &elements()[0];
  1215. Element* limit = e + total_size_;
  1216. for (; e < limit; e++) {
  1217. new (e) Element;
  1218. }
  1219. if (current_size_ > 0) {
  1220. MoveArray(&elements()[0], old_rep->elements, current_size_);
  1221. }
  1222. // Likewise, we need to invoke destructors on the old array.
  1223. InternalDeallocate(old_rep, old_total_size);
  1224. }
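// Illustrative usage sketch (not part of the library; names are hypothetical):
// when the final element count is known up front, a single Reserve() before a
// loop of Add() calls avoids the geometric reallocation performed above.
//
//   RepeatedField<int32> samples;
//   samples.Reserve(1024);     // one allocation
//   for (int i = 0; i < 1024; i++) {
//     samples.Add(i);          // never triggers Reserve() again
//   }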
  1225. template <typename Element>
  1226. inline void RepeatedField<Element>::Truncate(int new_size) {
  1227. GOOGLE_DCHECK_LE(new_size, current_size_);
  1228. if (current_size_ > 0) {
  1229. current_size_ = new_size;
  1230. }
  1231. }
  1232. template <typename Element>
  1233. inline void RepeatedField<Element>::MoveArray(Element* to, Element* from,
  1234. int array_size) {
  1235. CopyArray(to, from, array_size);
  1236. }
  1237. template <typename Element>
  1238. inline void RepeatedField<Element>::CopyArray(Element* to, const Element* from,
  1239. int array_size) {
  1240. internal::ElementCopier<Element>()(to, from, array_size);
  1241. }
  1242. namespace internal {
  1243. template <typename Element, bool HasTrivialCopy>
  1244. void ElementCopier<Element, HasTrivialCopy>::operator()(Element* to,
  1245. const Element* from,
  1246. int array_size) {
  1247. std::copy(from, from + array_size, to);
  1248. }
  1249. template <typename Element>
  1250. struct ElementCopier<Element, true> {
  1251. void operator()(Element* to, const Element* from, int array_size) {
  1252. memcpy(to, from, static_cast<size_t>(array_size) * sizeof(Element));
  1253. }
  1254. };
  1255. } // namespace internal
  1256. // -------------------------------------------------------------------
  1257. namespace internal {
  1258. inline RepeatedPtrFieldBase::RepeatedPtrFieldBase()
  1259. : arena_(NULL), current_size_(0), total_size_(0), rep_(NULL) {}
  1260. inline RepeatedPtrFieldBase::RepeatedPtrFieldBase(Arena* arena)
  1261. : arena_(arena), current_size_(0), total_size_(0), rep_(NULL) {}
  1262. template <typename TypeHandler>
  1263. void RepeatedPtrFieldBase::Destroy() {
  1264. if (rep_ != NULL && arena_ == NULL) {
  1265. int n = rep_->allocated_size;
  1266. void* const* elements = rep_->elements;
  1267. for (int i = 0; i < n; i++) {
  1268. TypeHandler::Delete(cast<TypeHandler>(elements[i]), NULL);
  1269. }
  1270. #if defined(__GXX_DELETE_WITH_SIZE__) || defined(__cpp_sized_deallocation)
  1271. const size_t size = total_size_ * sizeof(elements[0]) + kRepHeaderSize;
  1272. ::operator delete(static_cast<void*>(rep_), size);
  1273. #else
  1274. ::operator delete(static_cast<void*>(rep_));
  1275. #endif
  1276. }
  1277. rep_ = NULL;
  1278. }
  1279. template <typename TypeHandler>
  1280. inline void RepeatedPtrFieldBase::Swap(RepeatedPtrFieldBase* other) {
  1281. if (other->GetArenaNoVirtual() == GetArenaNoVirtual()) {
  1282. InternalSwap(other);
  1283. } else {
  1284. SwapFallback<TypeHandler>(other);
  1285. }
  1286. }
  1287. template <typename TypeHandler>
  1288. void RepeatedPtrFieldBase::SwapFallback(RepeatedPtrFieldBase* other) {
  1289. GOOGLE_DCHECK(other->GetArenaNoVirtual() != GetArenaNoVirtual());
  1290. // Copy semantics in this case. We try to improve efficiency by placing the
  1291. // temporary on |other|'s arena so that messages are copied cross-arena only
  1292. // once, not twice.
  1293. RepeatedPtrFieldBase temp(other->GetArenaNoVirtual());
  1294. temp.MergeFrom<TypeHandler>(*this);
  1295. this->Clear<TypeHandler>();
  1296. this->MergeFrom<TypeHandler>(*other);
  1297. other->Clear<TypeHandler>();
  1298. other->InternalSwap(&temp);
  1299. temp.Destroy<TypeHandler>(); // Frees rep_ if `other` had no arena.
  1300. }
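// Illustrative sketch (hypothetical setup, not part of this header): when two
// repeated fields live on different arenas (or one on an arena and one on the
// heap), the public Swap() ends up in the copy-based fallback above; swapping
// fields that share an arena remains a cheap pointer swap.
//
//   // arena_field belongs to an arena-allocated message, heap_field does not:
//   arena_field->Swap(&heap_field);  // deep-copies elements in both directions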
  1301. inline bool RepeatedPtrFieldBase::empty() const { return current_size_ == 0; }
  1302. inline int RepeatedPtrFieldBase::size() const { return current_size_; }
  1303. template <typename TypeHandler>
  1304. inline const typename TypeHandler::Type& RepeatedPtrFieldBase::Get(
  1305. int index) const {
  1306. GOOGLE_DCHECK_GE(index, 0);
  1307. GOOGLE_DCHECK_LT(index, current_size_);
  1308. return *cast<TypeHandler>(rep_->elements[index]);
  1309. }
  1310. template <typename TypeHandler>
  1311. inline const typename TypeHandler::Type& RepeatedPtrFieldBase::at(
  1312. int index) const {
  1313. GOOGLE_CHECK_GE(index, 0);
  1314. GOOGLE_CHECK_LT(index, current_size_);
  1315. return *cast<TypeHandler>(rep_->elements[index]);
  1316. }
  1317. template <typename TypeHandler>
  1318. inline typename TypeHandler::Type& RepeatedPtrFieldBase::at(int index) {
  1319. GOOGLE_CHECK_GE(index, 0);
  1320. GOOGLE_CHECK_LT(index, current_size_);
  1321. return *cast<TypeHandler>(rep_->elements[index]);
  1322. }
  1323. template <typename TypeHandler>
  1324. inline typename TypeHandler::Type* RepeatedPtrFieldBase::Mutable(int index) {
  1325. GOOGLE_DCHECK_GE(index, 0);
  1326. GOOGLE_DCHECK_LT(index, current_size_);
  1327. return cast<TypeHandler>(rep_->elements[index]);
  1328. }
  1329. template <typename TypeHandler>
  1330. inline void RepeatedPtrFieldBase::Delete(int index) {
  1331. GOOGLE_DCHECK_GE(index, 0);
  1332. GOOGLE_DCHECK_LT(index, current_size_);
  1333. TypeHandler::Delete(cast<TypeHandler>(rep_->elements[index]), arena_);
  1334. }
  1335. template <typename TypeHandler>
  1336. inline typename TypeHandler::Type* RepeatedPtrFieldBase::Add(
  1337. typename TypeHandler::Type* prototype) {
  1338. if (rep_ != NULL && current_size_ < rep_->allocated_size) {
  1339. return cast<TypeHandler>(rep_->elements[current_size_++]);
  1340. }
  1341. if (!rep_ || rep_->allocated_size == total_size_) {
  1342. Reserve(total_size_ + 1);
  1343. }
  1344. ++rep_->allocated_size;
  1345. typename TypeHandler::Type* result =
  1346. TypeHandler::NewFromPrototype(prototype, arena_);
  1347. rep_->elements[current_size_++] = result;
  1348. return result;
  1349. }
  1350. template <typename TypeHandler,
  1351. typename std::enable_if<TypeHandler::Movable::value>::type*>
  1352. inline void RepeatedPtrFieldBase::Add(typename TypeHandler::Type&& value) {
  1353. if (rep_ != NULL && current_size_ < rep_->allocated_size) {
  1354. *cast<TypeHandler>(rep_->elements[current_size_++]) = std::move(value);
  1355. return;
  1356. }
  1357. if (!rep_ || rep_->allocated_size == total_size_) {
  1358. Reserve(total_size_ + 1);
  1359. }
  1360. ++rep_->allocated_size;
  1361. typename TypeHandler::Type* result =
  1362. TypeHandler::New(arena_, std::move(value));
  1363. rep_->elements[current_size_++] = result;
  1364. }
  1365. template <typename TypeHandler>
  1366. inline void RepeatedPtrFieldBase::RemoveLast() {
  1367. GOOGLE_DCHECK_GT(current_size_, 0);
  1368. TypeHandler::Clear(cast<TypeHandler>(rep_->elements[--current_size_]));
  1369. }
  1370. template <typename TypeHandler>
  1371. void RepeatedPtrFieldBase::Clear() {
  1372. const int n = current_size_;
  1373. GOOGLE_DCHECK_GE(n, 0);
  1374. if (n > 0) {
  1375. void* const* elements = rep_->elements;
  1376. int i = 0;
  1377. do {
  1378. TypeHandler::Clear(cast<TypeHandler>(elements[i++]));
  1379. } while (i < n);
  1380. current_size_ = 0;
  1381. }
  1382. }
  1383. // To avoid unnecessary code duplication and reduce binary size, we use a
1384. // layered approach to implementing MergeFrom(). The top-level method is
  1385. // templated, so we get a small thunk per concrete message type in the binary.
  1386. // This calls a shared implementation with most of the logic, passing a function
  1387. // pointer to another type-specific piece of code that calls the object-allocate
  1388. // and merge handlers.
  1389. template <typename TypeHandler>
  1390. inline void RepeatedPtrFieldBase::MergeFrom(const RepeatedPtrFieldBase& other) {
  1391. GOOGLE_DCHECK_NE(&other, this);
  1392. if (other.current_size_ == 0) return;
  1393. MergeFromInternal(other,
  1394. &RepeatedPtrFieldBase::MergeFromInnerLoop<TypeHandler>);
  1395. }
  1396. inline void RepeatedPtrFieldBase::MergeFromInternal(
  1397. const RepeatedPtrFieldBase& other,
  1398. void (RepeatedPtrFieldBase::*inner_loop)(void**, void**, int, int)) {
  1399. // Note: wrapper has already guaranteed that other.rep_ != NULL here.
  1400. int other_size = other.current_size_;
  1401. void** other_elements = other.rep_->elements;
  1402. void** new_elements = InternalExtend(other_size);
  1403. int allocated_elems = rep_->allocated_size - current_size_;
  1404. (this->*inner_loop)(new_elements, other_elements, other_size,
  1405. allocated_elems);
  1406. current_size_ += other_size;
  1407. if (rep_->allocated_size < current_size_) {
  1408. rep_->allocated_size = current_size_;
  1409. }
  1410. }
  1411. // Merges other_elems to our_elems.
  1412. template <typename TypeHandler>
  1413. void RepeatedPtrFieldBase::MergeFromInnerLoop(void** our_elems,
  1414. void** other_elems, int length,
  1415. int already_allocated) {
  1416. // Split into two loops, over ranges [0, allocated) and [allocated, length),
  1417. // to avoid a branch within the loop.
  1418. for (int i = 0; i < already_allocated && i < length; i++) {
  1419. // Already allocated: use existing element.
  1420. typename TypeHandler::Type* other_elem =
  1421. reinterpret_cast<typename TypeHandler::Type*>(other_elems[i]);
  1422. typename TypeHandler::Type* new_elem =
  1423. reinterpret_cast<typename TypeHandler::Type*>(our_elems[i]);
  1424. TypeHandler::Merge(*other_elem, new_elem);
  1425. }
  1426. Arena* arena = GetArenaNoVirtual();
  1427. for (int i = already_allocated; i < length; i++) {
  1428. // Not allocated: alloc a new element first, then merge it.
  1429. typename TypeHandler::Type* other_elem =
  1430. reinterpret_cast<typename TypeHandler::Type*>(other_elems[i]);
  1431. typename TypeHandler::Type* new_elem =
  1432. TypeHandler::NewFromPrototype(other_elem, arena);
  1433. TypeHandler::Merge(*other_elem, new_elem);
  1434. our_elems[i] = new_elem;
  1435. }
  1436. }
  1437. template <typename TypeHandler>
  1438. inline void RepeatedPtrFieldBase::CopyFrom(const RepeatedPtrFieldBase& other) {
  1439. if (&other == this) return;
  1440. RepeatedPtrFieldBase::Clear<TypeHandler>();
  1441. RepeatedPtrFieldBase::MergeFrom<TypeHandler>(other);
  1442. }
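// Illustrative sketch of the public counterparts (contents are hypothetical):
// MergeFrom() appends copies of the other field's elements, while CopyFrom()
// clears the destination first.
//
//   RepeatedPtrField<std::string> a, b;
//   *a.Add() = "x";
//   *b.Add() = "y";
//   a.MergeFrom(b);  // a now holds {"x", "y"}
//   a.CopyFrom(b);   // a now holds {"y"}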
  1443. inline int RepeatedPtrFieldBase::Capacity() const { return total_size_; }
  1444. inline void* const* RepeatedPtrFieldBase::raw_data() const {
  1445. return rep_ ? rep_->elements : NULL;
  1446. }
  1447. inline void** RepeatedPtrFieldBase::raw_mutable_data() const {
  1448. return rep_ ? const_cast<void**>(rep_->elements) : NULL;
  1449. }
  1450. template <typename TypeHandler>
  1451. inline typename TypeHandler::Type** RepeatedPtrFieldBase::mutable_data() {
  1452. // TODO(kenton): Breaks C++ aliasing rules. We should probably remove this
  1453. // method entirely.
  1454. return reinterpret_cast<typename TypeHandler::Type**>(raw_mutable_data());
  1455. }
  1456. template <typename TypeHandler>
  1457. inline const typename TypeHandler::Type* const* RepeatedPtrFieldBase::data()
  1458. const {
  1459. // TODO(kenton): Breaks C++ aliasing rules. We should probably remove this
  1460. // method entirely.
  1461. return reinterpret_cast<const typename TypeHandler::Type* const*>(raw_data());
  1462. }
  1463. inline void RepeatedPtrFieldBase::SwapElements(int index1, int index2) {
  1464. using std::swap; // enable ADL with fallback
  1465. swap(rep_->elements[index1], rep_->elements[index2]);
  1466. }
  1467. template <typename TypeHandler>
  1468. inline size_t RepeatedPtrFieldBase::SpaceUsedExcludingSelfLong() const {
  1469. size_t allocated_bytes = static_cast<size_t>(total_size_) * sizeof(void*);
  1470. if (rep_ != NULL) {
  1471. for (int i = 0; i < rep_->allocated_size; ++i) {
  1472. allocated_bytes +=
  1473. TypeHandler::SpaceUsedLong(*cast<TypeHandler>(rep_->elements[i]));
  1474. }
  1475. allocated_bytes += kRepHeaderSize;
  1476. }
  1477. return allocated_bytes;
  1478. }
  1479. template <typename TypeHandler>
  1480. inline typename TypeHandler::Type* RepeatedPtrFieldBase::AddFromCleared() {
  1481. if (rep_ != NULL && current_size_ < rep_->allocated_size) {
  1482. return cast<TypeHandler>(rep_->elements[current_size_++]);
  1483. } else {
  1484. return NULL;
  1485. }
  1486. }
  1487. // AddAllocated version that implements arena-safe copying behavior.
  1488. template <typename TypeHandler>
  1489. void RepeatedPtrFieldBase::AddAllocatedInternal(
  1490. typename TypeHandler::Type* value, std::true_type) {
  1491. Arena* element_arena =
  1492. reinterpret_cast<Arena*>(TypeHandler::GetMaybeArenaPointer(value));
  1493. Arena* arena = GetArenaNoVirtual();
  1494. if (arena == element_arena && rep_ && rep_->allocated_size < total_size_) {
  1495. // Fast path: underlying arena representation (tagged pointer) is equal to
  1496. // our arena pointer, and we can add to array without resizing it (at least
  1497. // one slot that is not allocated).
  1498. void** elems = rep_->elements;
  1499. if (current_size_ < rep_->allocated_size) {
  1500. // Make space at [current] by moving first allocated element to end of
  1501. // allocated list.
  1502. elems[rep_->allocated_size] = elems[current_size_];
  1503. }
  1504. elems[current_size_] = value;
  1505. current_size_ = current_size_ + 1;
  1506. rep_->allocated_size = rep_->allocated_size + 1;
  1507. } else {
  1508. AddAllocatedSlowWithCopy<TypeHandler>(value, TypeHandler::GetArena(value),
  1509. arena);
  1510. }
  1511. }
  1512. // Slowpath handles all cases, copying if necessary.
  1513. template <typename TypeHandler>
  1514. void RepeatedPtrFieldBase::AddAllocatedSlowWithCopy(
  1515. // Pass value_arena and my_arena to avoid duplicate virtual call (value) or
  1516. // load (mine).
  1517. typename TypeHandler::Type* value, Arena* value_arena, Arena* my_arena) {
  1518. // Ensure that either the value is in the same arena, or if not, we do the
  1519. // appropriate thing: Own() it (if it's on heap and we're in an arena) or copy
  1520. // it to our arena/heap (otherwise).
  1521. if (my_arena != NULL && value_arena == NULL) {
  1522. my_arena->Own(value);
  1523. } else if (my_arena != value_arena) {
  1524. typename TypeHandler::Type* new_value =
  1525. TypeHandler::NewFromPrototype(value, my_arena);
  1526. TypeHandler::Merge(*value, new_value);
  1527. TypeHandler::Delete(value, value_arena);
  1528. value = new_value;
  1529. }
  1530. UnsafeArenaAddAllocated<TypeHandler>(value);
  1531. }
  1532. // AddAllocated version that does not implement arena-safe copying behavior.
  1533. template <typename TypeHandler>
  1534. void RepeatedPtrFieldBase::AddAllocatedInternal(
  1535. typename TypeHandler::Type* value, std::false_type) {
  1536. if (rep_ && rep_->allocated_size < total_size_) {
  1537. // Fast path: underlying arena representation (tagged pointer) is equal to
  1538. // our arena pointer, and we can add to array without resizing it (at least
  1539. // one slot that is not allocated).
  1540. void** elems = rep_->elements;
  1541. if (current_size_ < rep_->allocated_size) {
  1542. // Make space at [current] by moving first allocated element to end of
  1543. // allocated list.
  1544. elems[rep_->allocated_size] = elems[current_size_];
  1545. }
  1546. elems[current_size_] = value;
  1547. current_size_ = current_size_ + 1;
  1548. ++rep_->allocated_size;
  1549. } else {
  1550. UnsafeArenaAddAllocated<TypeHandler>(value);
  1551. }
  1552. }
  1553. template <typename TypeHandler>
  1554. void RepeatedPtrFieldBase::UnsafeArenaAddAllocated(
  1555. typename TypeHandler::Type* value) {
  1556. // Make room for the new pointer.
  1557. if (!rep_ || current_size_ == total_size_) {
  1558. // The array is completely full with no cleared objects, so grow it.
  1559. Reserve(total_size_ + 1);
  1560. ++rep_->allocated_size;
  1561. } else if (rep_->allocated_size == total_size_) {
  1562. // There is no more space in the pointer array because it contains some
  1563. // cleared objects awaiting reuse. We don't want to grow the array in this
  1564. // case because otherwise a loop calling AddAllocated() followed by Clear()
  1565. // would leak memory.
  1566. TypeHandler::Delete(cast<TypeHandler>(rep_->elements[current_size_]),
  1567. arena_);
  1568. } else if (current_size_ < rep_->allocated_size) {
  1569. // We have some cleared objects. We don't care about their order, so we
  1570. // can just move the first one to the end to make space.
  1571. rep_->elements[rep_->allocated_size] = rep_->elements[current_size_];
  1572. ++rep_->allocated_size;
  1573. } else {
  1574. // There are no cleared objects.
  1575. ++rep_->allocated_size;
  1576. }
  1577. rep_->elements[current_size_++] = value;
  1578. }
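// Illustrative sketch (hypothetical names): AddAllocated() adopts an
// already-allocated element. For a heap-resident field it simply stores the
// pointer; when the field lives on an arena, the arena-safe path above either
// registers the pointer with the arena or copies the value onto it.
//
//   RepeatedPtrField<std::string> names;
//   names.AddAllocated(new std::string("carol"));  // the field now owns it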
  1579. // ReleaseLast() for types that implement merge/copy behavior.
  1580. template <typename TypeHandler>
  1581. inline typename TypeHandler::Type* RepeatedPtrFieldBase::ReleaseLastInternal(
  1582. std::true_type) {
  1583. // First, release an element.
  1584. typename TypeHandler::Type* result = UnsafeArenaReleaseLast<TypeHandler>();
  1585. // Now perform a copy if we're on an arena.
  1586. Arena* arena = GetArenaNoVirtual();
  1587. if (arena == NULL) {
  1588. return result;
  1589. } else {
  1590. typename TypeHandler::Type* new_result =
  1591. TypeHandler::NewFromPrototype(result, NULL);
  1592. TypeHandler::Merge(*result, new_result);
  1593. return new_result;
  1594. }
  1595. }
  1596. // ReleaseLast() for types that *do not* implement merge/copy behavior -- this
  1597. // is the same as UnsafeArenaReleaseLast(). Note that we GOOGLE_DCHECK-fail if we're on
  1598. // an arena, since the user really should implement the copy operation in this
  1599. // case.
  1600. template <typename TypeHandler>
  1601. inline typename TypeHandler::Type* RepeatedPtrFieldBase::ReleaseLastInternal(
  1602. std::false_type) {
  1603. GOOGLE_DCHECK(GetArenaNoVirtual() == NULL)
  1604. << "ReleaseLast() called on a RepeatedPtrField that is on an arena, "
  1605. << "with a type that does not implement MergeFrom. This is unsafe; "
  1606. << "please implement MergeFrom for your type.";
  1607. return UnsafeArenaReleaseLast<TypeHandler>();
  1608. }
  1609. template <typename TypeHandler>
  1610. inline typename TypeHandler::Type*
  1611. RepeatedPtrFieldBase::UnsafeArenaReleaseLast() {
  1612. GOOGLE_DCHECK_GT(current_size_, 0);
  1613. typename TypeHandler::Type* result =
  1614. cast<TypeHandler>(rep_->elements[--current_size_]);
  1615. --rep_->allocated_size;
  1616. if (current_size_ < rep_->allocated_size) {
  1617. // There are cleared elements on the end; replace the removed element
  1618. // with the last allocated element.
  1619. rep_->elements[current_size_] = rep_->elements[rep_->allocated_size];
  1620. }
  1621. return result;
  1622. }
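// Illustrative sketch (hypothetical names): ReleaseLast() always returns a
// heap-allocated object the caller must delete, copying out of the arena if
// necessary, whereas UnsafeArenaReleaseLast() hands back the stored pointer
// unchanged.
//
//   RepeatedPtrField<std::string> names;
//   *names.Add() = "dave";
//   std::string* released = names.ReleaseLast();  // caller owns "dave"
//   delete released;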
  1623. inline int RepeatedPtrFieldBase::ClearedCount() const {
  1624. return rep_ ? (rep_->allocated_size - current_size_) : 0;
  1625. }
  1626. template <typename TypeHandler>
  1627. inline void RepeatedPtrFieldBase::AddCleared(
  1628. typename TypeHandler::Type* value) {
  1629. GOOGLE_DCHECK(GetArenaNoVirtual() == NULL)
  1630. << "AddCleared() can only be used on a RepeatedPtrField not on an arena.";
  1631. GOOGLE_DCHECK(TypeHandler::GetArena(value) == NULL)
  1632. << "AddCleared() can only accept values not on an arena.";
  1633. if (!rep_ || rep_->allocated_size == total_size_) {
  1634. Reserve(total_size_ + 1);
  1635. }
  1636. rep_->elements[rep_->allocated_size++] = value;
  1637. }
  1638. template <typename TypeHandler>
  1639. inline typename TypeHandler::Type* RepeatedPtrFieldBase::ReleaseCleared() {
  1640. GOOGLE_DCHECK(GetArenaNoVirtual() == NULL)
  1641. << "ReleaseCleared() can only be used on a RepeatedPtrField not on "
  1642. << "an arena.";
  1643. GOOGLE_DCHECK(GetArenaNoVirtual() == NULL);
  1644. GOOGLE_DCHECK(rep_ != NULL);
  1645. GOOGLE_DCHECK_GT(rep_->allocated_size, current_size_);
  1646. return cast<TypeHandler>(rep_->elements[--rep_->allocated_size]);
  1647. }
  1648. } // namespace internal
  1649. // -------------------------------------------------------------------
  1650. template <typename Element>
  1651. class RepeatedPtrField<Element>::TypeHandler
  1652. : public internal::GenericTypeHandler<Element> {};
  1653. template <>
  1654. class RepeatedPtrField<std::string>::TypeHandler
  1655. : public internal::StringTypeHandler {};
  1656. template <typename Element>
  1657. inline RepeatedPtrField<Element>::RepeatedPtrField() : RepeatedPtrFieldBase() {}
  1658. template <typename Element>
  1659. inline RepeatedPtrField<Element>::RepeatedPtrField(Arena* arena)
  1660. : RepeatedPtrFieldBase(arena) {}
  1661. template <typename Element>
  1662. inline RepeatedPtrField<Element>::RepeatedPtrField(
  1663. const RepeatedPtrField& other)
  1664. : RepeatedPtrFieldBase() {
  1665. MergeFrom(other);
  1666. }
  1667. template <typename Element>
  1668. template <typename Iter>
  1669. inline RepeatedPtrField<Element>::RepeatedPtrField(Iter begin,
  1670. const Iter& end) {
  1671. int reserve = internal::CalculateReserve(begin, end);
  1672. if (reserve != -1) {
  1673. Reserve(reserve);
  1674. }
  1675. for (; begin != end; ++begin) {
  1676. *Add() = *begin;
  1677. }
  1678. }
  1679. template <typename Element>
  1680. RepeatedPtrField<Element>::~RepeatedPtrField() {
  1681. Destroy<TypeHandler>();
  1682. }
  1683. template <typename Element>
  1684. inline RepeatedPtrField<Element>& RepeatedPtrField<Element>::operator=(
  1685. const RepeatedPtrField& other) {
  1686. if (this != &other) CopyFrom(other);
  1687. return *this;
  1688. }
  1689. template <typename Element>
  1690. inline RepeatedPtrField<Element>::RepeatedPtrField(
  1691. RepeatedPtrField&& other) noexcept
  1692. : RepeatedPtrField() {
  1693. // We don't just call Swap(&other) here because it would perform 3 copies if
  1694. // other is on an arena. This field can't be on an arena because arena
  1695. // construction always uses the Arena* accepting constructor.
  1696. if (other.GetArenaNoVirtual()) {
  1697. CopyFrom(other);
  1698. } else {
  1699. InternalSwap(&other);
  1700. }
  1701. }
  1702. template <typename Element>
  1703. inline RepeatedPtrField<Element>& RepeatedPtrField<Element>::operator=(
  1704. RepeatedPtrField&& other) noexcept {
  1705. // We don't just call Swap(&other) here because it would perform 3 copies if
  1706. // the two fields are on different arenas.
  1707. if (this != &other) {
  1708. if (this->GetArenaNoVirtual() != other.GetArenaNoVirtual()) {
  1709. CopyFrom(other);
  1710. } else {
  1711. InternalSwap(&other);
  1712. }
  1713. }
  1714. return *this;
  1715. }
  1716. template <typename Element>
  1717. inline bool RepeatedPtrField<Element>::empty() const {
  1718. return RepeatedPtrFieldBase::empty();
  1719. }
  1720. template <typename Element>
  1721. inline int RepeatedPtrField<Element>::size() const {
  1722. return RepeatedPtrFieldBase::size();
  1723. }
  1724. template <typename Element>
  1725. inline const Element& RepeatedPtrField<Element>::Get(int index) const {
  1726. return RepeatedPtrFieldBase::Get<TypeHandler>(index);
  1727. }
  1728. template <typename Element>
  1729. inline const Element& RepeatedPtrField<Element>::at(int index) const {
  1730. return RepeatedPtrFieldBase::at<TypeHandler>(index);
  1731. }
  1732. template <typename Element>
  1733. inline Element& RepeatedPtrField<Element>::at(int index) {
  1734. return RepeatedPtrFieldBase::at<TypeHandler>(index);
  1735. }
  1736. template <typename Element>
  1737. inline Element* RepeatedPtrField<Element>::Mutable(int index) {
  1738. return RepeatedPtrFieldBase::Mutable<TypeHandler>(index);
  1739. }
  1740. template <typename Element>
  1741. inline Element* RepeatedPtrField<Element>::Add() {
  1742. return RepeatedPtrFieldBase::Add<TypeHandler>();
  1743. }
  1744. template <typename Element>
  1745. inline void RepeatedPtrField<Element>::Add(Element&& value) {
  1746. RepeatedPtrFieldBase::Add<TypeHandler>(std::move(value));
  1747. }
  1748. template <typename Element>
  1749. inline void RepeatedPtrField<Element>::RemoveLast() {
  1750. RepeatedPtrFieldBase::RemoveLast<TypeHandler>();
  1751. }
  1752. template <typename Element>
  1753. inline void RepeatedPtrField<Element>::DeleteSubrange(int start, int num) {
  1754. GOOGLE_DCHECK_GE(start, 0);
  1755. GOOGLE_DCHECK_GE(num, 0);
  1756. GOOGLE_DCHECK_LE(start + num, size());
  1757. for (int i = 0; i < num; ++i) {
  1758. RepeatedPtrFieldBase::Delete<TypeHandler>(start + i);
  1759. }
  1760. ExtractSubrange(start, num, NULL);
  1761. }
  1762. template <typename Element>
  1763. inline void RepeatedPtrField<Element>::ExtractSubrange(int start, int num,
  1764. Element** elements) {
  1765. typename internal::TypeImplementsMergeBehavior<
  1766. typename TypeHandler::Type>::type t;
  1767. ExtractSubrangeInternal(start, num, elements, t);
  1768. }
  1769. // ExtractSubrange() implementation for types that implement merge/copy
  1770. // behavior.
  1771. template <typename Element>
  1772. inline void RepeatedPtrField<Element>::ExtractSubrangeInternal(
  1773. int start, int num, Element** elements, std::true_type) {
  1774. GOOGLE_DCHECK_GE(start, 0);
  1775. GOOGLE_DCHECK_GE(num, 0);
  1776. GOOGLE_DCHECK_LE(start + num, size());
  1777. if (num > 0) {
  1778. // Save the values of the removed elements if requested.
  1779. if (elements != NULL) {
  1780. if (GetArenaNoVirtual() != NULL) {
  1781. // If we're on an arena, we perform a copy for each element so that the
  1782. // returned elements are heap-allocated.
  1783. for (int i = 0; i < num; ++i) {
  1784. Element* element =
  1785. RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start);
  1786. typename TypeHandler::Type* new_value =
  1787. TypeHandler::NewFromPrototype(element, NULL);
  1788. TypeHandler::Merge(*element, new_value);
  1789. elements[i] = new_value;
  1790. }
  1791. } else {
  1792. for (int i = 0; i < num; ++i) {
  1793. elements[i] = RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start);
  1794. }
  1795. }
  1796. }
  1797. CloseGap(start, num);
  1798. }
  1799. }
  1800. // ExtractSubrange() implementation for types that do not implement merge/copy
  1801. // behavior.
  1802. template <typename Element>
  1803. inline void RepeatedPtrField<Element>::ExtractSubrangeInternal(
  1804. int start, int num, Element** elements, std::false_type) {
  1805. // This case is identical to UnsafeArenaExtractSubrange(). However, since
  1806. // ExtractSubrange() must return heap-allocated objects by contract, and we
1807. // cannot fulfill this contract if we are on an arena, we must GOOGLE_DCHECK() that
  1808. // we are not on an arena.
  1809. GOOGLE_DCHECK(GetArenaNoVirtual() == NULL)
  1810. << "ExtractSubrange() when arena is non-NULL is only supported when "
  1811. << "the Element type supplies a MergeFrom() operation to make copies.";
  1812. UnsafeArenaExtractSubrange(start, num, elements);
  1813. }
  1814. template <typename Element>
  1815. inline void RepeatedPtrField<Element>::UnsafeArenaExtractSubrange(
  1816. int start, int num, Element** elements) {
  1817. GOOGLE_DCHECK_GE(start, 0);
  1818. GOOGLE_DCHECK_GE(num, 0);
  1819. GOOGLE_DCHECK_LE(start + num, size());
  1820. if (num > 0) {
  1821. // Save the values of the removed elements if requested.
  1822. if (elements != NULL) {
  1823. for (int i = 0; i < num; ++i) {
  1824. elements[i] = RepeatedPtrFieldBase::Mutable<TypeHandler>(i + start);
  1825. }
  1826. }
  1827. CloseGap(start, num);
  1828. }
  1829. }
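// Illustrative sketch (hypothetical names, field assumed to hold at least
// three elements): ExtractSubrange() removes a range and, when an output array
// is given, transfers ownership of the removed (heap-allocated) elements to
// the caller; use DeleteSubrange() when they should simply be destroyed.
//
//   std::string* extracted[2];
//   names.ExtractSubrange(1, 2, extracted);  // caller now owns extracted[0..1]
//   delete extracted[0];
//   delete extracted[1];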
  1830. template <typename Element>
  1831. inline void RepeatedPtrField<Element>::Clear() {
  1832. RepeatedPtrFieldBase::Clear<TypeHandler>();
  1833. }
  1834. template <typename Element>
  1835. inline void RepeatedPtrField<Element>::MergeFrom(
  1836. const RepeatedPtrField& other) {
  1837. RepeatedPtrFieldBase::MergeFrom<TypeHandler>(other);
  1838. }
  1839. template <typename Element>
  1840. inline void RepeatedPtrField<Element>::CopyFrom(const RepeatedPtrField& other) {
  1841. RepeatedPtrFieldBase::CopyFrom<TypeHandler>(other);
  1842. }
  1843. template <typename Element>
  1844. inline typename RepeatedPtrField<Element>::iterator
  1845. RepeatedPtrField<Element>::erase(const_iterator position) {
  1846. return erase(position, position + 1);
  1847. }
  1848. template <typename Element>
  1849. inline typename RepeatedPtrField<Element>::iterator
  1850. RepeatedPtrField<Element>::erase(const_iterator first, const_iterator last) {
  1851. size_type pos_offset = std::distance(cbegin(), first);
  1852. size_type last_offset = std::distance(cbegin(), last);
  1853. DeleteSubrange(pos_offset, last_offset - pos_offset);
  1854. return begin() + pos_offset;
  1855. }
  1856. template <typename Element>
  1857. inline Element** RepeatedPtrField<Element>::mutable_data() {
  1858. return RepeatedPtrFieldBase::mutable_data<TypeHandler>();
  1859. }
  1860. template <typename Element>
  1861. inline const Element* const* RepeatedPtrField<Element>::data() const {
  1862. return RepeatedPtrFieldBase::data<TypeHandler>();
  1863. }
  1864. template <typename Element>
  1865. inline void RepeatedPtrField<Element>::Swap(RepeatedPtrField* other) {
  1866. if (this == other) return;
  1867. RepeatedPtrFieldBase::Swap<TypeHandler>(other);
  1868. }
  1869. template <typename Element>
  1870. inline void RepeatedPtrField<Element>::UnsafeArenaSwap(
  1871. RepeatedPtrField* other) {
  1872. if (this == other) return;
  1873. RepeatedPtrFieldBase::InternalSwap(other);
  1874. }
  1875. template <typename Element>
  1876. inline void RepeatedPtrField<Element>::SwapElements(int index1, int index2) {
  1877. RepeatedPtrFieldBase::SwapElements(index1, index2);
  1878. }
  1879. template <typename Element>
  1880. inline Arena* RepeatedPtrField<Element>::GetArenaNoVirtual() const {
  1881. return RepeatedPtrFieldBase::GetArenaNoVirtual();
  1882. }
  1883. template <typename Element>
  1884. inline size_t RepeatedPtrField<Element>::SpaceUsedExcludingSelfLong() const {
  1885. return RepeatedPtrFieldBase::SpaceUsedExcludingSelfLong<TypeHandler>();
  1886. }
  1887. template <typename Element>
  1888. inline void RepeatedPtrField<Element>::AddAllocated(Element* value) {
  1889. RepeatedPtrFieldBase::AddAllocated<TypeHandler>(value);
  1890. }
  1891. template <typename Element>
  1892. inline void RepeatedPtrField<Element>::UnsafeArenaAddAllocated(Element* value) {
  1893. RepeatedPtrFieldBase::UnsafeArenaAddAllocated<TypeHandler>(value);
  1894. }
  1895. template <typename Element>
  1896. inline Element* RepeatedPtrField<Element>::ReleaseLast() {
  1897. return RepeatedPtrFieldBase::ReleaseLast<TypeHandler>();
  1898. }
  1899. template <typename Element>
  1900. inline Element* RepeatedPtrField<Element>::UnsafeArenaReleaseLast() {
  1901. return RepeatedPtrFieldBase::UnsafeArenaReleaseLast<TypeHandler>();
  1902. }
  1903. template <typename Element>
  1904. inline int RepeatedPtrField<Element>::ClearedCount() const {
  1905. return RepeatedPtrFieldBase::ClearedCount();
  1906. }
  1907. template <typename Element>
  1908. inline void RepeatedPtrField<Element>::AddCleared(Element* value) {
  1909. return RepeatedPtrFieldBase::AddCleared<TypeHandler>(value);
  1910. }
  1911. template <typename Element>
  1912. inline Element* RepeatedPtrField<Element>::ReleaseCleared() {
  1913. return RepeatedPtrFieldBase::ReleaseCleared<TypeHandler>();
  1914. }
  1915. template <typename Element>
  1916. inline void RepeatedPtrField<Element>::Reserve(int new_size) {
  1917. return RepeatedPtrFieldBase::Reserve(new_size);
  1918. }
  1919. template <typename Element>
  1920. inline int RepeatedPtrField<Element>::Capacity() const {
  1921. return RepeatedPtrFieldBase::Capacity();
  1922. }
  1923. // -------------------------------------------------------------------
  1924. namespace internal {
  1925. // STL-like iterator implementation for RepeatedPtrField. You should not
  1926. // refer to this class directly; use RepeatedPtrField<T>::iterator instead.
  1927. //
  1928. // The iterator for RepeatedPtrField<T>, RepeatedPtrIterator<T>, is
  1929. // very similar to iterator_ptr<T**> in util/gtl/iterator_adaptors.h,
  1930. // but adds random-access operators and is modified to wrap a void** base
  1931. // iterator (since RepeatedPtrField stores its array as a void* array and
  1932. // casting void** to T** would violate C++ aliasing rules).
  1933. //
1934. // This code is based on net/proto/proto-array-internal.h by Jeffrey Yasskin
  1935. // (jyasskin@google.com).
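//
// Illustrative usage sketch (hypothetical field): user code only sees this
// type through RepeatedPtrField<T>::iterator, e.g.
//
//   for (RepeatedPtrField<std::string>::const_iterator it = field.begin();
//        it != field.end(); ++it) {
//     total_length += it->size();
//   }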
  1936. template <typename Element>
  1937. class RepeatedPtrIterator {
  1938. public:
  1939. using iterator = RepeatedPtrIterator<Element>;
  1940. using iterator_category = std::random_access_iterator_tag;
  1941. using value_type = typename std::remove_const<Element>::type;
  1942. using difference_type = std::ptrdiff_t;
  1943. using pointer = Element*;
  1944. using reference = Element&;
  1945. RepeatedPtrIterator() : it_(NULL) {}
  1946. explicit RepeatedPtrIterator(void* const* it) : it_(it) {}
  1947. // Allow "upcasting" from RepeatedPtrIterator<T**> to
  1948. // RepeatedPtrIterator<const T*const*>.
  1949. template <typename OtherElement>
  1950. RepeatedPtrIterator(const RepeatedPtrIterator<OtherElement>& other)
  1951. : it_(other.it_) {
  1952. // Force a compiler error if the other type is not convertible to ours.
  1953. if (false) {
  1954. implicit_cast<Element*>(static_cast<OtherElement*>(nullptr));
  1955. }
  1956. }
  1957. // dereferenceable
  1958. reference operator*() const { return *reinterpret_cast<Element*>(*it_); }
  1959. pointer operator->() const { return &(operator*()); }
  1960. // {inc,dec}rementable
  1961. iterator& operator++() {
  1962. ++it_;
  1963. return *this;
  1964. }
  1965. iterator operator++(int) { return iterator(it_++); }
  1966. iterator& operator--() {
  1967. --it_;
  1968. return *this;
  1969. }
  1970. iterator operator--(int) { return iterator(it_--); }
  1971. // equality_comparable
  1972. bool operator==(const iterator& x) const { return it_ == x.it_; }
  1973. bool operator!=(const iterator& x) const { return it_ != x.it_; }
  1974. // less_than_comparable
  1975. bool operator<(const iterator& x) const { return it_ < x.it_; }
  1976. bool operator<=(const iterator& x) const { return it_ <= x.it_; }
  1977. bool operator>(const iterator& x) const { return it_ > x.it_; }
  1978. bool operator>=(const iterator& x) const { return it_ >= x.it_; }
  1979. // addable, subtractable
  1980. iterator& operator+=(difference_type d) {
  1981. it_ += d;
  1982. return *this;
  1983. }
  1984. friend iterator operator+(iterator it, const difference_type d) {
  1985. it += d;
  1986. return it;
  1987. }
  1988. friend iterator operator+(const difference_type d, iterator it) {
  1989. it += d;
  1990. return it;
  1991. }
  1992. iterator& operator-=(difference_type d) {
  1993. it_ -= d;
  1994. return *this;
  1995. }
  1996. friend iterator operator-(iterator it, difference_type d) {
  1997. it -= d;
  1998. return it;
  1999. }
  2000. // indexable
  2001. reference operator[](difference_type d) const { return *(*this + d); }
  2002. // random access iterator
  2003. difference_type operator-(const iterator& x) const { return it_ - x.it_; }
  2004. private:
  2005. template <typename OtherElement>
  2006. friend class RepeatedPtrIterator;
  2007. // The internal iterator.
  2008. void* const* it_;
  2009. };
  2010. // Provide an iterator that operates on pointers to the underlying objects
  2011. // rather than the objects themselves as RepeatedPtrIterator does.
2012. // Consider using this when working with STL algorithms that change
  2013. // the array.
  2014. // The VoidPtr template parameter holds the type-agnostic pointer value
  2015. // referenced by the iterator. It should either be "void *" for a mutable
  2016. // iterator, or "const void* const" for a constant iterator.
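//
// Illustrative usage sketch (hypothetical field): these iterators are exposed
// as RepeatedPtrField<T>::pointer_begin()/pointer_end(), so mutating STL
// algorithms can reorder the owned pointers without copying the elements, e.g.
//
//   std::sort(field.pointer_begin(), field.pointer_end(),
//             [](const std::string* a, const std::string* b) { return *a < *b; });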
  2017. template <typename Element, typename VoidPtr>
  2018. class RepeatedPtrOverPtrsIterator {
  2019. public:
  2020. using iterator = RepeatedPtrOverPtrsIterator<Element, VoidPtr>;
  2021. using iterator_category = std::random_access_iterator_tag;
  2022. using value_type = typename std::remove_const<Element>::type;
  2023. using difference_type = std::ptrdiff_t;
  2024. using pointer = Element*;
  2025. using reference = Element&;
  2026. RepeatedPtrOverPtrsIterator() : it_(NULL) {}
  2027. explicit RepeatedPtrOverPtrsIterator(VoidPtr* it) : it_(it) {}
  2028. // dereferenceable
  2029. reference operator*() const { return *reinterpret_cast<Element*>(it_); }
  2030. pointer operator->() const { return &(operator*()); }
  2031. // {inc,dec}rementable
  2032. iterator& operator++() {
  2033. ++it_;
  2034. return *this;
  2035. }
  2036. iterator operator++(int) { return iterator(it_++); }
  2037. iterator& operator--() {
  2038. --it_;
  2039. return *this;
  2040. }
  2041. iterator operator--(int) { return iterator(it_--); }
  2042. // equality_comparable
  2043. bool operator==(const iterator& x) const { return it_ == x.it_; }
  2044. bool operator!=(const iterator& x) const { return it_ != x.it_; }
  2045. // less_than_comparable
  2046. bool operator<(const iterator& x) const { return it_ < x.it_; }
  2047. bool operator<=(const iterator& x) const { return it_ <= x.it_; }
  2048. bool operator>(const iterator& x) const { return it_ > x.it_; }
  2049. bool operator>=(const iterator& x) const { return it_ >= x.it_; }
  2050. // addable, subtractable
  2051. iterator& operator+=(difference_type d) {
  2052. it_ += d;
  2053. return *this;
  2054. }
  2055. friend iterator operator+(iterator it, difference_type d) {
  2056. it += d;
  2057. return it;
  2058. }
  2059. friend iterator operator+(difference_type d, iterator it) {
  2060. it += d;
  2061. return it;
  2062. }
  2063. iterator& operator-=(difference_type d) {
  2064. it_ -= d;
  2065. return *this;
  2066. }
  2067. friend iterator operator-(iterator it, difference_type d) {
  2068. it -= d;
  2069. return it;
  2070. }
  2071. // indexable
  2072. reference operator[](difference_type d) const { return *(*this + d); }
  2073. // random access iterator
  2074. difference_type operator-(const iterator& x) const { return it_ - x.it_; }
  2075. private:
  2076. template <typename OtherElement>
  2077. friend class RepeatedPtrIterator;
  2078. // The internal iterator.
  2079. VoidPtr* it_;
  2080. };
2081. inline void RepeatedPtrFieldBase::InternalSwap(RepeatedPtrFieldBase* other) {
  2082. GOOGLE_DCHECK(this != other);
  2083. GOOGLE_DCHECK(GetArenaNoVirtual() == other->GetArenaNoVirtual());
  2084. std::swap(rep_, other->rep_);
  2085. std::swap(current_size_, other->current_size_);
  2086. std::swap(total_size_, other->total_size_);
  2087. }
  2088. } // namespace internal
  2089. template <typename Element>
  2090. inline typename RepeatedPtrField<Element>::iterator
  2091. RepeatedPtrField<Element>::begin() {
  2092. return iterator(raw_data());
  2093. }
  2094. template <typename Element>
  2095. inline typename RepeatedPtrField<Element>::const_iterator
  2096. RepeatedPtrField<Element>::begin() const {
  2097. return iterator(raw_data());
  2098. }
  2099. template <typename Element>
  2100. inline typename RepeatedPtrField<Element>::const_iterator
  2101. RepeatedPtrField<Element>::cbegin() const {
  2102. return begin();
  2103. }
  2104. template <typename Element>
  2105. inline typename RepeatedPtrField<Element>::iterator
  2106. RepeatedPtrField<Element>::end() {
  2107. return iterator(raw_data() + size());
  2108. }
  2109. template <typename Element>
  2110. inline typename RepeatedPtrField<Element>::const_iterator
  2111. RepeatedPtrField<Element>::end() const {
  2112. return iterator(raw_data() + size());
  2113. }
  2114. template <typename Element>
  2115. inline typename RepeatedPtrField<Element>::const_iterator
  2116. RepeatedPtrField<Element>::cend() const {
  2117. return end();
  2118. }
  2119. template <typename Element>
  2120. inline typename RepeatedPtrField<Element>::pointer_iterator
  2121. RepeatedPtrField<Element>::pointer_begin() {
  2122. return pointer_iterator(raw_mutable_data());
  2123. }
  2124. template <typename Element>
  2125. inline typename RepeatedPtrField<Element>::const_pointer_iterator
  2126. RepeatedPtrField<Element>::pointer_begin() const {
  2127. return const_pointer_iterator(const_cast<const void* const*>(raw_data()));
  2128. }
  2129. template <typename Element>
  2130. inline typename RepeatedPtrField<Element>::pointer_iterator
  2131. RepeatedPtrField<Element>::pointer_end() {
  2132. return pointer_iterator(raw_mutable_data() + size());
  2133. }
  2134. template <typename Element>
  2135. inline typename RepeatedPtrField<Element>::const_pointer_iterator
  2136. RepeatedPtrField<Element>::pointer_end() const {
  2137. return const_pointer_iterator(
  2138. const_cast<const void* const*>(raw_data() + size()));
  2139. }
  2140. // Iterators and helper functions that follow the spirit of the STL
  2141. // std::back_insert_iterator and std::back_inserter but are tailor-made
  2142. // for RepeatedField and RepeatedPtrField. Typical usage would be:
  2143. //
  2144. // std::copy(some_sequence.begin(), some_sequence.end(),
  2145. // RepeatedFieldBackInserter(proto.mutable_sequence()));
  2146. //
  2147. // Ported by johannes from util/gtl/proto-array-iterators.h
  2148. namespace internal {
  2149. // A back inserter for RepeatedField objects.
  2150. template <typename T>
  2151. class RepeatedFieldBackInsertIterator
  2152. : public std::iterator<std::output_iterator_tag, T> {
  2153. public:
  2154. explicit RepeatedFieldBackInsertIterator(
  2155. RepeatedField<T>* const mutable_field)
  2156. : field_(mutable_field) {}
  2157. RepeatedFieldBackInsertIterator<T>& operator=(const T& value) {
  2158. field_->Add(value);
  2159. return *this;
  2160. }
  2161. RepeatedFieldBackInsertIterator<T>& operator*() { return *this; }
  2162. RepeatedFieldBackInsertIterator<T>& operator++() { return *this; }
  2163. RepeatedFieldBackInsertIterator<T>& operator++(int /* unused */) {
  2164. return *this;
  2165. }
  2166. private:
  2167. RepeatedField<T>* field_;
  2168. };
  2169. // A back inserter for RepeatedPtrField objects.
  2170. template <typename T>
  2171. class RepeatedPtrFieldBackInsertIterator
  2172. : public std::iterator<std::output_iterator_tag, T> {
  2173. public:
  2174. RepeatedPtrFieldBackInsertIterator(RepeatedPtrField<T>* const mutable_field)
  2175. : field_(mutable_field) {}
  2176. RepeatedPtrFieldBackInsertIterator<T>& operator=(const T& value) {
  2177. *field_->Add() = value;
  2178. return *this;
  2179. }
  2180. RepeatedPtrFieldBackInsertIterator<T>& operator=(
  2181. const T* const ptr_to_value) {
  2182. *field_->Add() = *ptr_to_value;
  2183. return *this;
  2184. }
  2185. RepeatedPtrFieldBackInsertIterator<T>& operator=(T&& value) {
  2186. *field_->Add() = std::move(value);
  2187. return *this;
  2188. }
  2189. RepeatedPtrFieldBackInsertIterator<T>& operator*() { return *this; }
  2190. RepeatedPtrFieldBackInsertIterator<T>& operator++() { return *this; }
  2191. RepeatedPtrFieldBackInsertIterator<T>& operator++(int /* unused */) {
  2192. return *this;
  2193. }
  2194. private:
  2195. RepeatedPtrField<T>* field_;
  2196. };
  2197. // A back inserter for RepeatedPtrFields that inserts by transferring ownership
  2198. // of a pointer.
  2199. template <typename T>
  2200. class AllocatedRepeatedPtrFieldBackInsertIterator
  2201. : public std::iterator<std::output_iterator_tag, T> {
  2202. public:
  2203. explicit AllocatedRepeatedPtrFieldBackInsertIterator(
  2204. RepeatedPtrField<T>* const mutable_field)
  2205. : field_(mutable_field) {}
  2206. AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator=(
  2207. T* const ptr_to_value) {
  2208. field_->AddAllocated(ptr_to_value);
  2209. return *this;
  2210. }
  2211. AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator*() { return *this; }
  2212. AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++() { return *this; }
  2213. AllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++(int /* unused */) {
  2214. return *this;
  2215. }
  2216. private:
  2217. RepeatedPtrField<T>* field_;
  2218. };
  2219. // Almost identical to AllocatedRepeatedPtrFieldBackInsertIterator. This one
2220. // uses UnsafeArenaAddAllocated() instead.
  2221. template <typename T>
  2222. class UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator
  2223. : public std::iterator<std::output_iterator_tag, T> {
  2224. public:
  2225. explicit UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator(
  2226. RepeatedPtrField<T>* const mutable_field)
  2227. : field_(mutable_field) {}
  2228. UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator=(
  2229. T const* const ptr_to_value) {
  2230. field_->UnsafeArenaAddAllocated(const_cast<T*>(ptr_to_value));
  2231. return *this;
  2232. }
  2233. UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator*() {
  2234. return *this;
  2235. }
  2236. UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++() {
  2237. return *this;
  2238. }
  2239. UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++(
  2240. int /* unused */) {
  2241. return *this;
  2242. }
  2243. private:
  2244. RepeatedPtrField<T>* field_;
  2245. };
  2246. } // namespace internal
  2247. // Provides a back insert iterator for RepeatedField instances,
  2248. // similar to std::back_inserter().
  2249. template <typename T>
  2250. internal::RepeatedFieldBackInsertIterator<T> RepeatedFieldBackInserter(
  2251. RepeatedField<T>* const mutable_field) {
  2252. return internal::RepeatedFieldBackInsertIterator<T>(mutable_field);
  2253. }
  2254. // Provides a back insert iterator for RepeatedPtrField instances,
  2255. // similar to std::back_inserter().
  2256. template <typename T>
  2257. internal::RepeatedPtrFieldBackInsertIterator<T> RepeatedPtrFieldBackInserter(
  2258. RepeatedPtrField<T>* const mutable_field) {
  2259. return internal::RepeatedPtrFieldBackInsertIterator<T>(mutable_field);
  2260. }
  2261. // Special back insert iterator for RepeatedPtrField instances, just in
  2262. // case someone wants to write generic template code that can access both
  2263. // RepeatedFields and RepeatedPtrFields using a common name.
  2264. template <typename T>
  2265. internal::RepeatedPtrFieldBackInsertIterator<T> RepeatedFieldBackInserter(
  2266. RepeatedPtrField<T>* const mutable_field) {
  2267. return internal::RepeatedPtrFieldBackInsertIterator<T>(mutable_field);
  2268. }
  2269. // Provides a back insert iterator for RepeatedPtrField instances
2270. // similar to std::back_inserter(), but one that transfers ownership of the
2271. // inserted pointers to the field instead of copying elements.
  2272. template <typename T>
  2273. internal::AllocatedRepeatedPtrFieldBackInsertIterator<T>
  2274. AllocatedRepeatedPtrFieldBackInserter(
  2275. RepeatedPtrField<T>* const mutable_field) {
  2276. return internal::AllocatedRepeatedPtrFieldBackInsertIterator<T>(
  2277. mutable_field);
  2278. }
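// Illustrative sketch (hypothetical message and field names): hands ownership
// of already-allocated objects to the field, in contrast to the copying
// inserter above.
//
//   std::vector<std::string*> owned;  // filled elsewhere with new'd strings
//   std::copy(owned.begin(), owned.end(),
//             AllocatedRepeatedPtrFieldBackInserter(proto.mutable_names()));
//   owned.clear();  // the field now owns every pointer that was copied in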
  2279. // Similar to AllocatedRepeatedPtrFieldBackInserter, using
  2280. // UnsafeArenaAddAllocated instead of AddAllocated.
  2281. // This is slightly faster if that matters. It is also useful in legacy code
  2282. // that uses temporary ownership to avoid copies. Example:
  2283. // RepeatedPtrField<T> temp_field;
  2284. // temp_field.AddAllocated(new T);
  2285. // ... // Do something with temp_field
  2286. // temp_field.ExtractSubrange(0, temp_field.size(), nullptr);
2287. // If temp_field were placed on an arena, this pattern would fail: ownership
2288. // transfers to the arena at the AddAllocated() call and is never released,
2289. // causing a double delete. Using UnsafeArenaAddAllocated() avoids this.
  2290. template <typename T>
  2291. internal::UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>
  2292. UnsafeArenaAllocatedRepeatedPtrFieldBackInserter(
  2293. RepeatedPtrField<T>* const mutable_field) {
  2294. return internal::UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>(
  2295. mutable_field);
  2296. }
2297. // Extern declarations of common instantiations to reduce library bloat.
  2298. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<bool>;
  2299. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<int32>;
  2300. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<uint32>;
  2301. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<int64>;
  2302. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<uint64>;
  2303. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<float>;
  2304. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE RepeatedField<double>;
  2305. extern template class PROTOBUF_EXPORT_TEMPLATE_DECLARE
  2306. RepeatedPtrField<std::string>;
  2307. } // namespace protobuf
  2308. } // namespace google
  2309. #include <google/protobuf/port_undef.inc>
  2310. #endif // GOOGLE_PROTOBUF_REPEATED_FIELD_H__