/*
 * Copyright (c) 2000-2007 Niels Provos <provos@citi.umich.edu>
 * Copyright (c) 2007-2012 Niels Provos and Nick Mathewson
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
#ifndef EVBUFFER_INTERNAL_H_INCLUDED_
#define EVBUFFER_INTERNAL_H_INCLUDED_

#ifdef __cplusplus
extern "C" {
#endif

#include "event2/event-config.h"
#include "evconfig-private.h"
#include "event2/util.h"
#include "event2/event_struct.h"
#include "util-internal.h"
#include "defer-internal.h"

/* Experimental cb flag: "never deferred."  Implementation note:
 * these callbacks may get an inaccurate view of n_del/n_added in their
 * arguments. */
#define EVBUFFER_CB_NODEFER 2

#ifdef _WIN32
#include <winsock2.h>
#endif
#include <sys/queue.h>

/* Minimum allocation for a chain.  We define this so that we're burning no
 * more than 5% of each allocation on overhead.  It would be nice to lose even
 * less space, though. */
#if EVENT__SIZEOF_VOID_P < 8
#define MIN_BUFFER_SIZE	512
#else
#define MIN_BUFFER_SIZE	1024
#endif
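
/* Back-of-the-envelope check (illustrative figures, not normative): on a
 * 64-bit build the per-chain header (struct evbuffer_chain, below) is on the
 * order of 48 bytes, so 48 / 1024 is roughly 4.7% of a minimum-size
 * allocation spent on overhead, which is the 5% target mentioned above.
 * The exact header size depends on the platform's type widths and padding. */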

/** A single callback for an evbuffer. This function will be invoked
 * when bytes are added to or removed from the evbuffer. */
struct evbuffer_cb_entry {
	/** Structures to implement a doubly-linked queue of callbacks */
	LIST_ENTRY(evbuffer_cb_entry) next;
	/** The callback function to invoke when this callback is called.
	    If EVBUFFER_CB_OBSOLETE is set in flags, the cb_obsolete field is
	    valid; otherwise, cb_func is valid. */
	union {
		evbuffer_cb_func cb_func;
		evbuffer_cb cb_obsolete;
	} cb;
	/** Argument to pass to cb. */
	void *cbarg;
	/** Currently set flags on this callback. */
	ev_uint32_t flags;
};

struct bufferevent;
struct evbuffer_chain;
struct evbuffer {
	/** The first chain in this buffer's linked list of chains. */
	struct evbuffer_chain *first;
	/** The last chain in this buffer's linked list of chains. */
	struct evbuffer_chain *last;

	/** Pointer to the next pointer pointing at the 'last_with_data' chain.
	 *
	 * To unpack:
	 *
	 * The last_with_data chain is the last chain that has any data in it.
	 * If all chains in the buffer are empty, it is the first chain.
	 * If the buffer has no chains, it is NULL.
	 *
	 * The last_with_datap pointer points at _whatever 'next' pointer_
	 * points at the last_with_data chain.  If the last_with_data chain
	 * is the first chain, or if it is NULL, then the last_with_datap
	 * pointer is &buf->first.
	 */
	struct evbuffer_chain **last_with_datap;
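
	/* Illustrative example of the invariant above (a sketch, not
	 * normative): for a buffer whose chains are
	 *     first -> A (has data) -> B (has data) -> C (empty) -> NULL
	 * the last_with_data chain is B, so last_with_datap == &A->next,
	 * because A->next is the pointer that points at B.  For a buffer
	 * with no chains, or whose first chain is the last one with data,
	 * last_with_datap == &buf->first. */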

	/** Total number of bytes stored in all chains. */
	size_t total_len;

	/** Number of bytes we have added to the buffer since we last tried to
	 * invoke callbacks. */
	size_t n_add_for_cb;
	/** Number of bytes we have removed from the buffer since we last
	 * tried to invoke callbacks. */
	size_t n_del_for_cb;

#ifndef EVENT__DISABLE_THREAD_SUPPORT
	/** A lock used to mediate access to this buffer. */
	void *lock;
#endif
	/** True iff we should free the lock field when we free this
	 * evbuffer. */
	unsigned own_lock : 1;
	/** True iff we should not allow changes to the front of the buffer
	 * (drains or prepends). */
	unsigned freeze_start : 1;
	/** True iff we should not allow changes to the end of the buffer
	 * (appends). */
	unsigned freeze_end : 1;
	/** True iff this evbuffer's callbacks are not invoked immediately
	 * upon a change in the buffer, but instead are deferred to be invoked
	 * from the event_base's loop.  Useful for preventing enormous stack
	 * overflows when we have mutually recursive callbacks, and for
	 * serializing callbacks in a single thread. */
	unsigned deferred_cbs : 1;
#ifdef _WIN32
	/** True iff this buffer is set up for overlapped IO. */
	unsigned is_overlapped : 1;
#endif
	/** Zero or more EVBUFFER_FLAG_* bits */
	ev_uint32_t flags;

	/** Used to implement deferred callbacks. */
	struct event_base *cb_queue;

	/** A reference count on this evbuffer.  When the reference count
	 * reaches 0, the buffer is destroyed.  Manipulated with
	 * evbuffer_incref_(), evbuffer_decref_and_unlock_(), and
	 * evbuffer_free(). */
	int refcnt;

	/** A struct event_callback handle used to have all of this buffer's
	 * callbacks invoked from the event loop. */
	struct event_callback deferred;

	/** A doubly-linked-list of callback functions */
	LIST_HEAD(evbuffer_cb_queue, evbuffer_cb_entry) callbacks;

	/** The parent bufferevent object this evbuffer belongs to.
	 * NULL if the evbuffer stands alone. */
	struct bufferevent *parent;
};

/* Pick a signed misalign type wide enough to hold a file offset, and a
 * maximum chain size that fits in both that type and size_t. */
#if EVENT__SIZEOF_OFF_T < EVENT__SIZEOF_SIZE_T
typedef ev_ssize_t ev_misalign_t;
#define EVBUFFER_CHAIN_MAX ((size_t)EV_SSIZE_MAX)
#else
typedef ev_off_t ev_misalign_t;
#if EVENT__SIZEOF_OFF_T > EVENT__SIZEOF_SIZE_T
#define EVBUFFER_CHAIN_MAX EV_SIZE_MAX
#else
#define EVBUFFER_CHAIN_MAX ((size_t)EV_SSIZE_MAX)
#endif
#endif

/** A single item in an evbuffer. */
struct evbuffer_chain {
	/** points to next buffer in the chain */
	struct evbuffer_chain *next;

	/** total allocation available in the buffer field. */
	size_t buffer_len;

	/** unused space at the beginning of buffer or an offset into a
	 * file for sendfile buffers. */
	ev_misalign_t misalign;

	/** Offset into buffer + misalign at which to start writing.
	 * In other words, the total number of bytes actually stored
	 * in buffer. */
	size_t off;
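
	/* Illustrative layout of the chain's memory (a sketch, not normative):
	 *
	 *   [ misalign bytes of unused space ][ off bytes of data ][ free space ]
	 *   ^buffer                           ^buffer+misalign     ^buffer+misalign+off
	 *
	 * Readable data starts at buffer+misalign; new writes begin at
	 * buffer+misalign+off, and buffer_len is the total allocation. */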

	/** Set if special handling is required for this chain */
	unsigned flags;
#define EVBUFFER_FILESEGMENT	0x0001  /**< A chain used for a file segment */
#define EVBUFFER_SENDFILE	0x0002	/**< a chain used with sendfile */
#define EVBUFFER_REFERENCE	0x0004	/**< a chain with a mem reference */
#define EVBUFFER_IMMUTABLE	0x0008	/**< read-only chain */
	/** a chain that mustn't be reallocated or freed, or have its contents
	 * memmoved, until the chain is un-pinned. */
#define EVBUFFER_MEM_PINNED_R	0x0010
#define EVBUFFER_MEM_PINNED_W	0x0020
#define EVBUFFER_MEM_PINNED_ANY (EVBUFFER_MEM_PINNED_R|EVBUFFER_MEM_PINNED_W)
	/** a chain that should be freed, but can't be freed until it is
	 * un-pinned. */
#define EVBUFFER_DANGLING	0x0040
	/** a chain that is a referenced copy of another chain */
#define EVBUFFER_MULTICAST	0x0080

	/** number of references to this chain */
	int refcnt;

	/** Usually points to the read-write memory belonging to this
	 * buffer allocated as part of the evbuffer_chain allocation.
	 * For mmap, this can be a read-only buffer and
	 * EVBUFFER_IMMUTABLE will be set in flags.  For sendfile, it
	 * may be NULL.
	 */
	unsigned char *buffer;
};

/** callback for a reference chain; lets us know what to do with it when
 * we're done with it. Lives at the end of an evbuffer_chain with the
 * EVBUFFER_REFERENCE flag set */
struct evbuffer_chain_reference {
	evbuffer_ref_cleanup_cb cleanupfn;
	void *extra;
};

/** File segment for a file-segment chain.  Lives at the end of an
 * evbuffer_chain with the EVBUFFER_FILESEGMENT flag set.  */
struct evbuffer_chain_file_segment {
	struct evbuffer_file_segment *segment;
#ifdef _WIN32
	/** If we're using CreateFileMapping, this is the handle to the view. */
	HANDLE view_handle;
#endif
};

/* Declared in event2/buffer.h; defined here. */
struct evbuffer_file_segment {
	void *lock; /**< A lock to prevent concurrent access to refcnt */
	int refcnt; /**< Reference count for this file segment */
	unsigned flags; /**< combination of EVBUF_FS_* flags */

	/** What kind of file segment is this? */
	unsigned can_sendfile : 1;
	unsigned is_mapping : 1;

	/** The fd that we read the data from. */
	int fd;
	/** If we're using mmap, this is the raw mapped memory. */
	void *mapping;
#ifdef _WIN32
	/** If we're using CreateFileMapping, this is the mapping */
	HANDLE mapping_handle;
#endif
	/** If we're using mmap or IO, this is the content of the file
	 * segment. */
	char *contents;
	/** Position of this segment within the file. */
	ev_off_t file_offset;
	/** If we're using mmap, this is the offset within 'mapping' where
	 * this data segment begins. */
	ev_off_t mmap_offset;
	/** The length of this segment. */
	ev_off_t length;
	/** Cleanup callback function */
	evbuffer_file_segment_cleanup_cb cleanup_cb;
	/** Argument to be passed to the cleanup callback function */
	void *cleanup_cb_arg;
};

/** Information about the multicast parent of a chain.  Lives at the
 * end of an evbuffer_chain with the EVBUFFER_MULTICAST flag set.  */
struct evbuffer_multicast_parent {
	/** source buffer the multicast parent belongs to */
	struct evbuffer *source;
	/** multicast parent for this chain */
	struct evbuffer_chain *parent;
};

#define EVBUFFER_CHAIN_SIZE sizeof(struct evbuffer_chain)
/** Return a pointer to the extra data allocated along with an evbuffer chain. */
#define EVBUFFER_CHAIN_EXTRA(t, c) (t *)((struct evbuffer_chain *)(c) + 1)
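
/* Hedged usage sketch for EVBUFFER_CHAIN_EXTRA (variable names here are
 * illustrative): for a chain allocated with room for a reference record
 * after it, the record is reached as
 *
 *	struct evbuffer_chain_reference *info =
 *	    EVBUFFER_CHAIN_EXTRA(struct evbuffer_chain_reference, chain);
 *
 * i.e. the per-type extra struct lives immediately after the
 * struct evbuffer_chain header in the same allocation. */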

/** Assert that we are holding the lock on an evbuffer */
#define ASSERT_EVBUFFER_LOCKED(buffer)			\
	EVLOCK_ASSERT_LOCKED((buffer)->lock)

#define EVBUFFER_LOCK(buffer)						\
	do {								\
		EVLOCK_LOCK((buffer)->lock, 0);				\
	} while (0)
#define EVBUFFER_UNLOCK(buffer)						\
	do {								\
		EVLOCK_UNLOCK((buffer)->lock, 0);			\
	} while (0)
#define EVBUFFER_LOCK2(buffer1, buffer2)				\
	do {								\
		EVLOCK_LOCK2((buffer1)->lock, (buffer2)->lock, 0, 0);	\
	} while (0)
#define EVBUFFER_UNLOCK2(buffer1, buffer2)				\
	do {								\
		EVLOCK_UNLOCK2((buffer1)->lock, (buffer2)->lock, 0, 0);	\
	} while (0)
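
/* Usage note (a sketch of the intended pattern, not a contract): code that
 * touches a single evbuffer brackets the work with EVBUFFER_LOCK(buf) /
 * EVBUFFER_UNLOCK(buf); code that moves data between two evbuffers (for
 * example an add_buffer-style operation) takes both locks at once with
 * EVBUFFER_LOCK2(dst, src), letting the underlying EVLOCK_LOCK2 acquire them
 * in a consistent order, and releases them with EVBUFFER_UNLOCK2(dst, src). */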

/** Increase the reference count of buf by one. */
void evbuffer_incref_(struct evbuffer *buf);
/** Increase the reference count of buf by one and acquire the lock. */
void evbuffer_incref_and_lock_(struct evbuffer *buf);
/** Pin a single buffer chain using a given flag. A pinned chunk may not be
 * moved or freed until it is unpinned. */
void evbuffer_chain_pin_(struct evbuffer_chain *chain, unsigned flag);
/** Unpin a single buffer chain using a given flag. */
void evbuffer_chain_unpin_(struct evbuffer_chain *chain, unsigned flag);
/** As evbuffer_free, but requires that we hold a lock on the buffer, and
 * releases the lock before freeing the lock and the buffer. */
void evbuffer_decref_and_unlock_(struct evbuffer *buffer);

/** As evbuffer_expand, but does not guarantee that the newly allocated memory
 * is contiguous.  Instead, it may be split across two or more chunks. */
int evbuffer_expand_fast_(struct evbuffer *, size_t, int);

/** Helper: prepares for a readv/WSARecv call by expanding the buffer to
 * hold enough memory to read 'howmuch' bytes in possibly noncontiguous memory.
 * Sets up the one or two iovecs in 'vecs' to point to the free memory and its
 * extent, and *chainp to point to the first chain that we'll try to read into.
 * Returns the number of vecs used.
 */
int evbuffer_read_setup_vecs_(struct evbuffer *buf, ev_ssize_t howmuch,
    struct evbuffer_iovec *vecs, int n_vecs, struct evbuffer_chain ***chainp,
    int exact);
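
/* Hedged usage sketch for evbuffer_read_setup_vecs_ (the literal values are
 * illustrative only): a caller that wants to read up to 4096 bytes might do
 *
 *	struct evbuffer_iovec vecs[2];
 *	struct evbuffer_chain **chainp = NULL;
 *	int nvecs = evbuffer_read_setup_vecs_(buf, 4096, vecs, 2, &chainp, 0);
 *
 * and then hand the first 'nvecs' entries to readv()/WSARecv(), recording
 * afterwards however many bytes were actually read. */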

/* Helper macro: copies an evbuffer_iovec in ei to a win32 WSABUF in i. */
#define WSABUF_FROM_EVBUFFER_IOV(i,ei) do {		\
		(i)->buf = (ei)->iov_base;		\
		(i)->len = (unsigned long)(ei)->iov_len;	\
	} while (0)
/* XXXX the cast above is safe for now, but not if we allow mmaps on win64.
 * See note in buffer_iocp's launch_write function */

/** Set the parent bufferevent object for buf to bev */
void evbuffer_set_parent_(struct evbuffer *buf, struct bufferevent *bev);

/** Invoke (or, if deferred callbacks are enabled, schedule) the callbacks
 * registered on buf. */
void evbuffer_invoke_callbacks_(struct evbuffer *buf);

/** Store in 'cbs' up to 'max_cbs' pointers to the event_callback objects
 * that this buffer uses to run deferred callbacks, and return how many
 * were stored. */
int evbuffer_get_callbacks_(struct evbuffer *buffer,
    struct event_callback **cbs,
    int max_cbs);

#ifdef __cplusplus
}
#endif

#endif /* EVBUFFER_INTERNAL_H_INCLUDED_ */