1 /* -*- Mode: C; c-basic-offset: 4 -*-
2 * vim: tabstop=4 shiftwidth=4 expandtab
4 * Copyright (C) 2011 John (J5) Palmieri <johnp@redhat.com>
5 * Copyright (C) 2014 Simon Feltman <sfeltman@gnome.org>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2.1 of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
23 #include <pyglib-python-compat.h>
25 #include "pygi-array.h"
26 #include "pygi-info.h"
27 #include "pygi-marshal-cleanup.h"
28 #include "pygi-basictype.h"
29 #include "pygi-util.h"
31 /* Needed for _pygi_marshal_cleanup_from_py_interface_struct_gvalue hack */
32 #include "pygi-struct-marshal.h"
/* gi_argument_from_py_ssize_t:
 * Store a C Py_ssize_t value (typically an array length) into the
 * GIArgument union member selected by the target integer type tag,
 * range-checking each narrower width against its G_MININTn/G_MAXINTn
 * limits.  Out-of-range values raise OverflowError; non-integer type
 * tags raise TypeError.
 *
 * NOTE(review): this extract is truncated — the leading numerals on
 * each line are leaked line numbers from the original file, and the
 * function's signature tail, switch header, break/return statements
 * and success path are not visible here.
 */
39 gi_argument_from_py_ssize_t (GIArgument *arg_out,
/* VOID/BOOLEAN have no meaningful length representation; presumably
 * they fall through to the TypeError path — jump not visible here. */
44 case GI_TYPE_TAG_VOID:
45 case GI_TYPE_TAG_BOOLEAN:
/* Each signed/unsigned width below only stores the value when it fits;
 * the out-of-range branch (not visible) presumably reaches the
 * OverflowError report at the bottom. */
48 case GI_TYPE_TAG_INT8:
49 if (size_in >= G_MININT8 && size_in <= G_MAXINT8) {
50 arg_out->v_int8 = size_in;
56 case GI_TYPE_TAG_UINT8:
57 if (size_in >= 0 && size_in <= G_MAXUINT8) {
58 arg_out->v_uint8 = size_in;
64 case GI_TYPE_TAG_INT16:
65 if (size_in >= G_MININT16 && size_in <= G_MAXINT16) {
66 arg_out->v_int16 = size_in;
72 case GI_TYPE_TAG_UINT16:
73 if (size_in >= 0 && size_in <= G_MAXUINT16) {
74 arg_out->v_uint16 = size_in;
80 /* Ranges assume two's complement */
81 case GI_TYPE_TAG_INT32:
82 if (size_in >= G_MININT32 && size_in <= G_MAXINT32) {
83 arg_out->v_int32 = size_in;
89 case GI_TYPE_TAG_UINT32:
90 if (size_in >= 0 && size_in <= G_MAXUINT32) {
91 arg_out->v_uint32 = size_in;
/* 64-bit stores are unchecked: Py_ssize_t always fits in int64 on
 * supported platforms (negative -> uint64 handling not visible). */
97 case GI_TYPE_TAG_INT64:
98 arg_out->v_int64 = size_in;
101 case GI_TYPE_TAG_UINT64:
103 arg_out->v_uint64 = size_in;
/* All non-integer tags: cannot hold a length at all. */
109 case GI_TYPE_TAG_FLOAT:
110 case GI_TYPE_TAG_DOUBLE:
111 case GI_TYPE_TAG_GTYPE:
112 case GI_TYPE_TAG_UTF8:
113 case GI_TYPE_TAG_FILENAME:
114 case GI_TYPE_TAG_ARRAY:
115 case GI_TYPE_TAG_INTERFACE:
116 case GI_TYPE_TAG_GLIST:
117 case GI_TYPE_TAG_GSLIST:
118 case GI_TYPE_TAG_GHASH:
119 case GI_TYPE_TAG_ERROR:
120 case GI_TYPE_TAG_UNICHAR:
/* Error target for values that do not fit the selected width. */
126 PyErr_Format (PyExc_OverflowError,
127 "Unable to marshal C Py_ssize_t %zd to %s",
129 g_type_tag_to_string (type_tag));
/* Error target for type tags that cannot represent a length. */
133 PyErr_Format (PyExc_TypeError,
134 "Unable to marshal C Py_ssize_t %zd to %s",
136 g_type_tag_to_string (type_tag));
/* gi_argument_to_gsize:
 * Read an integer-tagged GIArgument (a length argument produced by a C
 * call) and widen/convert it into *gsize_out.  Non-integer type tags
 * raise TypeError.
 *
 * NOTE(review): truncated extract — the signature tail, switch header,
 * break/return statements and closing braces are not visible; signed
 * negative values assigned to gsize would wrap (conversion to an
 * unsigned type) — any guard for that is not visible here.
 */
141 gi_argument_to_gsize (GIArgument *arg_in,
146 case GI_TYPE_TAG_INT8:
147 *gsize_out = arg_in->v_int8;
149 case GI_TYPE_TAG_UINT8:
150 *gsize_out = arg_in->v_uint8;
152 case GI_TYPE_TAG_INT16:
153 *gsize_out = arg_in->v_int16;
155 case GI_TYPE_TAG_UINT16:
156 *gsize_out = arg_in->v_uint16;
158 case GI_TYPE_TAG_INT32:
159 *gsize_out = arg_in->v_int32;
161 case GI_TYPE_TAG_UINT32:
162 *gsize_out = arg_in->v_uint32;
164 case GI_TYPE_TAG_INT64:
165 *gsize_out = arg_in->v_int64;
167 case GI_TYPE_TAG_UINT64:
168 *gsize_out = arg_in->v_uint64;
/* Fallback for non-integer tags (default branch, header not visible). */
171 PyErr_Format (PyExc_TypeError,
172 "Unable to marshal %s to gsize",
173 g_type_tag_to_string (type_tag));
/* _pygi_marshal_from_py_array:
 * Convert a Python sequence into the array representation expected by
 * the C callee: a raw C array, a GArray, or a GPtrArray, according to
 * arg_cache/array_cache.  Handles:
 *   - Py_None -> NULL pointer,
 *   - a zero-copy / single-copy fast path for bytes -> uint8 arrays,
 *   - per-item marshalling via the item cache's from_py_marshaller,
 *   - flat arrays of GValue/boxed/struct interface items,
 *   - a companion length argument (len_arg_index),
 *   - transfer-mode-dependent ownership recorded in *cleanup_data for
 *     _pygi_marshal_cleanup_from_py_array.
 * On failure, already-marshalled items are cleaned up and FALSE is
 * presumably returned (returns not visible in this truncated extract).
 *
 * NOTE(review): the leading numerals are leaked line numbers; many
 * lines (declarations of length/i/j/item_size, returns, braces) are
 * missing from this extract.
 */
179 _pygi_marshal_from_py_array (PyGIInvokeState *state,
180 PyGICallableCache *callable_cache,
181 PyGIArgCache *arg_cache,
184 gpointer *cleanup_data)
186 PyGIMarshalFromPyFunc from_py_marshaller;
188 int success_count = 0;
191 gboolean is_ptr_array;
192 GArray *array_ = NULL;
193 PyGISequenceCache *sequence_cache = (PyGISequenceCache *)arg_cache;
194 PyGIArgGArray *array_cache = (PyGIArgGArray *)arg_cache;
/* Default ownership; may be overridden by the bytes fast path below. */
195 GITransfer cleanup_transfer = arg_cache->transfer;
/* None maps to a NULL array pointer. */
198 if (py_arg == Py_None) {
199 arg->v_pointer = NULL;
203 if (!PySequence_Check (py_arg)) {
204 PyErr_Format (PyExc_TypeError, "Must be sequence, not %s",
205 py_arg->ob_type->tp_name);
209 length = PySequence_Length (py_arg);
/* Fixed-size (GI) arrays must match the declared element count. */
213 if (array_cache->fixed_size >= 0 &&
214 array_cache->fixed_size != length) {
215 PyErr_Format (PyExc_ValueError, "Must contain %zd items, not %zd",
216 array_cache->fixed_size, length);
221 item_size = array_cache->item_size;
222 is_ptr_array = (array_cache->array_type == GI_ARRAY_TYPE_PTR_ARRAY);
/* A GArray wrapper is used for all array kinds while building; for
 * PTR_ARRAY a GPtrArray is allocated instead (cast back as needed). */
224 array_ = (GArray *)g_ptr_array_sized_new (length);
226 array_ = g_array_sized_new (array_cache->is_zero_terminated,
232 if (array_ == NULL) {
/* Fast path: bytes -> uint8 array skips per-item marshalling. */
237 if (sequence_cache->item_cache->type_tag == GI_TYPE_TAG_UINT8 &&
238 PYGLIB_PyBytes_Check (py_arg)) {
239 gchar *data = PYGLIB_PyBytes_AsString (py_arg);
241 /* Avoid making a copy if the data
242 * is not transferred to the C function
243 * and cannot not be modified by it.
245 if (array_cache->array_type == GI_ARRAY_TYPE_C &&
246 arg_cache->transfer == GI_TRANSFER_NOTHING &&
247 !array_cache->is_zero_terminated) {
/* Borrow the bytes object's internal buffer directly: drop the
 * GArray's own allocation and pretend everything was transferred so
 * cleanup does not free Python-owned memory. */
248 g_free (array_->data);
250 cleanup_transfer = GI_TRANSFER_EVERYTHING;
252 memcpy (array_->data, data, length);
254 array_->len = length;
255 if (array_cache->is_zero_terminated) {
256 /* If array_ has been created with zero_termination, space for the
257 * terminator is properly allocated, so we're not off-by-one here. */
258 array_->data[length] = '\0';
/* General path: marshal each sequence item through the item cache. */
263 from_py_marshaller = sequence_cache->item_cache->from_py_marshaller;
264 for (i = 0, success_count = 0; i < length; i++) {
265 GIArgument item = {0};
266 gpointer item_cleanup_data = NULL;
267 PyObject *py_item = PySequence_GetItem (py_arg, i);
271 if (!from_py_marshaller ( state,
273 sequence_cache->item_cache,
276 &item_cleanup_data)) {
282 if (item_cleanup_data != NULL && item_cleanup_data != item.v_pointer) {
283 /* We only support one level of data discrepancy between an items
284 * data and its cleanup data. This is because we only track a single
285 * extra cleanup data pointer per-argument and cannot track the entire
286 * array of items differing data and cleanup_data.
287 * For example, this would fail if trying to marshal an array of
288 * callback closures marked with SCOPE call type where the cleanup data
289 * is different from the items v_pointer, likewise an array of arrays.
291 PyErr_SetString(PyExc_RuntimeError, "Cannot cleanup item data for array due to "
292 "the items data its cleanup data being different.");
296 /* FIXME: it is much more efficent to have seperate marshaller
297 * for ptr arrays than doing the evaluation
298 * and casting each loop iteration
301 g_ptr_array_add((GPtrArray *)array_, item.v_pointer);
302 } else if (sequence_cache->item_cache->is_pointer) {
303 /* if the item is a pointer, simply copy the pointer */
304 g_assert (item_size == sizeof (item.v_pointer));
305 g_array_insert_val (array_, i, item);
306 } else if (sequence_cache->item_cache->type_tag == GI_TYPE_TAG_INTERFACE) {
307 /* Special case handling of flat arrays of gvalue/boxed/struct */
308 PyGIInterfaceCache *item_iface_cache = (PyGIInterfaceCache *) sequence_cache->item_cache;
309 GIBaseInfo *base_info = (GIBaseInfo *) item_iface_cache->interface_info;
310 GIInfoType info_type = g_base_info_get_type (base_info);
313 case GI_INFO_TYPE_UNION:
314 case GI_INFO_TYPE_STRUCT:
316 PyGIArgCache *item_arg_cache = (PyGIArgCache *)item_iface_cache;
317 PyGIMarshalCleanupFunc from_py_cleanup = item_arg_cache->from_py_cleanup;
319 if (g_type_is_a (item_iface_cache->g_type, G_TYPE_VALUE)) {
320 /* Special case GValue flat arrays to properly init and copy the contents. */
321 GValue* dest = (GValue*) (array_->data + (i * item_size));
322 if (item.v_pointer != NULL) {
323 memset (dest, 0, item_size);
324 g_value_init (dest, G_VALUE_TYPE ((GValue*) item.v_pointer));
325 g_value_copy ((GValue*) item.v_pointer, dest);
327 /* Manually increment the length because we are manually setting the memory. */
331 /* Handles flat arrays of boxed or struct types. */
332 g_array_insert_vals (array_, i, item.v_pointer, 1);
335 /* Cleanup any memory left by the per-item marshaler because
336 * _pygi_marshal_cleanup_from_py_array will not know about this
337 * due to "item" being a temporarily marshaled value done on the stack.
340 from_py_cleanup (state, item_arg_cache, py_item, item_cleanup_data, TRUE);
/* Non-struct interface items: value-copied like simple types
 * (default branch of the inner switch — header not visible). */
345 g_array_insert_val (array_, i, item);
348 /* default value copy of a simple type */
349 g_array_insert_val (array_, i, item);
/* Error unwind: clean up the items marshalled so far, then free the
 * partially-built array and prefix the item index onto the error. */
355 if (sequence_cache->item_cache->from_py_cleanup != NULL) {
357 PyGIMarshalCleanupFunc cleanup_func =
358 sequence_cache->item_cache->from_py_cleanup;
360 /* Only attempt per item cleanup on pointer items */
361 if (sequence_cache->item_cache->is_pointer) {
362 for(j = 0; j < success_count; j++) {
363 PyObject *py_item = PySequence_GetItem (py_arg, j);
365 sequence_cache->item_cache,
368 g_ptr_array_index ((GPtrArray *)array_, j) :
369 g_array_index (array_, gpointer, j),
377 g_ptr_array_free ( ( GPtrArray *)array_, TRUE);
379 g_array_free (array_, TRUE);
380 _PyGI_ERROR_PREFIX ("Item %i: ", i);
/* Success: propagate the length into the companion length argument. */
385 if (array_cache->len_arg_index >= 0) {
386 /* we have an child arg to handle */
387 PyGIArgCache *child_cache =
388 _pygi_callable_cache_get_arg (callable_cache, array_cache->len_arg_index);
390 if (!gi_argument_from_py_ssize_t (&state->args[child_cache->c_arg_index].arg_value,
392 child_cache->type_tag)) {
397 if (array_cache->array_type == GI_ARRAY_TYPE_C) {
398 /* In the case of GI_ARRAY_C, we give the data directly as the argument
399 * but keep the array_ wrapper as cleanup data so we don't have to find
402 arg->v_pointer = array_->data;
404 if (cleanup_transfer == GI_TRANSFER_EVERYTHING) {
/* Callee owns the data; free only the wrapper, not the buffer. */
405 g_array_free (array_, FALSE);
406 *cleanup_data = NULL;
408 *cleanup_data = array_;
411 arg->v_pointer = array_;
413 if (cleanup_transfer == GI_TRANSFER_NOTHING) {
414 /* Free everything in cleanup. */
415 *cleanup_data = array_;
416 } else if (cleanup_transfer == GI_TRANSFER_CONTAINER) {
417 /* Make a shallow copy so we can free the elements later in cleanup
418 * because it is possible invoke will free the list before our cleanup. */
419 *cleanup_data = is_ptr_array ?
420 (gpointer)g_ptr_array_ref ((GPtrArray *)array_) :
421 (gpointer)g_array_ref (array_);
422 } else { /* GI_TRANSFER_EVERYTHING */
423 /* No cleanup, everything is given to the callee. */
424 *cleanup_data = NULL;
/* _pygi_marshal_cleanup_from_py_array:
 * Post-invoke cleanup counterpart of _pygi_marshal_from_py_array.
 * @data is the cleanup_data recorded during from-py marshalling (a
 * GArray or GPtrArray wrapper).  Runs the item cache's from_py_cleanup
 * on each element, then frees the wrapper according to array type and
 * transfer mode.
 *
 * NOTE(review): truncated extract — leading numerals are leaked line
 * numbers; early-return guards, loop braces and the branch structure
 * at the end are only partially visible.
 */
432 _pygi_marshal_cleanup_from_py_array (PyGIInvokeState *state,
433 PyGIArgCache *arg_cache,
436 gboolean was_processed)
439 GArray *array_ = NULL;
440 GPtrArray *ptr_array_ = NULL;
441 PyGISequenceCache *sequence_cache = (PyGISequenceCache *)arg_cache;
442 PyGIArgGArray *array_cache = (PyGIArgGArray *)arg_cache;
/* Exactly one of array_ / ptr_array_ is set, keyed on array type. */
444 if (array_cache->array_type == GI_ARRAY_TYPE_PTR_ARRAY) {
445 ptr_array_ = (GPtrArray *) data;
447 array_ = (GArray *) data;
450 /* clean up items first */
451 if (sequence_cache->item_cache->from_py_cleanup != NULL) {
453 guint len = (array_ != NULL) ? array_->len : ptr_array_->len;
454 PyGIMarshalCleanupFunc cleanup_func =
455 sequence_cache->item_cache->from_py_cleanup;
457 for (i = 0; i < len; i++) {
459 PyObject *py_item = NULL;
461 /* case 1: GPtrArray */
462 if (ptr_array_ != NULL)
463 item = g_ptr_array_index (ptr_array_, i);
464 /* case 2: C array or GArray with object pointers */
465 else if (sequence_cache->item_cache->is_pointer)
466 item = g_array_index (array_, gpointer, i);
467 /* case 3: C array or GArray with simple types or structs */
469 item = array_->data + i * array_cache->item_size;
470 /* special-case hack: GValue array items do not get slice
471 * allocated in _pygi_marshal_from_py_array(), so we must
472 * not try to deallocate it as a slice and thus
473 * short-circuit cleanup_func. */
474 if (cleanup_func == pygi_arg_gvalue_from_py_cleanup) {
475 g_value_unset ((GValue*) item);
/* The original Python item is passed through so per-item cleanup
 * can inspect it; refcount released immediately after. */
480 py_item = PySequence_GetItem (py_arg, i);
481 cleanup_func (state, sequence_cache->item_cache, py_item, item, TRUE);
482 Py_XDECREF (py_item);
486 /* Only free the array when we didn't transfer ownership */
487 if (array_cache->array_type == GI_ARRAY_TYPE_C) {
488 /* always free the GArray wrapper created in from_py marshaling and
489 * passed back as cleanup_data
/* Second argument frees the segment too only when nothing was
 * transferred to the callee. */
491 g_array_free (array_, arg_cache->transfer == GI_TRANSFER_NOTHING);
494 g_array_unref (array_);
496 g_ptr_array_unref (ptr_array_);
/* _pygi_marshal_to_py_array:
 * Convert an array returned from C (raw C array, GArray or GPtrArray)
 * into a Python object: bytes for uint8 element arrays, otherwise a
 * list built by running the item cache's to_py_marshaller per element.
 * For C arrays the length comes from fixed_size, zero termination, or
 * the companion length argument (via gi_argument_to_gsize), and a
 * temporary GArray wrapper is created to unify iteration.
 *
 * NOTE(review): truncated extract — leading numerals are leaked line
 * numbers; the array_ declaration, several returns, braces and error
 * paths are not visible.
 */
505 _pygi_marshal_to_py_array (PyGIInvokeState *state,
506 PyGICallableCache *callable_cache,
507 PyGIArgCache *arg_cache,
511 PyObject *py_obj = NULL;
512 PyGISequenceCache *seq_cache = (PyGISequenceCache *)arg_cache;
513 PyGIArgGArray *array_cache = (PyGIArgGArray *)arg_cache;
/* Tracks how many items were marshalled, for error-path cleanup. */
514 gsize processed_items = 0;
516 /* GArrays make it easier to iterate over arrays
517 * with different element sizes but requires that
518 * we allocate a GArray if the argument was a C array
520 if (array_cache->array_type == GI_ARRAY_TYPE_C) {
/* Determine the C array's length from cache metadata. */
522 if (array_cache->fixed_size >= 0) {
523 g_assert(arg->v_pointer != NULL);
524 len = array_cache->fixed_size;
525 } else if (array_cache->is_zero_terminated) {
526 if (arg->v_pointer == NULL) {
528 } else if (seq_cache->item_cache->type_tag == GI_TYPE_TAG_UINT8) {
529 len = strlen (arg->v_pointer);
/* Zero-terminated non-uint8 arrays are treated as pointer arrays
 * (e.g. gchar**) for the purpose of counting. */
531 len = g_strv_length ((gchar **)arg->v_pointer);
534 GIArgument *len_arg = &state->args[array_cache->len_arg_index].arg_value;
535 PyGIArgCache *arg_cache = _pygi_callable_cache_get_arg (callable_cache,
536 array_cache->len_arg_index);
538 if (!gi_argument_to_gsize (len_arg, &len, arg_cache->type_tag)) {
/* Wrap the raw C buffer in a GArray for uniform element access. */
543 array_ = g_array_new (FALSE,
545 array_cache->item_size);
546 if (array_ == NULL) {
/* Allocation failed: honor transfer by freeing the callee's buffer. */
549 if (arg_cache->transfer == GI_TRANSFER_EVERYTHING && arg->v_pointer != NULL)
550 g_free (arg->v_pointer);
/* Steal the C buffer into the wrapper (drop the wrapper's own data). */
555 if (array_->data != NULL)
556 g_free (array_->data);
557 array_->data = arg->v_pointer;
/* Already a GArray/GPtrArray: use it directly. */
560 array_ = arg->v_pointer;
/* uint8 arrays become Python bytes objects. */
563 if (seq_cache->item_cache->type_tag == GI_TYPE_TAG_UINT8) {
564 if (arg->v_pointer == NULL) {
565 py_obj = PYGLIB_PyBytes_FromString ("");
567 py_obj = PYGLIB_PyBytes_FromStringAndSize (array_->data, array_->len);
570 if (arg->v_pointer == NULL) {
571 py_obj = PyList_New (0);
576 PyGIMarshalToPyFunc item_to_py_marshaller;
577 PyGIArgCache *item_arg_cache;
579 py_obj = PyList_New (array_->len);
584 item_arg_cache = seq_cache->item_cache;
585 item_to_py_marshaller = item_arg_cache->to_py_marshaller;
587 item_size = g_array_get_element_size (array_);
589 for (i = 0; i < array_->len; i++) {
590 GIArgument item_arg = {0};
593 /* If we are receiving an array of pointers, simply assign the pointer
594 * and move on, letting the per-item marshaler deal with the
595 * various transfer modes and ref counts (e.g. g_variant_ref_sink).
597 if (array_cache->array_type == GI_ARRAY_TYPE_PTR_ARRAY) {
598 item_arg.v_pointer = g_ptr_array_index ( ( GPtrArray *)array_, i);
600 } else if (item_arg_cache->is_pointer) {
601 item_arg.v_pointer = g_array_index (array_, gpointer, i);
603 } else if (item_arg_cache->type_tag == GI_TYPE_TAG_INTERFACE) {
604 PyGIInterfaceCache *iface_cache = (PyGIInterfaceCache *) item_arg_cache;
606 /* FIXME: This probably doesn't work with boxed types or gvalues.
607 * See fx. _pygi_marshal_from_py_array() */
608 switch (g_base_info_get_type (iface_cache->interface_info)) {
609 case GI_INFO_TYPE_STRUCT:
610 if (arg_cache->transfer == GI_TRANSFER_EVERYTHING &&
611 !g_type_is_a (iface_cache->g_type, G_TYPE_BOXED)) {
612 /* array elements are structs */
/* Copy the flat struct out so the per-item marshaller can own it. */
613 gpointer *_struct = g_malloc (item_size);
614 memcpy (_struct, array_->data + i * item_size,
616 item_arg.v_pointer = _struct;
618 item_arg.v_pointer = array_->data + i * item_size;
622 item_arg.v_pointer = g_array_index (array_, gpointer, i);
/* Simple value types: copy the raw bytes into the GIArgument. */
626 memcpy (&item_arg, array_->data + i * item_size, item_size);
629 py_item = item_to_py_marshaller ( state,
634 if (py_item == NULL) {
/* Item marshalling failed: drop the wrapper for C arrays and bail. */
637 if (array_cache->array_type == GI_ARRAY_TYPE_C)
638 g_array_unref (array_);
642 PyList_SET_ITEM (py_obj, i, py_item);
/* Success for C arrays: free the wrapper but keep the data (FALSE). */
648 if (array_cache->array_type == GI_ARRAY_TYPE_C)
649 g_array_free (array_, FALSE);
/* Error unwind (label not visible): free wrapper/data and run to_py
 * cleanup on the items not yet marshalled. */
654 if (array_cache->array_type == GI_ARRAY_TYPE_C) {
655 g_array_free (array_, arg_cache->transfer == GI_TRANSFER_EVERYTHING);
657 /* clean up unprocessed items */
658 if (seq_cache->item_cache->to_py_cleanup != NULL) {
660 PyGIMarshalCleanupFunc cleanup_func = seq_cache->item_cache->to_py_cleanup;
661 for (j = processed_items; j < array_->len; j++) {
663 seq_cache->item_cache,
665 g_array_index (array_, gpointer, j),
670 if (arg_cache->transfer == GI_TRANSFER_EVERYTHING)
671 g_array_free (array_, TRUE);
/* _wrap_c_array:
 * Helper for the to-py cleanup path: wrap a raw C array pointer in a
 * freshly-allocated GArray (stealing @data as the array's buffer) so
 * elements can be iterated uniformly.  Length is derived from
 * fixed_size, zero termination (g_strv_length), or the companion
 * length argument's v_long.
 *
 * NOTE(review): truncated extract — the return statement, NULL checks
 * and the data-steal assignment after g_free are not fully visible.
 */
678 _wrap_c_array (PyGIInvokeState *state,
679 PyGIArgGArray *array_cache,
685 if (array_cache->fixed_size >= 0) {
686 len = array_cache->fixed_size;
687 } else if (array_cache->is_zero_terminated) {
688 len = g_strv_length ((gchar **)data);
689 } else if (array_cache->len_arg_index >= 0) {
690 GIArgument *len_arg = &state->args[array_cache->len_arg_index].arg_value;
691 len = len_arg->v_long;
694 array_ = g_array_new (FALSE,
696 array_cache->item_size);
/* Discard the wrapper's own allocation before adopting @data
 * (the adopting assignment is not visible in this extract). */
701 g_free (array_->data);
/* _pygi_marshal_cleanup_to_py_array:
 * Cleanup for arrays received from C when ownership was transferred to
 * Python (EVERYTHING or CONTAINER).  Wraps raw C arrays via
 * _wrap_c_array, runs the item cache's to_py_cleanup over every
 * element, then frees the container.
 *
 * NOTE(review): truncated extract — leading numerals are leaked line
 * numbers; the error path after _wrap_c_array and the closing braces
 * are not visible.
 */
709 _pygi_marshal_cleanup_to_py_array (PyGIInvokeState *state,
710 PyGIArgCache *arg_cache,
713 gboolean was_processed)
/* Nothing to do for GI_TRANSFER_NOTHING: the callee kept ownership. */
715 if (arg_cache->transfer == GI_TRANSFER_EVERYTHING ||
716 arg_cache->transfer == GI_TRANSFER_CONTAINER) {
717 GArray *array_ = NULL;
718 GPtrArray *ptr_array_ = NULL;
719 PyGISequenceCache *sequence_cache = (PyGISequenceCache *)arg_cache;
720 PyGIArgGArray *array_cache = (PyGIArgGArray *)arg_cache;
722 /* If this isn't a garray create one to help process variable sized
724 if (array_cache->array_type == GI_ARRAY_TYPE_C) {
725 array_ = _wrap_c_array (state, array_cache, data);
730 } else if (array_cache->array_type == GI_ARRAY_TYPE_PTR_ARRAY) {
731 ptr_array_ = (GPtrArray *) data;
733 array_ = (GArray *) data;
/* Per-item cleanup over whichever container is populated. */
736 if (sequence_cache->item_cache->to_py_cleanup != NULL) {
738 guint len = (array_ != NULL) ? array_->len : ptr_array_->len;
740 PyGIMarshalCleanupFunc cleanup_func = sequence_cache->item_cache->to_py_cleanup;
741 for (i = 0; i < len; i++) {
743 sequence_cache->item_cache,
745 (array_ != NULL) ? g_array_index (array_, gpointer, i) : g_ptr_array_index (ptr_array_, i),
/* Free the container itself (TRUE frees the element segment too). */
751 g_array_free (array_, TRUE);
753 g_ptr_array_free (ptr_array_, TRUE);
/* _array_cache_free_func:
 * GDestroyNotify for a PyGIArgGArray cache: releases the nested item
 * cache, then the slice-allocated cache struct itself.  Installed as
 * destroy_notify in pygi_arg_garray_setup.
 */
758 _array_cache_free_func (PyGIArgGArray *cache)
761 pygi_arg_cache_free (((PyGISequenceCache *)cache)->item_cache);
762 g_slice_free (PyGIArgGArray, cache);
/* pygi_arg_garray_len_arg_setup:
 * Wire up the companion "length" argument for an array argument.
 * Resolves len_arg_index from GI type info (offset by the implicit
 * self argument for methods/vfuncs), creates or reuses the length
 * arg's cache, marks it as a child argument handled implicitly by the
 * array marshallers, and — when the length precedes the array in the
 * argument list — shifts the Python-side indices of later arguments
 * down by one since the length is not exposed to Python.
 *
 * NOTE(review): truncated extract — leading numerals are leaked line
 * numbers; several returns, braces and an else-branch are not visible.
 */
767 pygi_arg_garray_len_arg_setup (PyGIArgCache *arg_cache,
768 GITypeInfo *type_info,
769 PyGICallableCache *callable_cache,
770 PyGIDirection direction,
772 gssize *py_arg_index)
774 PyGIArgGArray *seq_cache = (PyGIArgGArray *)arg_cache;
776 /* attempt len_arg_index setup for the first time */
777 if (seq_cache->len_arg_index < 0) {
778 seq_cache->len_arg_index = g_type_info_get_array_length (type_info);
780 /* offset by self arg for methods and vfuncs */
781 if (seq_cache->len_arg_index >= 0 && callable_cache != NULL) {
782 seq_cache->len_arg_index += callable_cache->args_offset;
786 if (seq_cache->len_arg_index >= 0) {
787 PyGIArgCache *child_cache = NULL;
789 child_cache = _pygi_callable_cache_get_arg (callable_cache,
790 seq_cache->len_arg_index);
791 if (child_cache == NULL) {
792 child_cache = pygi_arg_cache_alloc ();
794 /* If the "length" arg cache already exists (the length comes before
795 * the array in the argument list), remove it from the to_py_args list
796 * because it does not belong in "to python" return tuple. The length
797 * will implicitly be a part of the returned Python list.
799 if (direction & PYGI_DIRECTION_TO_PYTHON) {
800 callable_cache->to_py_args =
801 g_slist_remove (callable_cache->to_py_args, child_cache);
804 /* This is a case where the arg cache already exists and has been
805 * setup by another array argument sharing the same length argument.
806 * See: gi_marshalling_tests_multi_array_key_value_in
808 if (child_cache->meta_type == PYGI_META_ARG_TYPE_CHILD)
812 /* There is a length argument for this array, so increment the number
813 * of "to python" child arguments when applicable.
815 if (direction & PYGI_DIRECTION_TO_PYTHON)
816 callable_cache->n_to_py_child_args++;
/* Mark the length as an implicit child: it is marshalled by the
 * basic-type adapters and never surfaces as a Python parameter. */
818 child_cache->meta_type = PYGI_META_ARG_TYPE_CHILD;
819 child_cache->direction = direction;
820 child_cache->to_py_marshaller = _pygi_marshal_to_py_basic_type_cache_adapter;
821 child_cache->from_py_marshaller = _pygi_marshal_from_py_basic_type_cache_adapter;
822 child_cache->py_arg_index = -1;
824 /* ugly edge case code:
826 * When the length comes before the array parameter we need to update
827 * indexes of arguments after the index argument.
829 if (seq_cache->len_arg_index < arg_index && direction & PYGI_DIRECTION_FROM_PYTHON) {
831 (*py_arg_index) -= 1;
832 callable_cache->n_py_args -= 1;
834 for (i = seq_cache->len_arg_index + 1;
835 i < _pygi_callable_cache_args_len (callable_cache); i++) {
836 PyGIArgCache *update_cache = _pygi_callable_cache_get_arg (callable_cache, i);
837 if (update_cache == NULL)
840 update_cache->py_arg_index -= 1;
844 _pygi_callable_cache_set_arg (callable_cache, seq_cache->len_arg_index, child_cache);
/* pygi_arg_garray_setup:
 * Initialize a PyGIArgGArray cache from GI type info: delegates the
 * common sequence setup, records array type / zero termination / fixed
 * size / item size, installs _array_cache_free_func as the destroy
 * notify, and hooks up the array marshallers and cleanups for each
 * requested direction.  len_arg_index is deferred to
 * pygi_arg_garray_len_arg_setup.
 *
 * NOTE(review): truncated extract — the remaining setup arguments,
 * return statements and closing braces are not visible.
 */
852 pygi_arg_garray_setup (PyGIArgGArray *sc,
853 GITypeInfo *type_info,
854 GIArgInfo *arg_info, /* may be NULL for return arguments */
856 PyGIDirection direction,
857 PyGICallableCache *callable_cache)
859 GITypeInfo *item_type_info;
860 PyGIArgCache *arg_cache = (PyGIArgCache *)sc;
862 if (!pygi_arg_sequence_setup ((PyGISequenceCache *)sc,
871 ((PyGIArgCache *)sc)->destroy_notify = (GDestroyNotify)_array_cache_free_func;
872 sc->array_type = g_type_info_get_array_type (type_info);
873 sc->is_zero_terminated = g_type_info_is_zero_terminated (type_info);
874 sc->fixed_size = g_type_info_get_array_fixed_size (type_info);
875 sc->len_arg_index = -1; /* setup by pygi_arg_garray_len_arg_setup */
/* Element size comes from the array's first (only) type parameter. */
877 item_type_info = g_type_info_get_param_type (type_info, 0);
878 sc->item_size = _pygi_g_type_info_size (item_type_info);
879 g_base_info_unref ( (GIBaseInfo *)item_type_info);
881 if (direction & PYGI_DIRECTION_FROM_PYTHON) {
882 arg_cache->from_py_marshaller = _pygi_marshal_from_py_array;
883 arg_cache->from_py_cleanup = _pygi_marshal_cleanup_from_py_array;
886 if (direction & PYGI_DIRECTION_TO_PYTHON) {
887 arg_cache->to_py_marshaller = _pygi_marshal_to_py_array;
888 arg_cache->to_py_cleanup = _pygi_marshal_cleanup_to_py_array;
/* pygi_arg_garray_new_from_info:
 * Public constructor: slice-allocate a zeroed PyGIArgGArray, run
 * pygi_arg_garray_setup on it, and return it as a generic
 * PyGIArgCache*.  On setup failure the cache is freed and NULL is
 * presumably returned (the failure return is not visible in this
 * truncated extract).
 */
895 pygi_arg_garray_new_from_info (GITypeInfo *type_info,
898 PyGIDirection direction,
899 PyGICallableCache *callable_cache)
901 PyGIArgGArray *array_cache = g_slice_new0 (PyGIArgGArray);
902 if (array_cache == NULL)
905 if (!pygi_arg_garray_setup (array_cache,
911 pygi_arg_cache_free ( (PyGIArgCache *)array_cache);
915 return (PyGIArgCache *)array_cache;