*
* All methods can be called from many threads as part of the interface.
*
+ * Note: Statement that call does not block does not mean that it is lock-free.
+ * Internally the implementation is allowed to take mutex but it should guarantee
+ * that it is not for a long time (including blocking on anything else, I/O calls
+ * or callbacks).
+ *
* @see Promise
* @see Lazy
* @see task
* @see async_task
* @since 0.11.0
- *
- * Note: Statement that call does not block does not mean that it is lock-free.
- * Internally the implementation is allowed to take mutex but it should guarantee
- * that it is not for a long time (including blocking on anything else, I/O calls
- * or callbacks).
*/
[GenericAccessors]
public interface Gee.Future<G> : Object {
/**
* Waits until the value is ready.
*
- * @returns The {@link value} associated with future
+ * @return The {@link value} associated with future
* @see ready
* @see wait_until
* @see wait_async
*
* @param end_time The time when the wait should finish
* @param value The {@link value} associated with future if the wait was successful
- * @returns ``true`` if the value was ready within deadline or ``false`` otherwise
+ * @return ``true`` if the value was ready within deadline or ``false`` otherwise
* @see ready
* @see wait
* @see wait_async
/**
* Reschedules the callback until the {@link value} is available.
*
- * @returns The {@link value} associated with future
+ * @return The {@link value} associated with future
* @see ready
* @see wait
* @see wait_until
* Maps a future value to another value by a function and returns the
* resulting value in a future.
*
- * @param func Function applied to {@link value}
- * @returns Value returned by function
- *
- * @see flat_map
- * @see light_map
- *
* Note: As time taken by function might not contribute to
* {@link wait_until} and the implementation is allowed to compute
* value eagerly by {@link when_done} it is recommended to use
* {@link task} and {@link flat_map} for longer computation.
+ *
+ * @param func Function applied to {@link value}
+ * @return Value returned by function
+ *
+ * @see flat_map
+ * @see light_map
*/
[CCode (ordering = 3)]
public virtual Future<A> map<A> (owned MapFunc<A, G> func) {
* Maps a future value to another value by a function and returns the
* resulting value in a future.
*
- * @param func Function applied to {@link value}
- * @returns Value returned by function
- *
- * @see flat_map
- * @see map
- * @since 0.11.4
- *
* Note: The function may be reevaluated at any time and it might
* be called lazily. Therefore it is recommended for it to be
* idempotent. If the function needs to be called eagerly or have
* {@link wait_until} and the implementation is allowed to compute
* value eagerly by {@link when_done} it is recommended to use
* {@link task} and {@link flat_map} for longer computation.
+ *
+ * @param func Function applied to {@link value}
+ * @return Value returned by function
+ *
+ * @see flat_map
+ * @see map
+ * @since 0.11.4
*/
[CCode (ordering = 10, cname = "gee_future_light_map_fixed", vfunc_name = "light_map_fixed")]
public virtual Future<A> light_map<A> (owned LightMapFunc<A, G> func) {
* Combines values of two futures using a function returning the combined
* value in future (call does not block).
*
- * @param join_func Function applied to values
- * @param second Second parameter
- * @returns A combine value
- * @since 0.11.4
- *
* Note: As time taken by function does not contribute to
* {@link wait_until} and the implementation is allowed to compute
* value eagerly by {@link when_done} it is recommended to return a
* future from {@link task} and use {@link flat_map} for longer computation.
+ *
+ * @param zip_func Function applied to values
+ * @param second Second parameter
+ * @return A combined value
+ * @since 0.11.4
*/
[CCode (ordering = 5)]
public virtual Future<B> zip<A, B> (owned ZipFunc<G, A, B> zip_func, Future<A> second) {
/**
* Maps a future value to another future value which is returned (call does not block).
*
- * @param func Function applied to {@link value}
- * @returns Value of a future returned by function
- *
- * @see map
- *
* Note: As time taken by function does not contribute to
* {@link wait_until} and the implementation is allowed to compute
* value eagerly by {@link when_done} it is recommended to put the
* larger computation inside the returned future for example by
* {@link task}
+ *
+ * @param func Function applied to {@link value}
+ * @return Value of a future returned by function
+ *
+ * @see map
*/
[CCode (ordering = 6)]
public virtual Gee.Future<A> flat_map<A>(owned FlatMapFunc<A, G> func) {
* Schedules a task to execute asynchronously. Internally one
* of threads from pool will execute the task.
*
- * @params task Task to be executed
- * @returns Future value returned by task
- * @see async_task
- * @since 0.11.0
- *
* Note: There is a limited number of threads unless the environment variable
* ``GEE_NUM_THREADS`` is set to -1. It is not advised to call I/O or
* block inside the task. If necessary it is possible to create a new one
* by another call.
+ *
+ * @param task Task to be executed
+ * @return Future value returned by task
+ * @see async_task
+ * @since 0.11.0
*/
public Future<G> task<G>(owned Task<G> task) throws GLib.ThreadError {
TaskData<G> tdata = new TaskData<G>();
* Continues the execution asynchronously in a helper thread. Internally
* one of threads from pool will execute the task.
*
- * @see task
- * @since 0.11.0
- *
* Note: There is a limited number of threads unless the environment variable
* ``GEE_NUM_THREADS`` is set to -1. It is not advised to call I/O or
* block inside the task. If necessary it is possible to create a new one
* by another call.
+ *
+ * @see task
+ * @since 0.11.0
*/
public async void async_task() throws GLib.ThreadError {
task<bool>(async_task.callback);