Originally, the code for signal connection and disconnection was:
actor.connect("touched", onTouched);
actor.disconnect("touched", onTouched);
It needs to be changed to match the style of the Node.js event handling API
and the Tizen Native JavaScript API:
actor.on("touched", onTouched);
actor.off("touched", onTouched);
Change-Id: I7d90651e6014b795292d696c90cb0ab77531a165
}
// connect to touch events
-myActor.connect( "touched", onPressed );
+myActor.on( "touched", onPressed );
}
```
// connect to touch events
- myActor.connect( "hovered", onHover);
+ myActor.on( "hovered", onHover);
}
// connect to touch events
-myActor.connect( "mouseWheelEvent", onMouseWheel );
+myActor.on( "mouseWheelEvent", onMouseWheel );
log("Animation finished \n");
}
-anim.connect("finished", finished );
+anim.on("finished", finished );
Connect to the pre-focus-change call back as follows:
```
// listen for pre-focus change events
-dali.keyboardFocusManager.connect("keyboardPreFocusChange", this.preFocusChanged);
+dali.keyboardFocusManager.on("keyboardPreFocusChange", this.preFocusChanged);
// example call back handler
-dali.keyboardFocusManager.connect("keyboardPreFocusChange", myCallback)
+dali.keyboardFocusManager.on("keyboardPreFocusChange", myCallback)
```
KeyboardFocusManager makes the best guess for which actor to focus towards the given direction, but applications might want to change that.
-dali.keyboardFocusManager.connect("keyboardFocusChange", myCallback)
+dali.keyboardFocusManager.on("keyboardFocusChange", myCallback)
```
@class KeyboardFocusManager
panGestureDetector.attach(actor);
// Connect the detected signal
-panGestureDetector.connect("panDetected", onPan);
+panGestureDetector.on("panDetected", onPan);
onPan = function(actor, panGesture)
{
var image = new dali.ResourceImage( {url: "my_image.png"} );
-image.connect("imageLoadingFinished", finished );
+image.on("imageLoadingFinished", finished );
// Create a material and add the image as texture to be used by the material.
var material = new dali.Material();
-dali.stage.connect("keyEvent", daliApp.myCallback);
+dali.stage.on("keyEvent", daliApp.myCallback);
```
The key event object has the following properties
void AddSignalConnectAndDisconnect( v8::Isolate* isolate, v8::Local<v8::ObjectTemplate>& objTemplate )
{
- objTemplate->Set( v8::String::NewFromUtf8( isolate, "connect"),
+ objTemplate->Set( v8::String::NewFromUtf8( isolate, "on"),
v8::FunctionTemplate::New( isolate, SignalManager::SignalConnect) );
- objTemplate->Set( v8::String::NewFromUtf8( isolate, "disconnect"),
+ objTemplate->Set( v8::String::NewFromUtf8( isolate, "off"),
v8::FunctionTemplate::New( isolate, SignalManager::SignalDisconnect) );
}