var validator_ = xwalk.utils.validator;
var types_ = validator_.Types;
-var bridge = xwalk.utils.NativeBridge(extension, true);
+var native_ = new xwalk.utils.NativeManager(extension);
function CommonFS() {};
function _initializeCache() {
try {
- var result = bridge.sync({
- cmd: 'Archive_fetchVirtualRoots'
- });
+ var result = native_.callSync('Archive_fetchVirtualRoots', {});
+
+ if (native_.isFailure(result)) {
+ throw native_.getErrorObject(result);
+ }
+
+ result = native_.getResultObject(result);
for (var i = 0; i < result.length; ++i) {
- CommonFS.cacheVirtualToReal[result[i].name] = {
- path: result[i].path
- };
+ CommonFS.cacheVirtualToReal[result[i].name] = {
+ path: result[i].path
+ };
}
} catch(e) {
console.log("Exception while getting widget paths was thrown: " + e);
BEST: "BEST"
};
+var onprogressCallbacks = {};
+var ARCHIVE_ONPROGRESS_CALLBACK = 'ArchiveOnprogressCallback';
+
+var ArchiveFileProgressCallback = function(msg) {
+ if (native_.isFailure(msg)) {
+ return;
+ }
+
+ var result = native_.getResultObject(msg);
+ if ('onprogress' === result.action && onprogressCallbacks.hasOwnProperty(result.opId)) {
+ onprogressCallbacks[result.opId](result.opId, result.value, result.filename);
+ }
+};
+
+native_.addListener(ARCHIVE_ONPROGRESS_CALLBACK, ArchiveFileProgressCallback);
+
/**
* The ArchiveFileEntry interface provides access to ArchiveFile member information and file data.
* This constructor is for internal use only.
]),
opId = getNextOpId();
- if (!CommonFS.isVirtualPath(args.destinationDirectory))
+ if (!CommonFS.isVirtualPath(args.destinationDirectory)) {
throw new WebAPIException(WebAPIException.TYPE_MISMATCH_ERR,
"Destination directory should be virtual path or file.");
- bridge.async({
- cmd: 'ArchiveFileEntry_extract',
- args: {
- destinationDirectory: CommonFS.toRealPath(args.destinationDirectory),
- stripName: args.stripName || null,
- overwrite: args.overwrite || null,
- opId: opId,
- handle: getHandle(),
- name: this.name
- }
- }).then({
- success: function () {
- if (args.onsuccess) {
- args.onsuccess.call(null);
- }
- },
- error: function (e) {
- if (args.onerror) {
- args.onerror.call(
- null,
- new WebAPIException(e)
- );
- }
- },
- progress: function (data) {
- if (args.onprogress) {
- args.onprogress.call(
- null,
- opId,
- data.value,
- data.filename
- );
- }
+ }
+
+ var callArgs = {
+ destinationDirectory : CommonFS.toRealPath(args.destinationDirectory),
+ stripName : args.stripName || null,
+ overwrite : args.overwrite || null,
+ opId : opId,
+ handle : getHandle(),
+ name : this.name
+ };
+
+  var callback = function(result) {
+    if (native_.isFailure(result)) {
+      native_.callIfPossible(args.onerror, native_.getErrorObject(result));
+    } else {
+      delete onprogressCallbacks[opId];
+      native_.callIfPossible(args.onsuccess);
     }
-    });
+  };
+
+ if (args.onprogress) {
+ onprogressCallbacks[opId] = args.onprogress;
+ }
+
+ var result = native_.call('ArchiveFileEntry_extract', callArgs, callback);
+ if (native_.isFailure(result)) {
+ throw native_.getErrorObject(result);
+ }
return opId;
};
]),
opId = getNextOpId();
- if (!CommonFS.isVirtualPath(args.sourceFile))
+ if (!CommonFS.isVirtualPath(args.sourceFile)) {
throw new WebAPIException(WebAPIException.TYPE_MISMATCH_ERR,
"sourceFile should be virtual path or file.");
+ }
var optionsAttributes = ["destination", "stripSourceDirectory", "compressionLevel"],
options = args.options || {};
}
checkMode(this.mode, ["w","rw", "a"]);
- bridge.async({
- cmd: 'ArchiveFile_add',
- args: {
- sourceFile: CommonFS.toRealPath(args.sourceFile),
- options: options,
- opId: opId,
- handle: getHandle()
- }
- }).then({
- success: function () {
- if (args.onsuccess) {
- args.onsuccess.call(null);
- }
- },
- error: function (e) {
- if (args.onerror) {
- args.onerror.call(
- null,
- new WebAPIException(e)
- );
- }
- },
- progress: function (data) {
- if (args.onprogress) {
- args.onprogress.call(
- null,
- opId,
- data.value,
- data.filename
- );
- }
+
+ var callArgs = {
+ sourceFile : CommonFS.toRealPath(args.sourceFile),
+ options : options,
+ opId : opId,
+ handle : getHandle()
+ };
+
+ var callback = function(result) {
+ if (native_.isFailure(result)) {
+ native_.callIfPossible(args.onerror, native_.getErrorObject(result));
+ } else {
+ delete onprogressCallbacks[opId];
+ native_.callIfPossible(args.onsuccess);
}
- });
+ };
+
+ if (args.onprogress) {
+ onprogressCallbacks[opId] = args.onprogress;
+ }
+
+ var result = native_.call('ArchiveFile_add', callArgs, callback);
+ if (native_.isFailure(result)) {
+ throw native_.getErrorObject(result);
+ }
return opId;
};
]),
opId = getNextOpId();
- if (!CommonFS.isVirtualPath(args.destinationDirectory))
+ if (!CommonFS.isVirtualPath(args.destinationDirectory)) {
throw new WebAPIException(WebAPIException.TYPE_MISMATCH_ERR,
"destinationDirectory should be virtual path or file.");
+ }
checkMode(this.mode, ["r","rw"]);
- bridge.async({
- cmd: 'ArchiveFile_extractAll',
- args: {
- destinationDirectory: CommonFS.toRealPath(args.destinationDirectory),
- overwrite: args.overwrite || null,
- opId: opId,
- handle: getHandle()
- }
- }).then({
- success: function () {
- if (args.onsuccess) {
- args.onsuccess.call(null);
- }
- },
- error: function (e) {
- if (args.onerror) {
- args.onerror.call(
- null,
- new WebAPIException(e)
- );
- }
- },
- progress: function (data) {
- if (args.onprogress) {
- args.onprogress.call(
- null,
- opId,
- data.value,
- data.filename
- );
- }
+
+ var callArgs = {
+ destinationDirectory : CommonFS.toRealPath(args.destinationDirectory),
+ overwrite : args.overwrite || null,
+ opId : opId,
+ handle : getHandle()
+ };
+
+ var callback = function(result) {
+ if (native_.isFailure(result)) {
+ native_.callIfPossible(args.onerror, native_.getErrorObject(result));
+ } else {
+ delete onprogressCallbacks[opId];
+ native_.callIfPossible(args.onsuccess);
}
- });
+ };
+
+ if (args.onprogress) {
+ onprogressCallbacks[opId] = args.onprogress;
+ }
+
+ var result = native_.call('ArchiveFile_extractAll', callArgs, callback);
+ if (native_.isFailure(result)) {
+ throw native_.getErrorObject(result);
+ }
return opId;
};
opId = getNextOpId();
checkMode(this.mode, ["r","rw"]);
- bridge.async({
- cmd: 'ArchiveFile_getEntries',
- args: {
- opId: opId,
- handle: getHandle()
- }
- }).then({
- success: function (data) {
+
+ var callArgs = {
+ opId : opId,
+ handle : getHandle()
+ };
+
+ var callback = function(result) {
+ if (native_.isFailure(result)) {
+ native_.callIfPossible(args.onerror, native_.getErrorObject(result));
+ } else {
var entries = [];
- data.forEach(function (e) {
+ var ret = native_.getResultObject(result);
+ ret.forEach(function (e) {
entries.push(new ArchiveFileEntry(e, priv));
});
- args.onsuccess.call(null, entries);
- },
- error: function (e) {
- if (args.onerror) {
- args.onerror.call(
- null,
- new WebAPIException(e)
- );
- }
- }
- });
+ args.onsuccess(entries);
+ }
+ };
+
+ var result = native_.call('ArchiveFile_getEntries', callArgs, callback);
+ if (native_.isFailure(result)) {
+ throw native_.getErrorObject(result);
+ }
return opId;
};
opId = getNextOpId();
checkMode(this.mode, ["r","rw"]);
- bridge.async({
- cmd: 'ArchiveFile_getEntryByName',
- args: {
- name: args.name,
- opId: opId,
- handle: getHandle()
- }
- }).then({
- success: function (data) {
- args.onsuccess.call(null, new ArchiveFileEntry(data, priv));
- },
- error: function (e) {
- if (args.onerror) {
- args.onerror.call(
- null,
- new WebAPIException(e)
- );
- }
+
+ var callArgs = {
+ name : args.name,
+ opId : opId,
+ handle : getHandle()
+ };
+
+ var callback = function(result) {
+ if (native_.isFailure(result)) {
+ native_.callIfPossible(args.onerror, native_.getErrorObject(result));
+ } else {
+ args.onsuccess(new ArchiveFileEntry(native_.getResultObject(result), priv));
}
- });
+ };
+
+ var result = native_.call('ArchiveFile_getEntryByName', callArgs, callback);
+ if (native_.isFailure(result)) {
+ throw native_.getErrorObject(result);
+ }
return opId;
};
var handle = priv.handle;
if(priv.handle) {
delete priv.handle;
- bridge.sync({
- cmd: 'ArchiveFile_close',
- args: {
- handle: handle
- }
- });
+ var result = native_.callSync('ArchiveFile_close', {'handle': handle});
+
+ if (native_.isFailure(result)) {
+ throw native_.getErrorObject(result);
+ }
}
};
}
}
}
- if (!CommonFS.isVirtualPath(args.file))
+ if (!CommonFS.isVirtualPath(args.file)) {
throw new WebAPIException(WebAPIException.TYPE_MISMATCH_ERR,
"file should be virtual path or file.");
+ }
- bridge.async({
- cmd: 'ArchiveManager_open',
- args: {
- file: CommonFS.toRealPath(args.file),
- mode: args.mode,
- options: options,
- opId: opId
- }
- }).then({
- success: function (data) {
- args.onsuccess.call(null, new ArchiveFile(data));
- },
- error: function (e) {
- if (args.onerror) {
- args.onerror.call(
- null,
- new WebAPIException(e)
- );
- }
+ var callArgs = {
+ file : CommonFS.toRealPath(args.file),
+ mode : args.mode,
+ options : options,
+    opId : opId
+  };
+
+ var callback = function(result) {
+ if (native_.isFailure(result)) {
+ native_.callIfPossible(args.onerror, native_.getErrorObject(result));
+ } else {
+ args.onsuccess(new ArchiveFile(native_.getResultObject(result)));
}
- });
+ };
+
+ var result = native_.call('ArchiveManager_open', callArgs, callback);
+ if (native_.isFailure(result)) {
+ throw native_.getErrorObject(result);
+ }
return opId;
};
{ name: "opId", type: types_.LONG }
]);
- bridge.sync({
- cmd: 'ArchiveManager_abort',
- args: {
- opId: args.opId
- }
- });
+ var result = native_.callSync('ArchiveManager_abort', {opId: args.opId});
+
+ if (native_.isFailure(result)) {
+ throw native_.getErrorObject(result);
+ }
};
//Exports
#include "archive_callback_data.h"
#include "common/logger.h"
+#include "common/tools.h"
#include "archive_file.h"
#include "archive_utils.h"
namespace extension {
namespace archive {
-using namespace common;
+using common::Instance;
+using common::tools::ReportSuccess;
//----------------------------------------------------------------------------------------
//OperationCallbackData
obj[JSON_CALLBACK_ID] = picojson::value(callback->getCallbackId());
if (!callback->isError()) {
- obj[JSON_ACTION] = picojson::value(JSON_CALLBACK_SUCCCESS);
-
+ ReportSuccess(obj);
LoggerD("%s", val.serialize().c_str());
Instance::PostMessage(&callback->instance_, val.serialize().c_str());
picojson::value val = picojson::value(picojson::object());
picojson::object& obj = val.get<picojson::object>();
- obj[JSON_CALLBACK_ID] = picojson::value(callbackId);
- obj[JSON_DATA] = picojson::value(picojson::object());
- picojson::object& args = obj[JSON_DATA].get<picojson::object>();
- obj[JSON_ACTION] = picojson::value(JSON_CALLBACK_PROGRESS);
- obj[JSON_CALLBACK_KEEP] = picojson::value(true);
+ obj[JSON_LISTENER_ID] = picojson::value(JSON_ONPROGRESS_CALLBACK);
- args[PARAM_OPERATION_ID] = picojson::value(static_cast<double>(operationId));
- args[PARAM_VALUE] = picojson::value(value);
- args[PARAM_FILENAME] = picojson::value(filename);
+ picojson::object result;
+ result[JSON_ACTION] = picojson::value(JSON_CALLBACK_PROGRESS);
+ result[PARAM_OPERATION_ID] = picojson::value(static_cast<double>(operationId));
+ result[PARAM_VALUE] = picojson::value(value);
+ result[PARAM_FILENAME] = picojson::value(filename);
+ ReportSuccess(picojson::value(result), obj);
LoggerD("%s", val.serialize().c_str());
Instance::PostMessage(&instance_, val.serialize().c_str());
#include "common/picojson.h"
#include "common/logger.h"
+#include "common/tools.h"
#include "archive_manager.h"
#include "archive_utils.h"
#include "zip.h"
#include "archive_instance.h"
-using namespace common;
+using common::ErrorCode;
+using common::PlatformResult;
+using common::tools::ReportError;
+using common::tools::ReportSuccess;
namespace extension {
namespace archive {
picojson::value val = picojson::value(picojson::object());
picojson::object& obj = val.get<picojson::object>();
obj[JSON_CALLBACK_ID] = picojson::value(callback->getCallbackId());
- obj[JSON_DATA] = picojson::value(picojson::object());
-
- picojson::object& args = obj[JSON_DATA].get<picojson::object>();
if (!callback->isError()) {
long handle = ArchiveManager::getInstance().addPrivData(archive_file);
- obj[JSON_ACTION] = picojson::value(JSON_CALLBACK_SUCCCESS);
-
std::string fm_str;
PlatformResult result = fileModeToString(archive_file->getFileMode(), &fm_str);
if (result.error_code() != ErrorCode::NO_ERROR) {
return false;
}
- args[ARCHIVE_FILE_ATTR_MODE] = picojson::value(fm_str);
- args[ARCHIVE_FILE_ATTR_DECOMPRESSED_SIZE] = picojson::value();
- args[ARCHIVE_FILE_HANDLE] = picojson::value(static_cast<double>(handle));
- } else {
- obj[JSON_ACTION] = picojson::value(JSON_CALLBACK_ERROR);
+ picojson::value ret_val = picojson::value(picojson::object());
+ picojson::object& ret = ret_val.get<picojson::object>();
+
+ ret[ARCHIVE_FILE_ATTR_MODE] = picojson::value(fm_str);
+ ret[ARCHIVE_FILE_ATTR_DECOMPRESSED_SIZE] = picojson::value();
+ ret[ARCHIVE_FILE_HANDLE] = picojson::value(static_cast<double>(handle));
- args[ERROR_CALLBACK_CODE] = picojson::value(static_cast<double>(callback->getErrorCode()));
- args[ERROR_CALLBACK_MESSAGE] = picojson::value(callback->getErrorMessage());
+ ReportSuccess(ret_val, obj);
+ } else {
+ PlatformResult ret = PlatformResult(callback->getErrorCode(), callback->getErrorMessage());
+ ReportError(ret, &obj);
}
LoggerD("%s", val.serialize().c_str());
picojson::value val = picojson::value(picojson::object());
picojson::object& obj = val.get<picojson::object>();
obj[JSON_CALLBACK_ID] = picojson::value(callback->getCallbackId());
- obj[JSON_DATA] = picojson::value(picojson::object());
-
- picojson::object& args = obj[JSON_DATA].get<picojson::object>();
if (!callback->isError()) {
LoggerW("The success callback should be not be called in this case");
} else {
- obj[JSON_ACTION] = picojson::value(JSON_CALLBACK_ERROR);
-
- args[ERROR_CALLBACK_CODE] = picojson::value(static_cast<double>(callback->getErrorCode()));
- args[ERROR_CALLBACK_MESSAGE] = picojson::value(callback->getErrorMessage());
+ PlatformResult ret = PlatformResult(callback->getErrorCode(), callback->getErrorMessage());
+ ReportError(ret, &obj);
}
LoggerD("%s", val.serialize().c_str());
obj[JSON_CALLBACK_ID] = picojson::value(callback->getCallbackId());
if (!callback->isError()) {
- obj[JSON_ACTION] = picojson::value(JSON_CALLBACK_SUCCCESS);
- obj[JSON_DATA] = picojson::value(picojson::array());
- picojson::array &arr = obj[JSON_DATA].get<picojson::array>();
+ picojson::value arr_val = picojson::value(picojson::array());
+ picojson::array& arr = arr_val.get<picojson::array>();
ArchiveFileEntryPtrMapPtr entries = callback->getEntries();
for(auto it = entries->begin(); it != entries->end(); it++) {
arr.push_back(val);
}
- } else {
- obj[JSON_ACTION] = picojson::value(JSON_CALLBACK_ERROR);
- obj[JSON_DATA] = picojson::value(picojson::object());
- picojson::object& args = obj[JSON_DATA].get<picojson::object>();
- args[ERROR_CALLBACK_CODE] = picojson::value(static_cast<double>(callback->getErrorCode()));
- args[ERROR_CALLBACK_MESSAGE] = picojson::value(callback->getErrorMessage());
+ ReportSuccess(arr_val, obj);
+ } else {
+ PlatformResult ret = PlatformResult(callback->getErrorCode(), callback->getErrorMessage());
+ ReportError(ret, &obj);
}
LoggerD("%s", val.serialize().c_str());
picojson::value val = picojson::value(picojson::object());
picojson::object& obj = val.get<picojson::object>();
obj[JSON_CALLBACK_ID] = picojson::value(callback->getCallbackId());
- obj[JSON_DATA] = picojson::value(picojson::object());
- picojson::object& args = obj[JSON_DATA].get<picojson::object>();
if (!callback->isError()) {
- obj[JSON_ACTION] = picojson::value(JSON_CALLBACK_SUCCCESS);
-
ArchiveFileEntryPtr ent = callback->getFileEntry();
- args[ARCHIVE_FILE_ENTRY_ATTR_NAME] = picojson::value(ent->getName());
- args[ARCHIVE_FILE_ENTRY_ATTR_SIZE] = picojson::value(
+ picojson::value ret_val = picojson::value(picojson::object());
+ picojson::object& ret = ret_val.get<picojson::object>();
+
+ ret[ARCHIVE_FILE_ENTRY_ATTR_NAME] = picojson::value(ent->getName());
+ ret[ARCHIVE_FILE_ENTRY_ATTR_SIZE] = picojson::value(
static_cast<double>(ent->getSize()));
- args[ARCHIVE_FILE_ENTRY_ATTR_MODIFIED] = picojson::value(
+ ret[ARCHIVE_FILE_ENTRY_ATTR_MODIFIED] = picojson::value(
static_cast<double>(ent->getModified()));
- args[ARCHIVE_FILE_ENTRY_ATTR_COMPRESSED_SIZE] = picojson::value(
+ ret[ARCHIVE_FILE_ENTRY_ATTR_COMPRESSED_SIZE] = picojson::value(
static_cast<double>(ent->getCompressedSize()));
- args[ARCHIVE_FILE_HANDLE] = picojson::value(
+ ret[ARCHIVE_FILE_HANDLE] = picojson::value(
static_cast<double>(callback->getHandle()));
- } else {
- obj[JSON_ACTION] = picojson::value(JSON_CALLBACK_ERROR);
- args[ERROR_CALLBACK_CODE] = picojson::value(static_cast<double>(callback->getErrorCode()));
- args[ERROR_CALLBACK_MESSAGE] = picojson::value(callback->getErrorMessage());
+ ReportSuccess(ret_val, obj);
+ } else {
+ PlatformResult ret = PlatformResult(callback->getErrorCode(), callback->getErrorMessage());
+ ReportError(ret, &obj);
}
LoggerD("%s", val.serialize().c_str());
namespace extension {
namespace archive {
-using namespace common;
+using common::tools::ReportSuccess;
+using common::tools::ReportError;
namespace {
const std::string kPrivilegeFilesystemRead = "http://tizen.org/privilege/filesystem.read";
#define REGISTER_SYNC(c,x) \
RegisterSyncHandler(c, std::bind(&ArchiveInstance::x, this, _1, _2));
#define REGISTER_ASYNC(c,x) \
- RegisterHandler(c, std::bind(&ArchiveInstance::x, this, _1, _2));
+ RegisterSyncHandler(c, std::bind(&ArchiveInstance::x, this, _1, _2));
REGISTER_ASYNC("ArchiveManager_open", Open);
REGISTER_SYNC("ArchiveManager_abort", Abort);
picojson::value val = picojson::value(picojson::object());
picojson::object& obj = val.get<picojson::object>();
obj[JSON_CALLBACK_ID] = picojson::value(callback_id);
- obj[JSON_DATA] = picojson::value(picojson::object());
-
- picojson::object& args = obj[JSON_DATA].get<picojson::object>();
- obj[JSON_ACTION] = picojson::value(JSON_CALLBACK_ERROR);
- args[ERROR_CALLBACK_CODE] = picojson::value(static_cast<double>(e.error_code()));
- args[ERROR_CALLBACK_MESSAGE] = picojson::value(e.message());
+ ReportError(e, &obj);
Instance::PostMessage(this, val.serialize().c_str());
}
CHECK_PRIVILEGE_ACCESS(kPrivilegeFilesystemWrite, &out);
- picojson::object data = args.get(JSON_DATA).get<picojson::object>();
+ picojson::object data = args.get<picojson::object>();
picojson::value v_file = data.at(PARAM_FILE);
picojson::value v_mode = data.at(PARAM_MODE);
picojson::value v_op_id = data.at(PARAM_OPERATION_ID);
LoggerD("Entered");
LoggerD("%s", args.serialize().c_str());
- picojson::object data = args.get(JSON_DATA).get<picojson::object>();
+ picojson::object data = args.get<picojson::object>();
picojson::value v_op_id = data.at(PARAM_OPERATION_ID);
const long op_id = static_cast<long>(v_op_id.get<double>());
CHECK_PRIVILEGE_ACCESS(kPrivilegeFilesystemWrite, &out);
- picojson::object data = args.get(JSON_DATA).get<picojson::object>();
+ picojson::object data = args.get<picojson::object>();
picojson::value v_source = data.at(PARAM_SOURCE_FILE);
picojson::value v_options = data.at(PARAM_OPTIONS);
picojson::value v_op_id = data.at(PARAM_OPERATION_ID);
CHECK_PRIVILEGE_ACCESS(kPrivilegeFilesystemWrite, &out);
- picojson::object data = args.get(JSON_DATA).get<picojson::object>();
+ picojson::object data = args.get<picojson::object>();
picojson::value v_dest_dir = data.at(PARAM_DESTINATION_DIR);
picojson::value v_overwrite = data.at(PARAM_OVERWRITE);
picojson::value v_op_id = data.at(PARAM_OPERATION_ID);
CHECK_PRIVILEGE_ACCESS(kPrivilegeFilesystemRead, &out);
- picojson::object data = args.get(JSON_DATA).get<picojson::object>();
+ picojson::object data = args.get<picojson::object>();
picojson::value v_op_id = data.at(PARAM_OPERATION_ID);
picojson::value v_handle = data.at(ARCHIVE_FILE_HANDLE);
CHECK_PRIVILEGE_ACCESS(kPrivilegeFilesystemRead, &out);
- picojson::object data = args.get(JSON_DATA).get<picojson::object>();
+ picojson::object data = args.get<picojson::object>();
picojson::value v_op_id = data.at(PARAM_OPERATION_ID);
picojson::value v_handle = data.at(ARCHIVE_FILE_HANDLE);
picojson::value v_name = data.at(PARAM_NAME);
LoggerD("Entered");
LoggerD("%s", args.serialize().c_str());
- picojson::object data = args.get(JSON_DATA).get<picojson::object>();
+ picojson::object data = args.get<picojson::object>();
picojson::value v_handle = data.at(ARCHIVE_FILE_HANDLE);
const long handle = static_cast<long>(v_handle.get<double>());
CHECK_PRIVILEGE_ACCESS(kPrivilegeFilesystemWrite, &out);
- picojson::object data = args.get(JSON_DATA).get<picojson::object>();
+ picojson::object data = args.get<picojson::object>();
picojson::value v_dest_dir = data.at(PARAM_DESTINATION_DIR);
picojson::value v_strip_name = data.at(PARAM_STRIP_NAME);
picojson::value v_overwrite = data.at(PARAM_OVERWRITE);
#define ARCHIVE_FUNCTION_API_ARCHIVE_FILE_ENTRY_EXTRACT "extract"
-#define JSON_CMD "cmd"
#define JSON_ACTION "action"
-#define JSON_CALLBACK_ID "cid"
-#define JSON_CALLBACK_SUCCCESS "success"
-#define JSON_CALLBACK_ERROR "error"
-#define JSON_CALLBACK_PROGRESS "progress"
-#define JSON_CALLBACK_KEEP "keep"
-#define JSON_DATA "args"
+#define JSON_CALLBACK_ID "callbackId"
+#define JSON_LISTENER_ID "listenerId"
+#define JSON_CALLBACK_PROGRESS "onprogress"
+#define JSON_ONPROGRESS_CALLBACK "ArchiveOnprogressCallback"
#define PARAM_FILE "file"
#define PARAM_MODE "mode"
#define ARCHIVE_FILE_ENTRY_ATTR_COMPRESSED_SIZE "compressedSize"
#define ARCHIVE_FILE_ENTRY_ATTR_MODIFIED "modified"
-#define ERROR_CALLBACK_CODE "code"
-#define ERROR_CALLBACK_MESSAGE "message"
-
#endif // _ARCHIVE_PLUGIN_DEFS_H_
{
LoggerD("Enter path %s", path.c_str());
std::string tmp_path = path.substr(kVirtualRootsDirectory.length());
- return tmp_path.substr(tmp_path.find(kSlash));
+ return tmp_path.find(kSlash) == std::string::npos ?
+ tmp_path : tmp_path.substr(tmp_path.find(kSlash));
}
} // filesystem