Implement sending stats to server

parent 4a3fae71
......@@ -23,7 +23,7 @@ ProgramRecognizer::~ProgramRecognizer()
{
}
void ProgramRecognizer::recognize(const std::string& pattern)
void ProgramRecognizer::recognize(const std::string& pattern, const std::string& taskID)
{
AppActivity_recognize_JNI(pattern);
AppActivity_recognize_JNI(pattern, taskID);
}
......@@ -27,6 +27,8 @@ private:
SuccessCallback successCallback;
ErrorCallback errorCallback;
std::string statsURL;
public:
static std::shared_ptr<ProgramRecognizer> &getInstance() {
......@@ -44,7 +46,7 @@ public:
ProgramRecognizer();
~ProgramRecognizer();
void recognize(const std::string& pattern);
void recognize(const std::string& pattern, const std::string& taskID);
void emitSuccessCallback(const std::vector<SignPrediction>& predictions) {
if(successCallback)
......@@ -63,6 +65,15 @@ public:
void setErrorCallback(const ErrorCallback &callback) {
errorCallback = callback;
}
void setStatsURL(const std::string& url) {
statsURL = url;
}
const std::string &getStatsURL() {
return statsURL;
}
};
#endif
......@@ -123,17 +123,58 @@ bool js_pm_ProgramRecognizer_recognize(JSContext *cx, uint32_t argc, jsval *vp)
js_proxy_t *proxy = jsb_get_js_proxy(obj);
ProgramRecognizer* cobj = (ProgramRecognizer *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pm_ProgramRecognizer_recognize : Invalid Native Object");
if (argc == 1) {
if (argc == 2) {
std::string arg0;
ok &= jsval_to_std_string(cx, args.get(0), &arg0);
JSB_PRECONDITION2(ok, cx, false, "js_pm_ProgramRecognizer_recognize : Error processing arguments");
cobj->recognize(arg0);
std::string arg1;
ok &= jsval_to_std_string(cx, args.get(1), &arg1);
JSB_PRECONDITION2(ok, cx, false, "js_pm_ProgramRecognizer_recognize : Error processing arguments");
cobj->recognize(arg0, arg1);
args.rval().setUndefined();
return true;
}
JS_ReportError(cx, "js_pm_ProgramRecognizer_recognize : wrong number of arguments: %d, was expecting %d", argc, 2);
return false;
}
bool js_pm_ProgramRecognizer_setStatsURL(JSContext *cx, uint32_t argc, jsval *vp)
{
JS::CallArgs args = JS::CallArgsFromVp(argc, vp);
bool ok = true;
JS::RootedObject obj(cx, args.thisv().toObjectOrNull());
js_proxy_t *proxy = jsb_get_js_proxy(obj);
ProgramRecognizer* cobj = (ProgramRecognizer *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pm_ProgramRecognizer_setStatsURL : Invalid Native Object");
if (argc == 1) {
std::string arg0;
ok &= jsval_to_std_string(cx, args.get(0), &arg0);
JSB_PRECONDITION2(ok, cx, false, "js_pm_ProgramRecognizer_setStatsURL : Error processing arguments");
cobj->setStatsURL(arg0);
args.rval().setUndefined();
return true;
}
JS_ReportError(cx, "js_pm_ProgramRecognizer_setStatsURL : wrong number of arguments: %d, was expecting %d", argc, 1);
return false;
}
bool js_pm_ProgramRecognizer_getStatsURL(JSContext *cx, uint32_t argc, jsval *vp)
{
JS::CallArgs args = JS::CallArgsFromVp(argc, vp);
JS::RootedObject obj(cx, args.thisv().toObjectOrNull());
js_proxy_t *proxy = jsb_get_js_proxy(obj);
ProgramRecognizer* cobj = (ProgramRecognizer *)(proxy ? proxy->ptr : NULL);
JSB_PRECONDITION2( cobj, cx, false, "js_pm_ProgramRecognizer_getStatsURL : Invalid Native Object");
if (argc == 0) {
const std::string& ret = cobj->getStatsURL();
jsval jsret = JSVAL_NULL;
jsret = std_string_to_jsval(cx, ret);
args.rval().set(jsret);
return true;
}
JS_ReportError(cx, "js_pm_ProgramRecognizer_getStatsURL : wrong number of arguments: %d, was expecting %d", argc, 0);
return false;
}
bool js_pm_ProgramRecognizer_setErrorCallback(JSContext *cx, uint32_t argc, jsval *vp) {
JS::CallArgs args = JS::CallArgsFromVp(argc, vp);
bool ok = true;
......@@ -274,9 +315,11 @@ void js_register_pm_ProgramRecognizer(JSContext *cx, JS::HandleObject global) {
JS_PS_END
};
static JSFunctionSpec funcs[] = {
JS_FN("recognize", js_pm_ProgramRecognizer_recognize, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("recognize", js_pm_ProgramRecognizer_recognize, 2, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("setErrorCallback", js_pm_ProgramRecognizer_setErrorCallback, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("setSuccessCallback", js_pm_ProgramRecognizer_setSuccessCallback, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("setStatsURL", js_pm_ProgramRecognizer_setStatsURL, 1, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FN("getStatsURL", js_pm_ProgramRecognizer_getStatsURL, 0, JSPROP_PERMANENT | JSPROP_ENUMERATE),
JS_FS_END
};
static JSFunctionSpec st_funcs[] = {
......
......@@ -19,6 +19,9 @@ void js_register_pm_ProgramRecognizer(JSContext *cx, JS::HandleObject global);
bool js_pm_ProgramRecognizer_recognize(JSContext *cx, uint32_t argc, jsval *vp);
bool js_pm_ProgramRecognizer_setErrorCallback(JSContext *cx, uint32_t argc, jsval *vp);
bool js_pm_ProgramRecognizer_getInstance(JSContext *cx, uint32_t argc, jsval *vp);
bool js_pm_ProgramRecognizer_setStatsURL(JSContext *cx, uint32_t argc, jsval *vp);
bool js_pm_ProgramRecognizer_getStatsURL(JSContext *cx, uint32_t argc, jsval *vp);
void register_all_recognition(JSContext* cx, JS::HandleObject obj);
#endif /* jsb_pm_btle_manager_h */
......@@ -51,15 +51,21 @@ extern "C" {
Tabulator::getInstance()->recognizeTable(result, width, height);
ProgramRecognizer::getInstance()->emitSuccessCallback(result);
}
JNIEXPORT jstring JNICALL Java_ru_niisi_Piktomir_ProgramRecognizer_getStatsURL(JNIEnv *env, jclass clazz) {
auto url = ProgramRecognizer::getInstance()->getStatsURL();
return env->NewStringUTF(url.c_str());
}
}
void AppActivity_recognize_JNI(const std::string& pattern) {
void AppActivity_recognize_JNI(const std::string& pattern, const std::string& taskID) {
JniMethodInfo methodInfo;
if (JniHelper::getStaticMethodInfo(methodInfo, appActivityClassName.c_str(), "recognize", "(Ljava/lang/String;)V"))
if (JniHelper::getStaticMethodInfo(methodInfo, appActivityClassName.c_str(), "recognize", "(Ljava/lang/String;Ljava/lang/String;)V"))
{
jstring stringArg1 = methodInfo.env->NewStringUTF(pattern.c_str());
jstring stringArg2 = methodInfo.env->NewStringUTF(taskID.c_str());
methodInfo.env->CallStaticVoidMethod(methodInfo.classID, methodInfo.methodID, stringArg1);
methodInfo.env->CallStaticVoidMethod(methodInfo.classID, methodInfo.methodID, stringArg1, stringArg2);
methodInfo.env->DeleteLocalRef(methodInfo.classID);
}
}
......@@ -8,6 +8,6 @@
#include <jni.h>
#include <string>
void AppActivity_recognize_JNI(const std::string& pattern);
void AppActivity_recognize_JNI(const std::string& pattern, const std::string& taskID);
#endif //RU_NIISI_PIKTOMIR_PROGRAMRECOGNIZER_H
......@@ -188,6 +188,8 @@ dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation project(':libcocos2dx')
implementation 'com.android.billingclient:billing:1.1'
implementation 'com.fasterxml.jackson.core:jackson-core:2.9.8'
implementation 'com.fasterxml.jackson.core:jackson-databind:2.9.8'
implementation 'com.android.support:support-v4:27.+'
implementation 'org.tensorflow:tensorflow-android:+'
}
......@@ -24,6 +24,17 @@
*;
}
-keep public class **.RecognizeResult {
*;
}
-keep class com.fasterxml.jackson.databind.ObjectMapper {
public <methods>;
protected <methods>;
}
-keepnames class com.fasterxml.jackson.** { *; }
-dontwarn com.fasterxml.jackson.databind.**
# Proguard Cocos2d-x for release
-keep public class org.cocos2dx.** { *; }
-dontwarn org.cocos2dx.**
......
......@@ -61,10 +61,9 @@ public class AppActivity extends Cocos2dxActivity {
private static final int PERMISSION_REQUEST_CAMERA = 3;
private static Context context;
static final int REQUEST_IMAGE_CAPTURE = 1;
private static String recognizePattern;
private static String currentPhotoPath;
private static String recognizeTaskID;
@Override
public Cocos2dxGLSurfaceView onCreateView() {
......@@ -193,14 +192,16 @@ public class AppActivity extends Cocos2dxActivity {
Intent i = new Intent(AppActivity.getAppContext(), RecognizeActivity.class);
i.putExtra("RECOGNIZE_PATTERN", recognizePattern);
i.putExtra("RECOGNIZE_TASKID", recognizeTaskID);
Activity act = (Activity)getContext();
act.startActivity(i);
}
public static void recognize(String pattern)
public static void recognize(String pattern, String taskID)
{
recognizePattern = pattern;
recognizeTaskID = taskID;
if (context.getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)) {
AppActivity.requestCameraPermission();
......
......@@ -4,8 +4,7 @@ import android.graphics.Bitmap;
import java.io.ByteArrayOutputStream;
import java.util.LinkedList;
import java.util.List;
import android.graphics.RectF;
import com.fasterxml.jackson.databind.ObjectMapper;
import android.util.Log;
import org.cocos2dx.javascript.AppActivity;
......@@ -19,7 +18,7 @@ public class ProgramRecognizer {
private static TFObjectDetection objectDetection;
public static void recognize(Bitmap image, String pattern)
public static void recognize(Bitmap image, String pattern, final String taskID)
{
try{
......@@ -49,15 +48,9 @@ public class ProgramRecognizer {
List<SignPrediction> mappedRecognitions = new LinkedList<>();
// StringBuilder recStr = new StringBuilder("Recognized signs: \n");
for (SignPrediction result : results) {
// final RectF location = result.getLocation();
if (result.getConfidence() >= MINIMUM_CONFIDENCE) {
mappedRecognitions.add(result);
//
// recStr.append(result.getLabel());
// recStr.append( "\n");
}
}
......@@ -69,6 +62,15 @@ public class ProgramRecognizer {
resizedImage.getHeight()
);
ObjectMapper mapper = new ObjectMapper();
try {
String jsonString = mapper.writeValueAsString(new RecognizeResult(mappedRecognitions));
sendStats(resizedImage, jsonString, taskID);
}
catch (Exception e) {
Log.v("Piktomir", "Could not generate json to send stats: " + e.getLocalizedMessage());
}
}
}).start();
}
......@@ -79,6 +81,44 @@ public class ProgramRecognizer {
}
}
public static void sendStats(Bitmap image, String result, String taskID)
{
try {
String charset = "UTF-8";
String requestURL = getStatsURL();
MultipartUtility multipart = new MultipartUtility(requestURL, charset);
multipart.addFilePart("processed_result", "processed_result.json", result.getBytes(Charset.forName("UTF-8")));
multipart.addFormField("task_id", taskID);
ByteArrayOutputStream stream = new ByteArrayOutputStream();
// Encode the bitmap before reading the bytes, otherwise the "image" part is empty
// (mirrors the iOS side, which uses UIImageJPEGRepresentation(image, 0.75)).
image.compress(Bitmap.CompressFormat.JPEG, 75, stream);
byte[] byteArray = stream.toByteArray();
multipart.addFilePart("image", "image.jpg", byteArray);
List<String> response = multipart.finish();
Log.v("Piktomir", "Put stats on ml server result:");
String fullResp = "";
for (String line : response) {
fullResp += line + "\n";
}
Log.v("Piktomir", fullResp);
} catch (Exception e) {
e.printStackTrace();
Log.v("Piktomir", "Put stats on ml server error: " + e.getLocalizedMessage());
}
}
public static native String getStatsURL();
public static native void emitErrorCallback(String error);
public static native void emitSuccessCallback(SignPrediction []predictions, float width, float height);
}
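For reference, sendStats() above relies on a MultipartUtility helper that is not part of this diff. The sketch below is an assumption of what such a helper could look like, reconstructed only from the calls made here: a (url, charset) constructor, addFilePart(fieldName, fileName, bytes), addFormField(name, value), and finish(). The project's real implementation may differ.

```java
// Hypothetical sketch of the MultipartUtility helper used by ProgramRecognizer.sendStats();
// it is not included in this commit, so names and behaviour here are assumptions.
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

public class MultipartUtility {
    private static final String LINE_FEED = "\r\n";
    private final String boundary = "----" + UUID.randomUUID();
    private final HttpURLConnection connection;
    private final OutputStream outputStream;
    private final PrintWriter writer;

    public MultipartUtility(String requestURL, String charset) throws Exception {
        connection = (HttpURLConnection) new URL(requestURL).openConnection();
        connection.setDoOutput(true);
        connection.setRequestMethod("POST");
        connection.setRequestProperty("Content-Type", "multipart/form-data; boundary=" + boundary);
        outputStream = connection.getOutputStream();
        writer = new PrintWriter(new OutputStreamWriter(outputStream, charset), true);
    }

    // Plain text field, e.g. task_id.
    public void addFormField(String name, String value) {
        writer.append("--").append(boundary).append(LINE_FEED);
        writer.append("Content-Disposition: form-data; name=\"").append(name).append("\"").append(LINE_FEED);
        writer.append(LINE_FEED).append(value).append(LINE_FEED);
        writer.flush();
    }

    // Binary file part, e.g. processed_result.json or image.jpg.
    public void addFilePart(String fieldName, String fileName, byte[] data) throws Exception {
        writer.append("--").append(boundary).append(LINE_FEED);
        writer.append("Content-Disposition: form-data; name=\"").append(fieldName)
              .append("\"; filename=\"").append(fileName).append("\"").append(LINE_FEED);
        writer.append("Content-Type: application/octet-stream").append(LINE_FEED).append(LINE_FEED);
        writer.flush();
        outputStream.write(data);
        outputStream.flush();
        writer.append(LINE_FEED);
        writer.flush();
    }

    // Close the multipart body and return the server response line by line.
    public List<String> finish() throws Exception {
        writer.append("--").append(boundary).append("--").append(LINE_FEED);
        writer.close();
        List<String> response = new ArrayList<>();
        BufferedReader reader = new BufferedReader(new InputStreamReader(connection.getInputStream()));
        String line;
        while ((line = reader.readLine()) != null)
            response.add(line);
        reader.close();
        connection.disconnect();
        return response;
    }
}
```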
......@@ -50,8 +50,9 @@ public class RecognizeActivity extends Activity {
Intent intent = getIntent();
String recognizePattern = intent.getStringExtra("RECOGNIZE_PATTERN");
String recognizeTaskID = intent.getStringExtra("RECOGNIZE_TASKID");
ProgramRecognizer.recognize(mutableBitmap, recognizePattern);
ProgramRecognizer.recognize(mutableBitmap, recognizePattern, recognizeTaskID);
finish();
}
......
package ru.niisi.Piktomir;
import android.support.annotation.NonNull;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import java.util.LinkedList;
import java.util.List;
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY)
public class RecognizeResult {
private int width;
private int height;
private String[] signs;
RecognizeResult(@NonNull List<SignPrediction> predictions) {
width = predictions.size();
height = 1;
List<String> result = new LinkedList<>();
for(SignPrediction prediction: predictions)
result.add(prediction.getLabel());
signs = result.toArray(new String[0]);
}
}
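With fieldVisibility set to ANY, Jackson picks up the private fields directly, so no getters are needed. A standalone illustration of the JSON shape that ends up in processed_result.json, with made-up labels and default ObjectMapper settings assumed:

```java
// Illustration only: Demo mirrors RecognizeResult's private fields; the labels are hypothetical.
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.databind.ObjectMapper;

public class RecognizeResultJsonDemo {
    @JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY)
    static class Demo {
        private int width = 2;
        private int height = 1;
        private String[] signs = {"forward", "turn_left"};
    }

    public static void main(String[] args) throws Exception {
        // Prints: {"width":2,"height":1,"signs":["forward","turn_left"]}
        System.out.println(new ObjectMapper().writeValueAsString(new Demo()));
    }
}
```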
......@@ -28,15 +28,17 @@ ProgramRecognizer::~ProgramRecognizer()
{
}
void ProgramRecognizer::recognize(const std::string& pattern)
void ProgramRecognizer::recognize(const std::string& pattern, const std::string& taskID)
{
NSString *nsPattern = [NSString stringWithCString:pattern.c_str() encoding:[NSString defaultCStringEncoding]];
NSString *nsTaskID = [NSString stringWithCString:taskID.c_str() encoding:[NSString defaultCStringEncoding]];
NSString * storyboardName = @"PMStoryboard";
UIStoryboard *storyboard = [UIStoryboard storyboardWithName:storyboardName bundle: nil];
RecognizeViewController * vc = [storyboard instantiateViewControllerWithIdentifier:@"RecognizeViewController"];
vc.pattern = nsPattern;
vc.taskID = nsTaskID;
[[UIApplication sharedApplication].keyWindow.rootViewController presentViewController: vc animated:YES completion:NULL];
}
......@@ -17,7 +17,7 @@
+ (RecognizeProcessor *)sharedInstance;
+ (void) recognizeErrorCallback: (NSString *)error;
- (void) recognizeBuffer:(CMSampleBufferRef) imageSampleBuffer pattern: (NSString *)pattern;
- (void) recognizeBuffer:(CMSampleBufferRef) imageSampleBuffer pattern: (NSString *)pattern taskID: (NSString *)taskID;
@end
#endif /* RecognizeProcessor_h */
......@@ -36,7 +36,7 @@
</dict>
</array>
<key>CFBundleVersion</key>
<string>0</string>
<string>7</string>
<key>ITSAppUsesNonExemptEncryption</key>
<false/>
<key>LSRequiresIPhoneOS</key>
......
......@@ -37,14 +37,14 @@ API_AVAILABLE(ios(11.0))
return self;
}
- (void) recognizeBuffer:(CMSampleBufferRef) imageSampleBuffer pattern: (NSString *)pattern {
- (void) recognizeBuffer:(CMSampleBufferRef) imageSampleBuffer pattern: (NSString *)pattern taskID: (NSString *)taskID {
NSString *ver = [[UIDevice currentDevice] systemVersion];
float version = [ver floatValue];
if(version >= 11.0)
{
[self localRecognize: imageSampleBuffer];
[self localRecognize: imageSampleBuffer taskID: taskID];
}
else
{
......@@ -67,7 +67,7 @@ API_AVAILABLE(ios(11.0))
}
}
- (void) localRecognize: (CMSampleBufferRef) imageSampleBuffer {
- (void) localRecognize: (CMSampleBufferRef) imageSampleBuffer taskID: (NSString *)taskID {
if (@available(iOS 11.0, *)) {
if(!self.model)
......@@ -89,17 +89,19 @@ API_AVAILABLE(ios(11.0))
VNCoreMLRequest *req = [[VNCoreMLRequest alloc] initWithModel: self.model completionHandler: (VNRequestCompletionHandler) ^(VNRequest *request, NSError *error){
dispatch_async(dispatch_get_main_queue(), ^{
[self processMLRecognize: request.results withLabels: self.labels imageSize: image.size];
NSDictionary * result = [self processMLRecognize: request.results withLabels: self.labels imageSize: image.size];
[self sendStats: image withResult: result taskID: taskID];
CFRelease(ciImage);
[image release];
// [imageData release];
});
}];
// req.imageCropAndScaleOption = VNImageCropAndScaleOptionScaleFill;
[handler performRequests:@[req] error: &error];
CFRelease(ciImage);
[image release];
// [imageData release];
});
}
else {
......@@ -130,7 +132,7 @@ API_AVAILABLE(ios(11.0))
}
}
- (void) processMLRecognize: (NSArray *) results withLabels: (NSArray<NSString *> *) labels imageSize: (CGSize) imageSize {
- (NSDictionary *) processMLRecognize: (NSArray *) results withLabels: (NSArray<NSString *> *) labels imageSize: (CGSize) imageSize {
if (@available(iOS 11.0, *)) {
......@@ -225,11 +227,29 @@ API_AVAILABLE(ios(11.0))
Tabulator::getInstance()->recognizeTable(keptPredictions, imageSize.width, imageSize.height);
ProgramRecognizer::getInstance()->emitSuccessCallback(keptPredictions);
NSMutableArray *predictions = [[NSMutableArray alloc] init];
for (auto &prediction: keptPredictions) {
NSString *label = [NSString stringWithCString:prediction.label.c_str() encoding:[NSString defaultCStringEncoding]];
[predictions addObject: label];
}
NSDictionary *result = @{
@"munich_mode": @true,
@"width": [NSNumber numberWithUnsignedLong:keptPredictions.size()],
@"height": @1,
// Keep "signs" a flat array of labels, matching the Android RecognizeResult JSON.
@"signs": predictions
};
return result;
}
}
else {
ProgramRecognizer::getInstance()->emitErrorCallback("Local recognition is not supported on iOS versions below 11.0");
}
return nil;
}
- (float) IoU: (CGRect) a withOther: (CGRect) b {
......@@ -239,11 +259,84 @@ API_AVAILABLE(ios(11.0))
return ((float) intersection.size.width * intersection.size.height) / ((float) unionRect.size.width * unionRect.size.height);
}
- (void) sendStats: (UIImage *)image withResult: (NSDictionary *) result taskID: (NSString *)taskID
{
if(image == nil || result == nil)
return;
auto serverURL = ProgramRecognizer::getInstance()->getStatsURL();
NSString *url = [NSString stringWithCString:serverURL.c_str() encoding:[NSString defaultCStringEncoding]];
NSMutableURLRequest *request = [[NSMutableURLRequest alloc] initWithURL:[NSURL URLWithString: url]];
NSData *imageData = UIImageJPEGRepresentation(image, 0.75);
NSError *error;
NSData *jsonData = [NSJSONSerialization dataWithJSONObject:result
options:NSJSONWritingPrettyPrinted
error:&error];
if (!jsonData) {
NSLog(@"Error getting json from recognition result: %@", error);
return;
}
[request setCachePolicy:NSURLRequestReloadIgnoringLocalCacheData];
[request setHTTPShouldHandleCookies:NO];
[request setTimeoutInterval:60];
[request setHTTPMethod:@"POST"];
NSString *boundary = [[NSUUID UUID] UUIDString];
// set Content-Type in HTTP header
NSString *contentType = [NSString stringWithFormat:@"multipart/form-data; boundary=%@", boundary];
[request setValue:contentType forHTTPHeaderField: @"Content-Type"];
// post body
NSMutableData *body = [NSMutableData data];
//Result json
[body appendData:[[NSString stringWithFormat:@"--%@\r\n", boundary] dataUsingEncoding:NSUTF8StringEncoding]];
[body appendData:[[NSString stringWithFormat:@"Content-Disposition: form-data; name=%@; filename=processed_result.json\r\n", @"processed_result"] dataUsingEncoding:NSUTF8StringEncoding]];
[body appendData:[@"Content-Type: application/json\r\n\r\n" dataUsingEncoding:NSUTF8StringEncoding]];
[body appendData:jsonData];
[body appendData:[[NSString stringWithFormat:@"\r\n"] dataUsingEncoding:NSUTF8StringEncoding]];
// Task id
[body appendData:[[NSString stringWithFormat:@"--%@\r\n", boundary] dataUsingEncoding:NSUTF8StringEncoding]];
[body appendData:[@"Content-Disposition: form-data; name=task_id\r\n\r\n" dataUsingEncoding:NSUTF8StringEncoding]];
[body appendData:[taskID dataUsingEncoding:NSUTF8StringEncoding]];
[body appendData:[[NSString stringWithFormat:@"\r\n"] dataUsingEncoding:NSUTF8StringEncoding]];
//Image
[body appendData:[[NSString stringWithFormat:@"--%@\r\n", boundary] dataUsingEncoding:NSUTF8StringEncoding]];
[body appendData:[[NSString stringWithFormat:@"Content-Disposition: form-data; name=%@; filename=image.jpg\r\n", @"image"] dataUsingEncoding:NSUTF8StringEncoding]];
[body appendData:[@"Content-Type: image/jpeg\r\n\r\n" dataUsingEncoding:NSUTF8StringEncoding]];
[body appendData:imageData];
[body appendData:[[NSString stringWithFormat:@"\r\n"] dataUsingEncoding:NSUTF8StringEncoding]];
[body appendData:[[NSString stringWithFormat:@"--%@--\r\n", boundary] dataUsingEncoding:NSUTF8StringEncoding]];
// setting the body of the post to the request
[request setHTTPBody:body];
// set the content-length
NSString *postLength = [NSString stringWithFormat:@"%lu", [body length]];
[request setValue:postLength forHTTPHeaderField:@"Content-Length"];
NSURLSession *session = [NSURLSession sharedSession]; // use sharedSession or create your own
NSURLSessionTask *task = [session uploadTaskWithRequest:request fromData:body completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
if (error)
NSLog(@"Error sending stats: %@", error);
}];
[task resume];
}
+ (void) recognizeErrorCallback: (NSString *)error
{
if(error != nil)
{
NSLog(@"error = %@", error);
NSLog(@"Recognize error: %@", error);
ProgramRecognizer::getInstance()->emitErrorCallback([error UTF8String]);
}
}
......
......@@ -11,6 +11,7 @@
@interface RecognizeViewController: UIViewController<AVCapturePhotoCaptureDelegate>
@property (strong, nonatomic) NSString *pattern;
@property (strong, nonatomic) NSString *taskID;
@end
#endif /* RecognizeViewController_h */
......@@ -57,7 +57,7 @@ struct Prediction {
[[self sessionOutput] captureStillImageAsynchronouslyFromConnection: videoConnection
completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
[[RecognizeProcessor sharedInstance] recognizeBuffer: imageSampleBuffer pattern: self.pattern];
[[RecognizeProcessor sharedInstance] recognizeBuffer: imageSampleBuffer pattern: self.pattern taskID: self.taskID];
[self exit];
}];
}
......
......@@ -75,7 +75,8 @@ pm.btleManager = pm.BTLEManager ? pm.BTLEManager.getInstance() : {
pm.programRecognizer = pm.ProgramRecognizer ? pm.ProgramRecognizer.getInstance() : {
setSuccessCallback: function(callback) {},
setErrorCallback: function(callback) {},
recognize: function(data) {}
recognize: function(data, taskID) {},
setStatsURL: function(url) {}
};
pm.broadcastServer = pm.BroadcastServer ? pm.BroadcastServer.getInstance() : {};
......
......@@ -129,6 +129,8 @@ var LoadLayer = cc.Layer.extend(/** @extends LoadLayer# */{
{
if(pm.settings.getPhysicalRobotType() === pm.PhysicalRobotType.BLE)
pm.btleManager.init();
pm.programRecognizer.setStatsURL(pm.appConfig.mlStatsURL);
}
cc.director.runScene(new StartMenuScene());
......
......@@ -828,7 +828,9 @@ var ProgramLayer = ccui.Layout.extend(/** @lends ProgramLayer# */{
}
}
pm.programRecognizer.recognize(JSON.stringify(sendData));
var taskID = "{0}_{1}_{2}".format(world.id, pm.settings.getGame(), pm.settings.getGame());
pm.programRecognizer.recognize(JSON.stringify(sendData), taskID);
pm.programRecognizer.setSuccessCallback(this._onProgramRecognize.bind(this));
pm.programRecognizer.setErrorCallback(this._onRecognizeError.bind(this));
},
......