//ios/KeyWordRNBridge.m

#import "KeyWordRNBridge.h"
#import <AVFoundation/AVFoundation.h>
#import <Speech/Speech.h>
#import <React/RCTBridge.h>
#import <React/RCTLog.h>
#import <React/RCTEventEmitter.h>
//#import "KeyWordsDetection.h" // Import your KeyWordsDetection library header
// Speaker verification bridge (Swift) is called dynamically via objc_msgSend
#import <objc/message.h>


// Ensure the protocol is correctly imported or declared
// Assuming the protocol is named 'KeywordDetectionRNDelegate'
// Wraps one native KeyWordsDetection instance and relays its delegate
// callbacks to JS through the owning KeyWordRNBridge, tagging every event
// with this wrapper's instanceId so JS can demultiplex multiple detectors.
@interface KeyWordsDetectionWrapper : NSObject <KeywordDetectionRNDelegate>

// Underlying native detector (project library; created by the initializers).
@property (nonatomic, strong) KeyWordsDetection *keyWordsDetection;
// RN-side identifier echoed back in every emitted event.
@property (nonatomic, strong) NSString *instanceId;
// weak: the bridge owns this wrapper, not the other way around.
@property (nonatomic, weak) KeyWordRNBridge *bridge;

// Single-model initializer. Returns nil on failure; see `error`.
- (instancetype)initWithInstanceId:(NSString *)instanceId
                         modelName:(NSString *)modelName
                         threshold:(float)threshold
                         bufferCnt:(NSInteger)bufferCnt
                            bridge:(KeyWordRNBridge *)bridge
                             error:(NSError **)error;

// Multi-model initializer. Presumably the arrays are parallel (one entry per
// model) — confirm against the KeyWordsDetection API. Returns nil on failure.
- (instancetype)initWithInstanceId:(NSString *)instanceId
                        modelNames:(NSArray<NSString *> *)modelNames
                        thresholds:(NSArray<NSNumber *> *)thresholds
                        bufferCnts:(NSArray<NSNumber *> *)bufferCnts
                 msBetweenCallback:(NSArray<NSNumber *> *)msBetweenCallback
                            bridge:(KeyWordRNBridge *)bridge
                             error:(NSError **)error;

@end

@implementation KeyWordsDetectionWrapper

// Single-model initializer. Returns nil when the native detector cannot be
// created; the detector populates *error (when `error` is non-NULL).
- (instancetype)initWithInstanceId:(NSString *)instanceId
                         modelName:(NSString *)modelName
                         threshold:(float)threshold
                         bufferCnt:(NSInteger)bufferCnt
                            bridge:(KeyWordRNBridge *)bridge
                             error:(NSError **)error
{
    if (self = [super init]) {
        _instanceId = [instanceId copy]; // defensive copy (callers may pass NSMutableString)
        _bridge = bridge;
        _keyWordsDetection = [[KeyWordsDetection alloc] initWithModelPath:modelName threshold:threshold bufferCnt:bufferCnt error:error];
        // Check the returned object (Cocoa convention) and guard the error
        // pointer — the old `if (*error)` crashed when callers passed NULL.
        if (_keyWordsDetection == nil || (error && *error)) {
            return nil;
        }
        _keyWordsDetection.delegate = self;
    }
    return self;
}

// Multi-model initializer; same failure contract as the single-model variant.
- (instancetype)initWithInstanceId:(NSString *)instanceId
                        modelNames:(NSArray<NSString *> *)modelNames
                        thresholds:(NSArray<NSNumber *> *)thresholds
                        bufferCnts:(NSArray<NSNumber *> *)bufferCnts
                 msBetweenCallback:(NSArray<NSNumber *> *)msBetweenCallback
                            bridge:(KeyWordRNBridge *)bridge
                             error:(NSError **)error {
    if (self = [super init]) {
        _instanceId = [instanceId copy];
        _bridge = bridge;

        // Re-box thresholds as float-valued NSNumbers before handing off.
        NSMutableArray<NSNumber *> *floatThresholds = [NSMutableArray arrayWithCapacity:thresholds.count];
        for (NSNumber *num in thresholds) {
            [floatThresholds addObject:@(num.floatValue)];
        }

        _keyWordsDetection = [[KeyWordsDetection alloc] initWithModelPaths:modelNames
                                                                thresholds:floatThresholds
                                                                bufferCnts:bufferCnts
                                                         msBetweenCallback:msBetweenCallback
                                                                      error:error];
        if (_keyWordsDetection == nil || (error && *error)) {
            return nil;
        }
        _keyWordsDetection.delegate = self;
    }
    return self;
}

// KeywordDetectionRNDelegate: forward a detection event to JS, tagged with
// this wrapper's instanceId.
- (void)KeywordDetectionDidDetectEvent:(NSDictionary *)eventInfo {
    // eventInfo may be nil; start from an empty payload so the instanceId tag
    // is always delivered (matches SVMicDelegateProxy's behavior).
    NSMutableDictionary *mutableEventInfo = eventInfo ? [eventInfo mutableCopy] : [NSMutableDictionary new];
    mutableEventInfo[@"instanceId"] = self.instanceId;
    [_bridge sendEventWithName:@"onKeywordDetectionEvent" body:mutableEventInfo];
}

@end

 
// ============================================================
// MARK: - Speaker Verification: native holder (opaque Swift object)
// ============================================================

// Pairs an opaque Swift speaker-verification engine with its RN-side id.
@interface SVVerifierHolder : NSObject
@property (nonatomic, strong) id engine;        // opaque Swift object
@property (nonatomic, copy) NSString *engineId; // copy: guard against mutable-subclass aliasing
@end

@implementation SVVerifierHolder
@end

// ============================================================
// MARK: - Speaker Verification Mic Controller holder + delegate proxy
// ============================================================

// Holds one opaque Swift mic controller plus the ObjC delegate proxy that
// receives its callbacks (the proxy must be retained here or it deallocates).
@interface SVMicHolder : NSObject
@property (nonatomic, strong) id controller;        // opaque Swift object (SpeakerVerificationMicController)
@property (nonatomic, copy) NSString *controllerId; // copy: guard against mutable-subclass aliasing
@property (nonatomic, strong) id delegateProxy;     // ObjC proxy that receives Swift callbacks
@end
@implementation SVMicHolder @end

// Receives Swift mic-controller callbacks and republishes them as RN events,
// tagging every payload with the owning controllerId.
@interface SVMicDelegateProxy : NSObject
@property (nonatomic, weak) KeyWordRNBridge *bridge;
@property (nonatomic, strong) NSString *controllerId;
@end

@implementation SVMicDelegateProxy

// Shared forwarding path: copy (or create) the payload, tag it, and emit.
- (void)forwardEvent:(NSString *)eventName withInfo:(NSDictionary *)info {
    NSMutableDictionary *payload = info ? [info mutableCopy] : [NSMutableDictionary new];
    payload[@"controllerId"] = self.controllerId ?: @"";
    [self.bridge sendEventWithName:eventName body:payload];
}

- (void)svOnboardingProgress:(NSDictionary *)info {
    [self forwardEvent:@"onSpeakerVerificationOnboardingProgress" withInfo:info];
}

- (void)svOnboardingDone:(NSDictionary *)info {
    [self forwardEvent:@"onSpeakerVerificationOnboardingDone" withInfo:info];
}

- (void)svVerifyResult:(NSDictionary *)info {
    [self forwardEvent:@"onSpeakerVerificationVerifyResult" withInfo:info];
}

- (void)svError:(NSDictionary *)info {
    [self forwardEvent:@"onSpeakerVerificationError" withInfo:info];
}

@end


// Resolve a file path from:
//  1) absolute path (exists)
//  2) main bundle resource "name.ext"
//  3) bundle scan by lastPathComponent (RN sometimes hashes folders)
//  4) if found in bundle but not a stable path, copy to tmp and return tmp path
// Resolves a model/enrollment/audio file reference to a concrete path:
//   1) absolute path that exists is returned as-is;
//   2) direct main-bundle lookup by stem + extension;
//   3) bundle scan by file name (RN sometimes nests/hashes resource folders).
// Returns nil when nothing matches.
static NSString * _Nullable SVResolveFilePath(NSString *input) {
    if (input.length == 0) return nil; // covers nil too (messaging nil yields 0)

    NSFileManager *fm = [NSFileManager defaultManager];

    // Case 1: caller already handed us an absolute path to an existing file.
    if ([input hasPrefix:@"/"] && [fm fileExistsAtPath:input]) {
        return input;
    }

    NSString *leafName = input.lastPathComponent;
    NSString *stem = leafName.stringByDeletingPathExtension;
    NSString *extension = leafName.pathExtension;

    // Case 2: direct bundle lookup.
    NSString *bundlePath = [[NSBundle mainBundle] pathForResource:stem
                                                           ofType:(extension.length ? extension : nil)];
    if (bundlePath && [fm fileExistsAtPath:bundlePath]) {
        return bundlePath;
    }

    // Case 3: scan bundle resources by last path component.
    NSArray<NSString *> *extensionsToTry = extension.length ? @[extension] : @[@"onnx", @"json", @"wav"];
    for (NSString *candidateExt in extensionsToTry) {
        for (NSString *resourcePath in [[NSBundle mainBundle] pathsForResourcesOfType:candidateExt inDirectory:nil]) {
            if ([resourcePath.lastPathComponent isEqualToString:leafName]) {
                return resourcePath;
            }
        }
    }

    return nil;
}

// Returns a stable filesystem path for `path`. An existing path is returned
// unchanged (bundle paths are valid; ORT just needs a real file path). When
// the path does not exist, attempts to copy the matching bundle resource
// (named `preferredName`) into NSTemporaryDirectory(). Returns nil otherwise.
static NSString * _Nullable SVCopyToTempIfNeeded(NSString *path, NSString *preferredName) {
    if (!path) return nil;
    NSFileManager *fm = [NSFileManager defaultManager];
    if ([fm fileExistsAtPath:path]) {
        // Keep copy logic below for safety (some resources could be non-file
        // URLs in edge cases), but an existing path needs no copy.
        return path;
    }

    if (preferredName.length == 0) return nil;

    // Fallback: locate the resource in the bundle and copy it to tmp.
    // Pass nil (not @"") as the extension when preferredName has none —
    // URLForResource:withExtension:@"" may fail to match extensionless files.
    NSString *ext = preferredName.pathExtension;
    NSURL *url = [[NSBundle mainBundle] URLForResource:preferredName.stringByDeletingPathExtension
                                         withExtension:(ext.length ? ext : nil)];
    if (!url) return nil;

    NSString *tmp = [NSTemporaryDirectory() stringByAppendingPathComponent:preferredName];
    [fm removeItemAtPath:tmp error:nil];
    NSError *err = nil;
    // Per Cocoa convention, trust the BOOL return value, not the error object.
    if (![fm copyItemAtURL:url toURL:[NSURL fileURLWithPath:tmp] error:&err]) {
        return nil;
    }
    return tmp;
}

// Builds a JS-friendly { code, message } error payload, substituting safe
// defaults when either argument is nil.
static NSDictionary *SVErrDict(NSString *code, NSString *msg) {
    NSString *safeCode = code ?: @"Error";
    NSString *safeMessage = msg ?: @"Unknown error";
    return @{ @"code": safeCode, @"message": safeMessage };
}

// Dynamic Swift bridge:
// Expect a Swift class annotated: @objc(SpeakerVerificationRNFacade)
// with ObjC-visible selectors:
//  + (id)createEngineWithModelPath:(NSString*)modelPath enrollmentJsonPath:(NSString*)jsonPath options:(NSDictionary*)options error:(NSError**)error;
//  + (NSDictionary*)verifyWavWithEngine:(id)engine wavPath:(NSString*)wavPath reset:(BOOL)reset error:(NSError**)error;
// Looks up the Swift facade class by name at runtime so this ObjC file has no
// compile-time dependency on the Swift target. Returns Nil when the class is
// not linked into the binary (callers surface that as an NSError).
static Class SVFacadeClass(void) {
    return NSClassFromString(@"SpeakerVerificationRNFacade");
}

// ============================================================
// MARK: - Speaker Verification Mic Config JSON resolver (modelPath)
// ============================================================

// Resolve "modelPath" inside mic controller config JSON (bundle name -> absolute file path)
// so Swift/ORT always receives a real filesystem path.
// Rewrites the "modelPath" field of the mic-controller config JSON so that
// Swift/ORT always receives an absolute filesystem path. Pass-through policy:
// any parse/shape problem returns the ORIGINAL string unchanged (Swift side
// then does its own validation); only an unresolvable modelPath is a hard
// error (returns nil and sets *error when `error` is non-NULL).
static NSString * _Nullable SVResolveMicConfigJson(NSString *configJson, NSError **error) {
    if (!configJson || configJson.length == 0) {
        NSLog(@"[SV][ObjC] SVResolveMicConfigJson: empty configJson");
        return configJson;
    }

    NSData *data = [configJson dataUsingEncoding:NSUTF8StringEncoding];
    if (!data) {
        NSLog(@"[SV][ObjC] SVResolveMicConfigJson: failed to make NSData from JSON string (passing through)");
        return configJson;
    }

    // Parse; non-dictionary top level is passed through untouched.
    NSError *jsonErr = nil;
    id obj = [NSJSONSerialization JSONObjectWithData:data options:0 error:&jsonErr];
    if (jsonErr || ![obj isKindOfClass:[NSDictionary class]]) {
        NSLog(@"[SV][ObjC] SVResolveMicConfigJson: JSON parse failed (passing through). err=%@ json=%@", jsonErr, configJson);
        return configJson;
    }

    NSMutableDictionary *cfg = [(NSDictionary *)obj mutableCopy];
    id mp = cfg[@"modelPath"];
    if (![mp isKindOfClass:[NSString class]] || ((NSString *)mp).length == 0) {
        NSLog(@"[SV][ObjC] SVResolveMicConfigJson: missing/invalid modelPath (passing through). keys=%@", cfg.allKeys);
        return configJson;
    }

    NSString *modelPathIn = (NSString *)mp;
    NSLog(@"[SV][ObjC] SVResolveMicConfigJson: modelPath(in)='%@'", modelPathIn);

    // Already an absolute path to an existing file: nothing to rewrite.
    if ([modelPathIn hasPrefix:@"/"] && [[NSFileManager defaultManager] fileExistsAtPath:modelPathIn]) {
        NSLog(@"[SV][ObjC] SVResolveMicConfigJson: modelPath absolute & exists ✅");
        return configJson;
    }

    // Resolve using the SAME logic as createSpeakerVerifier (bundle lookup/scan).
    NSString *resolved = SVResolveFilePath(modelPathIn);
    if (!resolved) {
        // Hard failure: the diagnostic lists how many .onnx files the bundle
        // actually contains to help debug packaging problems.
        NSArray<NSString *> *onnx = [[NSBundle mainBundle] pathsForResourcesOfType:@"onnx" inDirectory:nil];
        NSLog(@"[SV][ObjC] SVResolveMicConfigJson: ❌ cannot resolve modelPath='%@' in bundle. onnxCount=%lu sample=%@",
              modelPathIn, (unsigned long)onnx.count, onnx.count ? onnx.firstObject : @"(none)");
        if (error) {
            *error = [NSError errorWithDomain:@"SV" code:420 userInfo:@{
                NSLocalizedDescriptionKey: [NSString stringWithFormat:@"Mic config model not found in app bundle: %@", modelPathIn]
            }];
        }
        return nil;
    }

    // Prefer a stable tmp copy when available, else the resolved bundle path.
    NSString *stable = SVCopyToTempIfNeeded(resolved, [modelPathIn lastPathComponent]) ?: resolved;
    cfg[@"modelPath"] = stable;

    // Re-encode; on failure fall back to the original JSON (best effort).
    NSData *outData = [NSJSONSerialization dataWithJSONObject:cfg options:0 error:&jsonErr];
    if (jsonErr || !outData) {
        NSLog(@"[SV][ObjC] SVResolveMicConfigJson: re-encode failed (passing original). err=%@", jsonErr);
        return configJson;
    }

    NSString *outJson = [[NSString alloc] initWithData:outData encoding:NSUTF8StringEncoding];
    NSLog(@"[SV][ObjC] SVResolveMicConfigJson: modelPath(resolved)='%@'", stable);
    NSLog(@"[SV][ObjC] SVResolveMicConfigJson: json(out)=%@", outJson);
    return outJson;
}


// =========================
// MARK: - Mic controller dynamic bridge
// =========================

// Creates the Swift mic controller from a config JSON string. The JSON's
// "modelPath" is resolved to an absolute filesystem path first so ORT always
// receives a real file. Returns nil on failure, populating *error only when
// `error` is non-NULL.
static id _Nullable SVCreateMicController(NSString *configJson, NSError **error) {
    NSLog(@"[SV][ObjC] SVCreateMicController: in.jsonLen=%lu json=%@",
          (unsigned long)(configJson ? configJson.length : 0),
          configJson ?: @"(null)");

    // Resolve modelPath inside JSON BEFORE calling Swift
    NSError *resolveErr = nil;
    NSString *fixedJson = SVResolveMicConfigJson(configJson, &resolveErr);
    if (resolveErr || !fixedJson) {
        if (error) *error = resolveErr;
        NSLog(@"[SV][ObjC] SVCreateMicController: ❌ SVResolveMicConfigJson failed: %@", resolveErr.localizedDescription);
        return nil;
    }
    if (![fixedJson isEqualToString:configJson]) {
        NSLog(@"[SV][ObjC] SVCreateMicController: using RESOLVED jsonLen=%lu", (unsigned long)fixedJson.length);
    } else {
        NSLog(@"[SV][ObjC] SVCreateMicController: json unchanged (no modelPath fix needed)");
    }

    Class c = SVFacadeClass();
    if (!c) {
        if (error) *error = [NSError errorWithDomain:@"SV" code:10 userInfo:@{NSLocalizedDescriptionKey: @"Swift class SpeakerVerificationRNFacade not found"}];
        return nil;
    }
    SEL sel = NSSelectorFromString(@"createMicControllerWithConfigJson:error:");
    if (![c respondsToSelector:sel]) {
        if (error) *error = [NSError errorWithDomain:@"SV" code:11 userInfo:@{NSLocalizedDescriptionKey: @"Missing selector createMicControllerWithConfigJson:error:"}];
        return nil;
    }
    // Call through a LOCAL error variable so we never dereference a NULL
    // `error` (the previous `if (*error)` crashed when callers passed NULL),
    // and judge success by the returned object per Cocoa convention.
    NSError *callErr = nil;
    id (*msgSend)(id, SEL, NSString*, NSError**) = (void*)objc_msgSend;
    id out = msgSend(c, sel, fixedJson, &callErr);
    if (!out || callErr) {
        if (error) *error = callErr;
        NSLog(@"[SV][ObjC] SVCreateMicController: ❌ Swift createMicController failed: %@", callErr.localizedDescription);
    } else {
        NSLog(@"[SV][ObjC] SVCreateMicController: ✅ created controller=%@", out);
    }
    return out;
}

// objc_msgSend trampoline for BOOL (NSString, NSInteger, BOOL, NSError**) selectors.
static BOOL SVCallBool2(id obj, SEL sel, NSString *s1, NSInteger i1, BOOL b1, NSError **error) {
    typedef BOOL (*SVBool2Fn)(id, SEL, NSString *, NSInteger, BOOL, NSError **);
    return ((SVBool2Fn)objc_msgSend)(obj, sel, s1, i1, b1, error);
}
// objc_msgSend trampoline for BOOL (NSError**) selectors (Swift "AndReturnError" form).
static BOOL SVCallBool1(id obj, SEL sel, NSError **error) {
    typedef BOOL (*SVBool1Fn)(id, SEL, NSError **);
    return ((SVBool1Fn)objc_msgSend)(obj, sel, error);
}
// objc_msgSend trampoline for BOOL (BOOL, NSError**) selectors.
static BOOL SVCallBoolReset(id obj, SEL sel, BOOL b1, NSError **error) {
    typedef BOOL (*SVBoolResetFn)(id, SEL, BOOL, NSError **);
    return ((SVBoolResetFn)objc_msgSend)(obj, sel, b1, error);
}
// objc_msgSend trampoline for BOOL (NSNumber*, BOOL, NSError**) selectors.
static BOOL SVCallBoolHopStop(id obj, SEL sel, NSNumber *hopSeconds, BOOL stopOnMatch, NSError **error) {
    typedef BOOL (*SVBoolHopStopFn)(id, SEL, NSNumber *, BOOL, NSError **);
    return ((SVBoolHopStopFn)objc_msgSend)(obj, sel, hopSeconds, stopOnMatch, error);
}

// Dynamically invokes -setDelegate: so no Swift header is needed at compile time.
static void SVSetDelegate(id obj, id delegateObj) {
    typedef void (*SVSetDelegateFn)(id, SEL, id);
    ((SVSetDelegateFn)objc_msgSend)(obj, NSSelectorFromString(@"setDelegate:"), delegateObj);
}


// Dynamically invokes the Swift facade's engine factory. Returns the opaque
// engine object, or nil (with *error set) when the facade class or the
// factory selector is missing from the binary.
static id _Nullable SVCreateEngine(NSString *modelPath, NSString *jsonPath, NSDictionary *options, NSError **error) {
    Class facade = SVFacadeClass();
    if (!facade) {
        if (error) *error = [NSError errorWithDomain:@"SV" code:1 userInfo:@{NSLocalizedDescriptionKey: @"Swift class SpeakerVerificationRNFacade not found (did you add it?)"}];
        return nil;
    }
    SEL factorySel = NSSelectorFromString(@"createEngineWithModelPath:enrollmentJsonPath:options:error:");
    if (![facade respondsToSelector:factorySel]) {
        if (error) *error = [NSError errorWithDomain:@"SV" code:2 userInfo:@{NSLocalizedDescriptionKey: @"Missing selector createEngineWithModelPath:enrollmentJsonPath:options:error:"}];
        return nil;
    }
    typedef id (*SVCreateEngineFn)(id, SEL, NSString *, NSString *, NSDictionary *, NSError **);
    return ((SVCreateEngineFn)objc_msgSend)(facade, factorySel, modelPath, jsonPath, options ?: @{}, error);
}

// Dynamically invokes the Swift facade's WAV verification entry point.
// Returns the result dictionary, or nil (with *error set) when the facade
// class or the selector is unavailable.
static NSDictionary * _Nullable SVVerifyWav(id engine, NSString *wavPath, BOOL reset, NSError **error) {
    Class facade = SVFacadeClass();
    if (!facade) {
        if (error) *error = [NSError errorWithDomain:@"SV" code:3 userInfo:@{NSLocalizedDescriptionKey: @"Swift class SpeakerVerificationRNFacade not found"}];
        return nil;
    }
    SEL verifySel = NSSelectorFromString(@"verifyWavWithEngine:wavPath:reset:error:");
    if (![facade respondsToSelector:verifySel]) {
        if (error) *error = [NSError errorWithDomain:@"SV" code:4 userInfo:@{NSLocalizedDescriptionKey: @"Missing selector verifyWavWithEngine:wavPath:reset:error:"}];
        return nil;
    }
    typedef NSDictionary * (*SVVerifyFn)(id, SEL, id, NSString *, BOOL, NSError **);
    return ((SVVerifyFn)objc_msgSend)(facade, verifySel, engine, wavPath, reset, error);
}

// YES when the user has already granted microphone record permission.
static BOOL KWRNBHasMicPermission(void) {
    return [[AVAudioSession sharedInstance] recordPermission] == AVAudioSessionRecordPermissionGranted;
}

// YES when speech-recognition authorization has already been granted.
static BOOL KWRNBHasSpeechRecognitionPermission(void) {
    return [SFSpeechRecognizer authorizationStatus] == SFSpeechRecognizerAuthorizationStatusAuthorized;
}

// Private class extension: per-id registries of the native objects this
// module creates on behalf of JS. All three are allocated in -init.
@interface KeyWordRNBridge () <RCTBridgeModule>

// { instanceId: KeyWordsDetectionWrapper } — keyword-detection instances.
@property (nonatomic, strong) NSMutableDictionary *instances;
@property (nonatomic, strong) NSMutableDictionary *speakerVerifiers; // { engineId: SVVerifierHolder }
@property (nonatomic, strong) NSMutableDictionary *speakerMicControllers; // { controllerId: SVMicHolder }

@end

@implementation KeyWordRNBridge

RCT_EXPORT_MODULE();

// Designated initializer: allocate the empty per-id registries up front so
// the exported methods can assume they are always non-nil.
- (instancetype)init {
    self = [super init];
    if (self != nil) {
        _instances = [[NSMutableDictionary alloc] init];
        _speakerVerifiers = [[NSMutableDictionary alloc] init];
        _speakerMicControllers = [[NSMutableDictionary alloc] init];
    }
    return self;
}

// React Native queries this before instantiating the module; YES forces -init
// to run on the main queue. NOTE(review): presumably needed for the AV/Speech
// session APIs used by this module — confirm before changing to NO.
+ (BOOL)requiresMainQueueSetup
{
    return YES;
}

// NOTE: Extend supported events with Speaker Verification mic events
// Every event name this module may emit to JS via sendEventWithName:.
// Includes the keyword/VAD events plus the Speaker Verification mic events.
- (NSArray<NSString *> *)supportedEvents {
    return @[@"onKeywordDetectionEvent",
             @"onVADDetectionEvent",
             @"onSpeakerVerificationOnboardingProgress",
             @"onSpeakerVerificationOnboardingDone",
             @"onSpeakerVerificationVerifyResult",
             @"onSpeakerVerificationError"];
}


// ============================================================
// MARK: - Speaker Verification (Swift) - RN APIs
// ============================================================

// Create a speaker verifier engine from:
//  - modelPathOrName: absolute path OR "speaker_model.dm" in app bundle
//  - enrollmentJsonPathOrName: absolute path OR "kesku_enrollment.json" in app bundle
//
// JS should call this once, then call verifySpeakerWavStreaming(engineId, wavPath)
// Creates a speaker-verifier engine and registers it under `engineId`.
//  - modelPathOrName: absolute path OR a bundle resource name ("speaker_model.dm")
//  - enrollmentJsonPathOrName: absolute path OR bundle name ("kesku_enrollment.json")
// Resolves with { ok, engineId, modelPath, enrollmentJsonPath } or rejects.
RCT_EXPORT_METHOD(createSpeakerVerifier:(NSString *)engineId
                  modelPathOrName:(NSString *)modelPathOrName
                  enrollmentJsonPathOrName:(NSString *)enrollmentJsonPathOrName
                  options:(NSDictionary *)options
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    // The registry is mutated from a background queue below; guard all access.
    @synchronized (self.speakerVerifiers) {
        if (self.speakerVerifiers[engineId]) {
            reject(@"SVEngineExists", [NSString stringWithFormat:@"Speaker verifier already exists with ID: %@", engineId], nil);
            return;
        }
    }

    dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
        @autoreleasepool {
            NSString *modelResolved = SVResolveFilePath(modelPathOrName);
            NSString *jsonResolved  = SVResolveFilePath(enrollmentJsonPathOrName);

            if (!modelResolved) {
                reject(@"SVModelNotFound", [NSString stringWithFormat:@"Model file not found: %@", modelPathOrName], nil);
                return;
            }
            if (!jsonResolved) {
                reject(@"SVEnrollmentNotFound", [NSString stringWithFormat:@"Enrollment JSON not found: %@", enrollmentJsonPathOrName], nil);
                return;
            }

            // Ensure stable file paths by copying to tmp if needed.
            NSString *modelPath = SVCopyToTempIfNeeded(modelResolved, [modelPathOrName lastPathComponent]) ?: modelResolved;
            NSString *jsonPath  = SVCopyToTempIfNeeded(jsonResolved,  [enrollmentJsonPathOrName lastPathComponent]) ?: jsonResolved;

            NSError *err = nil;
            id engine = SVCreateEngine(modelPath, jsonPath, options ?: @{}, &err);
            if (err || !engine) {
                reject(@"SVCreateError",
                       [NSString stringWithFormat:@"Failed to create speaker verifier: %@", err.localizedDescription ?: @"unknown"],
                       err);
                return;
            }

            SVVerifierHolder *h = [SVVerifierHolder new];
            h.engineId = engineId;
            h.engine = engine;

            // Re-check under the lock: a concurrent create for the same ID may
            // have won the race while this engine was being built.
            @synchronized (self.speakerVerifiers) {
                if (self.speakerVerifiers[engineId]) {
                    reject(@"SVEngineExists", [NSString stringWithFormat:@"Speaker verifier already exists with ID: %@", engineId], nil);
                    return;
                }
                self.speakerVerifiers[engineId] = h;
            }
            resolve(@{ @"ok": @YES, @"engineId": engineId, @"modelPath": modelPath, @"enrollmentJsonPath": jsonPath });
        }
    });
}

// Verify a WAV file by streaming frames internally via the Swift engine.
// wavPathOrName can be absolute path OR "test.wav" in bundle.
// Verifies a WAV file (absolute path or bundle name) through the engine
// registered under `engineId`; frame streaming happens inside the Swift
// engine. Resolves with the Swift result dictionary or rejects.
RCT_EXPORT_METHOD(verifySpeakerWavStreaming:(NSString *)engineId
                  wavPathOrName:(NSString *)wavPathOrName
                  resetState:(BOOL)resetState
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    // Lookup under the same lock used by create/destroy, since the registry
    // is mutated from a background queue.
    SVVerifierHolder *h;
    @synchronized (self.speakerVerifiers) {
        h = self.speakerVerifiers[engineId];
    }
    if (!h || !h.engine) {
        reject(@"SVEngineNotFound", [NSString stringWithFormat:@"No speaker verifier with ID: %@", engineId], nil);
        return;
    }

    dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
        @autoreleasepool {
            NSString *wavResolved = SVResolveFilePath(wavPathOrName);
            if (!wavResolved) {
                reject(@"SVWavNotFound", [NSString stringWithFormat:@"WAV not found: %@", wavPathOrName], nil);
                return;
            }
            NSString *wavPath = SVCopyToTempIfNeeded(wavResolved, [wavPathOrName lastPathComponent]) ?: wavResolved;

            NSError *err = nil;
            NSDictionary *out = SVVerifyWav(h.engine, wavPath, resetState, &err);
            if (err || !out) {
                reject(@"SVVerifyError",
                       [NSString stringWithFormat:@"Failed to verify wav: %@", err.localizedDescription ?: @"unknown"],
                       err);
                return;
            }
            resolve(out);
        }
    });
}

// Unregisters and releases the verifier engine registered under `engineId`.
// Dropping the holder releases the opaque Swift engine (ARC).
RCT_EXPORT_METHOD(destroySpeakerVerifier:(NSString *)engineId
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    // Check-and-remove atomically under the registry lock (the registry is
    // also mutated from background queues in the create path).
    @synchronized (self.speakerVerifiers) {
        if (!self.speakerVerifiers[engineId]) {
            reject(@"SVEngineNotFound", [NSString stringWithFormat:@"No speaker verifier with ID: %@", engineId], nil);
            return;
        }
        [self.speakerVerifiers removeObjectForKey:engineId];
    }
    resolve(@{ @"ok": @YES, @"engineId": engineId });
}

 
// ============================================================
// MARK: - Speaker Verification Mic Controller (Swift) - RN APIs
// ============================================================

// Create mic controller from config JSON (SpeakerVerificationConfig).
// Returns controllerId.
// Creates a mic controller from a SpeakerVerificationConfig JSON string and
// registers it under `controllerId`. Swift delegate callbacks are forwarded
// to JS via an SVMicDelegateProxy retained by the registry entry.
RCT_EXPORT_METHOD(createSpeakerVerificationMicController:(NSString *)controllerId
                  configJson:(NSString *)configJson
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    NSLog(@"[SV][ObjC] createSpeakerVerificationMicController: controllerId=%@ jsonLen=%lu json=%@",
          controllerId,
          (unsigned long)(configJson ? configJson.length : 0),
          configJson ?: @"(null)");

    // The registry is mutated from a background queue below; guard all access.
    @synchronized (self.speakerMicControllers) {
        if (self.speakerMicControllers[controllerId]) {
            reject(@"SVMicExists", [NSString stringWithFormat:@"Speaker mic controller already exists with ID: %@", controllerId], nil);
            return;
        }
    }

    dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
        @autoreleasepool {
            NSError *err = nil;
            id ctrl = SVCreateMicController(configJson, &err);
            if (err || !ctrl) {
                NSLog(@"[SV][ObjC] createSpeakerVerificationMicController: ❌ FAILED err=%@", err.localizedDescription ?: @"(null)");
                reject(@"SVMicCreateError",
                       [NSString stringWithFormat:@"Failed to create mic controller: %@", err.localizedDescription ?: @"unknown"],
                       err);
                return;
            }
            NSLog(@"[SV][ObjC] createSpeakerVerificationMicController: ✅ ctrl=%@", ctrl);

            // Proxy that forwards Swift callbacks -> RN events; the delegate is
            // set dynamically so no Swift header is needed here.
            SVMicDelegateProxy *proxy = [SVMicDelegateProxy new];
            proxy.bridge = self;
            proxy.controllerId = controllerId;
            SVSetDelegate(ctrl, proxy);
            NSLog(@"[SV][ObjC] createSpeakerVerificationMicController: delegate set proxy=%@", proxy);

            SVMicHolder *h = [SVMicHolder new];
            h.controllerId = controllerId;
            h.controller = ctrl;
            h.delegateProxy = proxy; // keep strong ref

            // Re-check under the lock to close the create/create race window.
            @synchronized (self.speakerMicControllers) {
                if (self.speakerMicControllers[controllerId]) {
                    reject(@"SVMicExists", [NSString stringWithFormat:@"Speaker mic controller already exists with ID: %@", controllerId], nil);
                    return;
                }
                self.speakerMicControllers[controllerId] = h;
                NSLog(@"[SV][ObjC] createSpeakerVerificationMicController: stored holder. count=%lu",
                      (unsigned long)self.speakerMicControllers.count);
            }

            resolve(@{ @"ok": @YES, @"controllerId": controllerId });
        }
    });
}

// Stops (best-effort) and unregisters the mic controller for `controllerId`.
RCT_EXPORT_METHOD(destroySpeakerVerificationMicController:(NSString *)controllerId
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    // Look up and remove atomically under the registry lock.
    SVMicHolder *h;
    @synchronized (self.speakerMicControllers) {
        h = self.speakerMicControllers[controllerId];
        if (h) {
            [self.speakerMicControllers removeObjectForKey:controllerId];
        }
    }
    if (!h) {
        reject(@"SVMicNotFound", [NSString stringWithFormat:@"No speaker mic controller with ID: %@", controllerId], nil);
        return;
    }

    // Best-effort stop (no throw)
    if (h.controller && [h.controller respondsToSelector:NSSelectorFromString(@"stop")]) {
        void (*msgSend)(id, SEL) = (void*)objc_msgSend;
        msgSend(h.controller, NSSelectorFromString(@"stop"));
    }

    resolve(@{ @"ok": @YES, @"controllerId": controllerId });
}

// beginOnboarding(enrollmentId, targetEmbeddingCount, reset)
RCT_EXPORT_METHOD(svBeginOnboarding:(NSString *)controllerId
                  enrollmentId:(NSString *)enrollmentId
                  targetEmbeddingCount:(NSInteger)targetEmbeddingCount
                  reset:(BOOL)reset
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    SVMicHolder *h = self.speakerMicControllers[controllerId];
    if (!h || !h.controller) {
        reject(@"SVMicNotFound", [NSString stringWithFormat:@"No speaker mic controller with ID: %@", controllerId], nil);
        return;
    }

    dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
        @autoreleasepool {
            NSError *err = nil;
            SEL sel = NSSelectorFromString(@"beginOnboardingWithEnrollmentId:targetEmbeddingCount:reset:error:");
            if (![h.controller respondsToSelector:sel]) {
                reject(@"SVMicMissingSelector", @"Mic controller missing beginOnboardingWithEnrollmentId:targetEmbeddingCount:reset:error:", nil);
                return;
            }
            BOOL ok = SVCallBool2(h.controller, sel, enrollmentId, targetEmbeddingCount, reset, &err);
            if (!ok || err) {
                reject(@"SVMicBeginError",
                       [NSString stringWithFormat:@"beginOnboarding failed: %@", err.localizedDescription ?: @"unknown"],
                       err);
                return;
            }
            resolve(@{ @"ok": @YES, @"controllerId": controllerId, @"enrollmentId": enrollmentId, @"target": @(targetEmbeddingCount) });
        }
    });
}

// getNextEmbeddingFromMic(): starts mic, collects ONE embedding, then stops mic.
// Result is delivered via event:
//   onSpeakerVerificationOnboardingProgress / onSpeakerVerificationOnboardingDone
// Starts the mic, collects ONE embedding, then stops the mic. The embedding
// itself is delivered via the events:
//   onSpeakerVerificationOnboardingProgress / onSpeakerVerificationOnboardingDone
RCT_EXPORT_METHOD(svGetNextEmbeddingFromMic:(NSString *)controllerId
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    // Lookup under lock: the registry is mutated from background queues.
    SVMicHolder *h;
    @synchronized (self.speakerMicControllers) {
        h = self.speakerMicControllers[controllerId];
    }
    if (!h || !h.controller) {
        reject(@"SVMicNotFound", [NSString stringWithFormat:@"No speaker mic controller with ID: %@", controllerId], nil);
        return;
    }

    dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
        @autoreleasepool {
            NSError *err = nil;
            SEL sel = NSSelectorFromString(@"getNextEmbeddingFromMicAndReturnError:");
            if (![h.controller respondsToSelector:sel]) {
                reject(@"SVMicMissingSelector", @"Mic controller missing getNextEmbeddingFromMicAndReturnError:", nil);
                return;
            }
            BOOL ok = SVCallBool1(h.controller, sel, &err);
            if (!ok || err) {
                reject(@"SVMicGetNextError",
                       [NSString stringWithFormat:@"getNextEmbeddingFromMic failed: %@", err.localizedDescription ?: @"unknown"],
                       err);
                return;
            }
            resolve(@{ @"ok": @YES, @"controllerId": controllerId });
        }
    });
}

// finalizeOnboardingNow(): forces finalize and emits onSpeakerVerificationOnboardingDone (from Swift delegate)
RCT_EXPORT_METHOD(svFinalizeOnboardingNow:(NSString *)controllerId
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    SVMicHolder *h = self.speakerMicControllers[controllerId];
    if (!h || !h.controller) {
        reject(@"SVMicNotFound", [NSString stringWithFormat:@"No speaker mic controller with ID: %@", controllerId], nil);
        return;
    }
    dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
        @autoreleasepool {
            NSError *err = nil;
            SEL sel = NSSelectorFromString(@"finalizeOnboardingNowAndReturnError:");
            if (![h.controller respondsToSelector:sel]) {
                reject(@"SVMicMissingSelector", @"Mic controller missing finalizeOnboardingNowAndReturnError:", nil);
                return;
            }
            BOOL ok = SVCallBool1(h.controller, sel, &err);
            if (!ok || err) {
                reject(@"SVMicFinalizeError",
                       [NSString stringWithFormat:@"finalizeOnboardingNow failed: %@", err.localizedDescription ?: @"unknown"],
                       err);
                return;
            }
            resolve(@{ @"ok": @YES, @"controllerId": controllerId });
        }
    });
}

// setEnrollmentJson(enrollmentJson): sets enrollment for verification mode
// Resolves { ok, controllerId } on success; rejects if the controller is
// missing, the selector is unavailable, or the Swift call reports failure.
RCT_EXPORT_METHOD(svSetEnrollmentJson:(NSString *)controllerId
                  enrollmentJson:(NSString *)enrollmentJson
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    SVMicHolder *h = self.speakerMicControllers[controllerId];
    if (!h || !h.controller) {
        reject(@"SVMicNotFound", [NSString stringWithFormat:@"No speaker mic controller with ID: %@", controllerId], nil);
        return;
    }
    dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
        @autoreleasepool {
            NSError *err = nil;
            SEL sel = NSSelectorFromString(@"setEnrollmentJson:error:");
            if (![h.controller respondsToSelector:sel]) {
                reject(@"SVMicMissingSelector", @"Mic controller missing setEnrollmentJson:error:", nil);
                return;
            }
            // Dynamic call into Swift; the cast matches the bridged signature
            // (NSString *, NSError **) -> BOOL.
            BOOL (*msgSend)(id, SEL, NSString*, NSError**) = (void*)objc_msgSend;
            BOOL ok = msgSend(h.controller, sel, enrollmentJson, &err);
            // Check the BOOL return, not the error pointer: err is only
            // meaningful when the call reports failure.
            if (!ok) {
                reject(@"SVMicSetEnrollError",
                       [NSString stringWithFormat:@"setEnrollmentJson failed: %@", err.localizedDescription ?: @"unknown"],
                       err);
                return;
            }
            resolve(@{ @"ok": @YES, @"controllerId": controllerId });
        }
    });
}

// startVerifyFromMic(resetState): starts mic until a verification result is produced.
// Result delivered via event: onSpeakerVerificationVerifyResult
// Resolves { ok, controllerId, resetState } once the mic successfully starts.
RCT_EXPORT_METHOD(svStartVerifyFromMic:(NSString *)controllerId
                  resetState:(BOOL)resetState
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    SVMicHolder *h = self.speakerMicControllers[controllerId];
    if (!h || !h.controller) {
        reject(@"SVMicNotFound", [NSString stringWithFormat:@"No speaker mic controller with ID: %@", controllerId], nil);
        return;
    }
    dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
        @autoreleasepool {
            NSError *err = nil;
            SEL sel = NSSelectorFromString(@"startVerifyFromMicWithResetState:error:");
            if (![h.controller respondsToSelector:sel]) {
                reject(@"SVMicMissingSelector", @"Mic controller missing startVerifyFromMicWithResetState:error:", nil);
                return;
            }
            BOOL ok = SVCallBoolReset(h.controller, sel, resetState, &err);
            // Check the BOOL return, not the error pointer: err is undefined
            // on success per the Cocoa error convention.
            if (!ok) {
                reject(@"SVMicStartVerifyError",
                       [NSString stringWithFormat:@"startVerifyFromMic failed: %@", err.localizedDescription ?: @"unknown"],
                       err);
                return;
            }
            resolve(@{ @"ok": @YES, @"controllerId": controllerId, @"resetState": @(resetState) });
        }
    });
}
// startEndlessVerifyFromMic(hopSeconds, stopOnMatch): starts mic and keeps verifying repeatedly.
// Results delivered via event: onSpeakerVerificationVerifyResult
// Resolves { ok, controllerId, hopSeconds, stopOnMatch } once started.
RCT_EXPORT_METHOD(svStartEndlessVerifyFromMic:(NSString *)controllerId
                  hopSeconds:(nonnull NSNumber *)hopSeconds
                  stopOnMatch:(BOOL)stopOnMatch
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    SVMicHolder *h = self.speakerMicControllers[controllerId];
    if (!h || !h.controller) {
        reject(@"SVMicNotFound", [NSString stringWithFormat:@"No speaker mic controller with ID: %@", controllerId], nil);
        return;
    }
    dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
        @autoreleasepool {
            NSError *err = nil;
            SEL sel = NSSelectorFromString(@"startEndlessVerifyFromMicWithHopSeconds:stopOnMatch:error:");
            if (![h.controller respondsToSelector:sel]) {
                reject(@"SVMicMissingSelector", @"Mic controller missing startEndlessVerifyFromMicWithHopSeconds:stopOnMatch:error:", nil);
                return;
            }
            BOOL ok = SVCallBoolHopStop(h.controller, sel, hopSeconds, stopOnMatch, &err);
            // Check the BOOL return, not the error pointer: err is undefined
            // on success per the Cocoa error convention.
            if (!ok) {
                reject(@"SVMicStartEndlessVerifyError",
                       [NSString stringWithFormat:@"startEndlessVerifyFromMic failed: %@", err.localizedDescription ?: @"unknown"],
                       err);
                return;
            }
            resolve(@{ @"ok": @YES, @"controllerId": controllerId, @"hopSeconds": hopSeconds, @"stopOnMatch": @(stopOnMatch) });
        }
    });
}


// stop(): stop mic immediately
RCT_EXPORT_METHOD(svStopMic:(NSString *)controllerId
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    SVMicHolder *holder = self.speakerMicControllers[controllerId];
    if (!holder || !holder.controller) {
        reject(@"SVMicNotFound", [NSString stringWithFormat:@"No speaker mic controller with ID: %@", controllerId], nil);
        return;
    }
    // "stop" is dispatched dynamically (the controller is a Swift object
    // reached without a bridging header); silently skip if unavailable.
    SEL stopSel = NSSelectorFromString(@"stop");
    if ([holder.controller respondsToSelector:stopSel]) {
        void (*callStop)(id, SEL) = (void *)objc_msgSend;
        callStop(holder.controller, stopSel);
    }
    resolve(@{ @"ok": @YES, @"controllerId": controllerId });
}

// Creates a multi-model keyword-detection instance keyed by instanceId.
// Rejects if the ID already exists or the wrapper fails to initialize.
RCT_EXPORT_METHOD(createInstanceMulti:(NSString *)instanceId
                  modelPaths:(NSArray<NSString *> *)modelPaths
                  thresholds:(NSArray<NSNumber *> *)thresholds
                  bufferCnts:(NSArray<NSNumber *> *)bufferCnts
                  msBetweenCallback:(NSArray<NSNumber *> *)msBetweenCallback
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject) {
    if (self.instances[instanceId]) {
        reject(@"InstanceExists", [NSString stringWithFormat:@"Instance already exists with ID: %@", instanceId], nil);
        return;
    }

    NSError *error = nil;
    KeyWordsDetectionWrapper *wrapper = [[KeyWordsDetectionWrapper alloc]
        initWithInstanceId:instanceId
               modelNames:modelPaths
               thresholds:thresholds
               bufferCnts:bufferCnts
        msBetweenCallback:msBetweenCallback
                   bridge:self
                    error:&error];
    // Also guard against a nil wrapper without a populated error; previously
    // that path resolved "created" while storing nothing in self.instances.
    if (error || !wrapper) {
        reject(@"CreateError", [NSString stringWithFormat:@"Failed to create multi-model instance: %@", error.localizedDescription], nil);
    } else {
        self.instances[instanceId] = wrapper;
        resolve([NSString stringWithFormat:@"Multi-model instance created with ID: %@", instanceId]);
    }
}

// Creates a single-model keyword-detection instance keyed by instanceId.
// Rejects if the ID already exists or the wrapper fails to initialize.
RCT_EXPORT_METHOD(createInstance:(NSString *)instanceId modelName:(NSString *)modelName threshold:(float)threshold bufferCnt:(NSInteger)bufferCnt resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject)
{
    if (self.instances[instanceId]) {
        reject(@"InstanceExists", [NSString stringWithFormat:@"Instance already exists with ID: %@", instanceId], nil);
        return;
    }

    NSError *error = nil;
    KeyWordsDetectionWrapper *wrapper = [[KeyWordsDetectionWrapper alloc] initWithInstanceId:instanceId modelName:modelName threshold:threshold bufferCnt:bufferCnt bridge:self error:&error];
    // Also guard against a nil wrapper without a populated error; previously
    // that path resolved "created" while storing nothing in self.instances.
    if (error || !wrapper) {
        reject(@"CreateError", [NSString stringWithFormat:@"Failed to create instance: %@", error.localizedDescription], nil);
    } else {
        self.instances[instanceId] = wrapper;
        resolve([NSString stringWithFormat:@"Instance created with ID: %@", instanceId]);
    }
}

// NEW: receive global wakeword audio routing config from JS
// Forwards the raw JSON string to the audio/session manager and resolves "ok";
// any exception raised while applying the config is converted to a rejection.
RCT_EXPORT_METHOD(setAudioRoutingConfig:(NSString *)jsonConfig
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
  @try {
    AudioSessionAndDuckingManager *manager = AudioSessionAndDuckingManager.shared;
    [manager setWakewordAudioRoutingConfigFromJSONString:jsonConfig];

    NSLog(@"[KeyWordRNBridge] setAudioRoutingConfig JSON = %@", jsonConfig);
    resolve(@"ok");
  }
  @catch (NSException *e) {
    reject(@"AudioRoutingConfigError",
           [NSString stringWithFormat:@"Failed to set audio routing config: %@", e.reason],
           nil);
  }
}

// Disables audio ducking via the shared session manager; always resolves.
RCT_EXPORT_METHOD(disableDucking:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject)
{
  AudioSessionAndDuckingManager *manager = AudioSessionAndDuckingManager.shared;
  [manager disableDucking];
  resolve(@"disabled");
}

// Initializes the shared audio-session/ducking manager; always resolves.
RCT_EXPORT_METHOD(initAudioSessAndDuckManage:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject)
{
  AudioSessionAndDuckingManager *manager = AudioSessionAndDuckingManager.shared;
  [manager initAudioSessAndDuckManage];
  resolve(@"enabled");
}

// Restarts listening after a ducking episode; always resolves.
RCT_EXPORT_METHOD(restartListeningAfterDucking:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject)
{
  [AudioSessionAndDuckingManager.shared restartListeningAfterDucking];
  // Fixed copy-paste bug: this method restarts listening, so resolving with
  // "disabled" was misleading to JS callers.
  resolve(@"restarted");
}

// Enables aggressive ducking via the shared session manager; always resolves.
RCT_EXPORT_METHOD(enableAggressiveDucking:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject)
{
  AudioSessionAndDuckingManager *manager = AudioSessionAndDuckingManager.shared;
  [manager enableAggressiveDucking];
  resolve(@"enabled");
}

// Disables ducking and tears down related session state; always resolves.
RCT_EXPORT_METHOD(disableDuckingAndCleanup:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject)
{
  AudioSessionAndDuckingManager *manager = AudioSessionAndDuckingManager.shared;
  [manager disableDuckingAndCleanup];
  resolve(@"disabled");
}

// Resolves YES/NO for the current mic permission state without prompting.
RCT_EXPORT_METHOD(hasMicPermissions:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    BOOL granted = KWRNBHasMicPermission();
    resolve(@(granted));
}

// Resolves YES/NO for mic permission, prompting the user if still
// undetermined. wait_timeout is in milliseconds; when > 0 and the system
// prompt has not been answered in time, we resolve with the current
// permission state instead of waiting forever. Never rejects.
RCT_EXPORT_METHOD(requestMicPermissions:(nonnull NSNumber *)wait_timeout
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    dispatch_async(dispatch_get_main_queue(), ^{
        AVAudioSession *audioSession = [AVAudioSession sharedInstance];
        AVAudioSessionRecordPermission permission = audioSession.recordPermission;
        // Fast paths: permission already decided, no prompt needed.
        if (permission == AVAudioSessionRecordPermissionGranted) {
            resolve(@YES);
            return;
        }

        if (permission == AVAudioSessionRecordPermissionDenied) {
            resolve(@NO);
            return;
        }

        NSTimeInterval timeoutSeconds = MAX(wait_timeout.doubleValue, 0.0) / 1000.0;
        // Both the timeout and the permission callback funnel through finish();
        // didResolve guards against resolving the promise twice. All paths are
        // dispatched to the main queue, so no extra synchronization is needed.
        __block BOOL didResolve = NO;
        void (^finish)(BOOL) = ^(BOOL granted) {
            if (didResolve) {
                return;
            }
            didResolve = YES;
            resolve(@(granted));
        };

        // A timeout of 0 (or negative) means "wait indefinitely for the user".
        if (timeoutSeconds > 0.0) {
            dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeoutSeconds * NSEC_PER_SEC)),
                           dispatch_get_main_queue(), ^{
                finish(KWRNBHasMicPermission());
            });
        }

        [audioSession requestRecordPermission:^(BOOL granted) {
            // The system may invoke this on an arbitrary queue; hop back to
            // main so finish() races safely with the timeout block.
            dispatch_async(dispatch_get_main_queue(), ^{
                finish(granted);
            });
        }];
    });
}

// Resolves YES/NO for the current speech-recognition authorization state
// without prompting.
RCT_EXPORT_METHOD(hasSpeechRecognitionPermissions:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    BOOL authorized = KWRNBHasSpeechRecognitionPermission();
    resolve(@(authorized));
}

// Resolves YES/NO for speech-recognition authorization, prompting the user if
// still undetermined. wait_timeout is in milliseconds; when > 0 and the
// prompt has not been answered in time, we resolve with the current
// authorization state instead of waiting forever. Never rejects.
RCT_EXPORT_METHOD(requestSpeechRecognitionPermissions:(nonnull NSNumber *)wait_timeout
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    dispatch_async(dispatch_get_main_queue(), ^{
        SFSpeechRecognizerAuthorizationStatus status = [SFSpeechRecognizer authorizationStatus];
        // Fast paths: authorization already decided, no prompt needed.
        if (status == SFSpeechRecognizerAuthorizationStatusAuthorized) {
            resolve(@YES);
            return;
        }

        if (status == SFSpeechRecognizerAuthorizationStatusDenied ||
            status == SFSpeechRecognizerAuthorizationStatusRestricted) {
            resolve(@NO);
            return;
        }

        NSTimeInterval timeoutSeconds = MAX(wait_timeout.doubleValue, 0.0) / 1000.0;
        // Both the timeout and the authorization callback funnel through
        // finish(); didResolve guards against resolving the promise twice.
        // All paths are dispatched to the main queue.
        __block BOOL didResolve = NO;
        void (^finish)(BOOL) = ^(BOOL granted) {
            if (didResolve) {
                return;
            }
            didResolve = YES;
            resolve(@(granted));
        };

        // A timeout of 0 (or negative) means "wait indefinitely for the user".
        if (timeoutSeconds > 0.0) {
            dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(timeoutSeconds * NSEC_PER_SEC)),
                           dispatch_get_main_queue(), ^{
                finish(KWRNBHasSpeechRecognitionPermission());
            });
        }

        [SFSpeechRecognizer requestAuthorization:^(SFSpeechRecognizerAuthorizationStatus updatedStatus) {
            // The system may invoke this on an arbitrary queue; hop back to
            // main so finish() races safely with the timeout block.
            dispatch_async(dispatch_get_main_queue(), ^{
                finish(updatedStatus == SFSpeechRecognizerAuthorizationStatusAuthorized);
            });
        }];
    });
}

// Applies a license key to the given detector instance and resolves with a
// boolean indicating whether the key was accepted.
RCT_EXPORT_METHOD(setKeywordDetectionLicense:(NSString *)instanceId licenseKey:(NSString *)licenseKey resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject)
{
    KeyWordsDetection *detector = [self.instances[instanceId] keyWordsDetection];
    if (!detector) {
        reject(@"InstanceNotFound", [NSString stringWithFormat:@"No instance found with ID: %@", instanceId], nil);
        return;
    }
    BOOL isLicensed = [detector setLicenseWithLicenseKey:licenseKey];
    NSLog(@"License is valid?: %@", isLicensed ? @"YES" : @"NO");
    resolve(@(isLicensed));
}

// Swaps the detection model of an existing instance; rejects if the instance
// is unknown or the underlying replace call reports an error.
RCT_EXPORT_METHOD(replaceKeywordDetectionModel:(NSString *)instanceId modelName:(NSString *)modelName threshold:(float)threshold bufferCnt:(NSInteger)bufferCnt resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject)
{
    KeyWordsDetection *detector = [self.instances[instanceId] keyWordsDetection];
    if (!detector) {
        reject(@"InstanceNotFound", [NSString stringWithFormat:@"No instance found with ID: %@", instanceId], nil);
        return;
    }
    NSError *error = nil;
    [detector replaceKeywordDetectionModelWithModelPath:modelName threshold:threshold bufferCnt:bufferCnt error:&error];
    if (error) {
        reject(@"ReplaceError", [NSString stringWithFormat:@"Failed to replace model: %@", error.localizedDescription], nil);
        return;
    }
    resolve([NSString stringWithFormat:@"Instance ID: %@ changed model to %@", instanceId, modelName]);
}

// Starts keyword detection for an instance with the given audio-session
// options. NOTE(review): the threshold parameter is not forwarded to the
// detector here — presumably set elsewhere; confirm against callers.
RCT_EXPORT_METHOD(startKeywordDetection:(NSString *)instanceId 
    threshold:(float)threshold 
    noExternalActivation:(BOOL)noExternalActivation
    duckOthers:(BOOL)duckOthers
    mixWithOthers:(BOOL)mixWithOthers
    defaultToSpeaker:(BOOL)defaultToSpeaker
    resolver:(RCTPromiseResolveBlock)resolve 
    rejecter:(RCTPromiseRejectBlock)reject)
{
    KeyWordsDetectionWrapper *wrapper = self.instances[instanceId];
    KeyWordsDetection *instance = wrapper.keyWordsDetection;
    if (instance) {
        BOOL success = [instance startListeningWithNoExternalActivation:noExternalActivation
                                                  duckOthers:duckOthers
                                              mixWithOthers:mixWithOthers
                                           defaultToSpeaker:defaultToSpeaker];
        // Idiomatic BOOL test (BOOL may be signed char; never compare to
        // YES/false) and a plain literal instead of a no-arg stringWithFormat.
        if (!success) {
            reject(@"StartError", @"Failed to start detection", nil);
        } else {
            resolve([NSString stringWithFormat:@"Started detection for instance: %@", instanceId]);
        }
    } else {
        reject(@"InstanceNotFound", [NSString stringWithFormat:@"No instance found with ID: %@", instanceId], nil);
    }
}

// Starts keyword detection gated by speaker verification; the enrollment may
// be passed as raw JSON or a file path. NOTE(review): threshold is not
// forwarded to the detector here — confirm against callers.
RCT_EXPORT_METHOD(startKeywordDetectionWithSpeakerVerification:(NSString *)instanceId
    threshold:(float)threshold
    speakerVerificationEnrollmentJsonOrPath:(NSString *)speakerVerificationEnrollmentJsonOrPath
    noExternalActivation:(BOOL)noExternalActivation
    duckOthers:(BOOL)duckOthers
    mixWithOthers:(BOOL)mixWithOthers
    defaultToSpeaker:(BOOL)defaultToSpeaker
    resolver:(RCTPromiseResolveBlock)resolve
    rejecter:(RCTPromiseRejectBlock)reject)
{
    KeyWordsDetectionWrapper *wrapper = self.instances[instanceId];
    KeyWordsDetection *instance = wrapper.keyWordsDetection;
    if (instance) {
        BOOL success = [instance startListeningWithSpeakerVerificationEnrollmentJsonOrPath:speakerVerificationEnrollmentJsonOrPath
                                                                     noExternalActivation:noExternalActivation
                                                                                duckOthers:duckOthers
                                                                            mixWithOthers:mixWithOthers
                                                                         defaultToSpeaker:defaultToSpeaker];
        // Idiomatic BOOL test (never compare BOOL to YES/false) and a plain
        // literal instead of a no-arg stringWithFormat.
        if (!success) {
            reject(@"StartError", @"Failed to start detection", nil);
        } else {
            resolve([NSString stringWithFormat:@"Started detection for instance: %@", instanceId]);
        }
    } else {
        reject(@"InstanceNotFound", [NSString stringWithFormat:@"No instance found with ID: %@", instanceId], nil);
    }
}

// Stops listening for the given detector instance.
RCT_EXPORT_METHOD(stopKeywordDetection:(NSString *)instanceId resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject)
{
    KeyWordsDetection *detector = [self.instances[instanceId] keyWordsDetection];
    if (!detector) {
        reject(@"InstanceNotFound", [NSString stringWithFormat:@"No instance found with ID: %@", instanceId], nil);
        return;
    }
    [detector stopListening];
    resolve([NSString stringWithFormat:@"Stopped detection for instance: %@", instanceId]);
}

// Pauses detection; when stopMic is YES the microphone is released as well.
RCT_EXPORT_METHOD(pauseDetection:(NSString *)instanceId
                  stopMic:(BOOL)stopMic
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    KeyWordsDetection *detector = [self.instances[instanceId] keyWordsDetection];
    if (!detector) {
        reject(@"InstanceNotFound", [NSString stringWithFormat:@"No instance found with ID: %@", instanceId], nil);
        return;
    }
    [detector pauseDetectionWithStopMic:stopMic];
    NSString *micState = stopMic ? @"YES" : @"NO";
    resolve([NSString stringWithFormat:@"Paused detection for instance: %@ (stopMic=%@)", instanceId, micState]);
}

// Resumes a previously paused detector instance.
RCT_EXPORT_METHOD(unPauseDetection:(NSString *)instanceId
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    KeyWordsDetection *detector = [self.instances[instanceId] keyWordsDetection];
    if (!detector) {
        reject(@"InstanceNotFound", [NSString stringWithFormat:@"No instance found with ID: %@", instanceId], nil);
        return;
    }
    [detector unPauseDetection];
    resolve([NSString stringWithFormat:@"Unpaused detection for instance: %@", instanceId]);
}

// Stops and removes a detector instance from the registry.
RCT_EXPORT_METHOD(destroyInstance:(NSString *)instanceId resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject)
{
    KeyWordsDetectionWrapper *holder = self.instances[instanceId];
    if (!holder) {
        reject(@"InstanceNotFound", [NSString stringWithFormat:@"No instance found with ID: %@", instanceId], nil);
        return;
    }
    // Stop audio capture before dropping the last strong reference.
    [holder.keyWordsDetection stopListening];
    [self.instances removeObjectForKey:instanceId];
    resolve([NSString stringWithFormat:@"Destroyed instance: %@", instanceId]);
}

// Keeping all APIs even if not called in JS yet

// Resolves with the model name currently loaded in the given instance.
RCT_EXPORT_METHOD(getKeywordDetectionModel:(NSString *)instanceId resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject)
{
    KeyWordsDetection *detector = [self.instances[instanceId] keyWordsDetection];
    if (!detector) {
        reject(@"InstanceNotFound", [NSString stringWithFormat:@"No instance found with ID: %@", instanceId], nil);
        return;
    }
    resolve([detector getKeywordDetectionModel]);
}

// Resolves with the path of the instance's recorded WAV file.
RCT_EXPORT_METHOD(getRecordingWav:(NSString *)instanceId resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject)
{
    KeyWordsDetection *detector = [self.instances[instanceId] keyWordsDetection];
    if (!detector) {
        reject(@"InstanceNotFound", [NSString stringWithFormat:@"No instance found with ID: %@", instanceId], nil);
        return;
    }
    resolve([detector getRecordingWav]);
}

// Resolves with { error, voiceProbability, lastTimeHumanVoiceHeard } read
// from the detector's voice-properties dictionary.
RCT_EXPORT_METHOD(getVoiceProps:(NSString *)instanceId resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject)
{
    KeyWordsDetection *detector = [self.instances[instanceId] keyWordsDetection];
    if (!detector) {
        reject(@"InstanceNotFound", [NSString stringWithFormat:@"No instance found with ID: %@", instanceId], nil);
        return;
    }
    @try {
        NSDictionary *voiceProps = [detector getVoiceProps];
        // Missing numeric keys become 0 via nil-messaging; a missing error
        // key is reported as "No Error".
        NSString *errorText = voiceProps[@"error"] ?: @"No Error";
        resolve(@{
            @"error": errorText,
            @"voiceProbability": @([voiceProps[@"voiceProbability"] floatValue]),
            @"lastTimeHumanVoiceHeard": @([voiceProps[@"lastTimeHumanVoiceHeard"] longLongValue]),
        });
    } @catch (NSException *exception) {
        reject(@"VoicePropsError", [NSString stringWithFormat:@"Failed to get voice properties: %@", exception.reason], nil);
    }
}

// Start/stop silent VAD (iOS only)
RCT_EXPORT_METHOD(startSilentVADDetection:(NSString *)instanceId resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject)
{
    KeyWordsDetection *detector = [self.instances[instanceId] keyWordsDetection];
    if (!detector) {
        reject(@"InstanceNotFound", @"No instance found", nil);
        return;
    }
    if ([detector startSilentListening]) {
        resolve(@"Started silent VAD detection");
    } else {
        reject(@"StartError", @"Failed to start silent VAD detection", nil);
    }
}

// Stops silent VAD listening for the given instance.
RCT_EXPORT_METHOD(stopSilentVADDetection:(NSString *)instanceId resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject)
{
    KeyWordsDetection *detector = [self.instances[instanceId] keyWordsDetection];
    if (!detector) {
        reject(@"InstanceNotFound", @"No instance found", nil);
        return;
    }
    [detector stopSilentListening];
    resolve(@"Stopped silent VAD detection");
}

// Start/stop explicit VAD
RCT_EXPORT_METHOD(startVADDetection:(NSString *)instanceId resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject)
{
    KeyWordsDetection *detector = [self.instances[instanceId] keyWordsDetection];
    if (!detector) {
        reject(@"InstanceNotFound", @"No instance found", nil);
        return;
    }
    if ([detector startVADListening]) {
        resolve(@"Started VAD detection");
    } else {
        reject(@"StartError", @"Failed to start VAD detection", nil);
    }
}

// Stops explicit VAD listening for the given instance.
RCT_EXPORT_METHOD(stopVADDetection:(NSString *)instanceId resolver:(RCTPromiseResolveBlock)resolve rejecter:(RCTPromiseRejectBlock)reject)
{
    KeyWordsDetection *detector = [self.instances[instanceId] keyWordsDetection];
    if (!detector) {
        reject(@"InstanceNotFound", @"No instance found", nil);
        return;
    }
    [detector stopVADListening];
    resolve(@"Stopped VAD detection");
}

// Updates the VAD threshold and analysis window (milliseconds) for an
// instance. Rejects if the instance is unknown or the update fails.
RCT_EXPORT_METHOD(setVADParams:(NSString *)instanceId
                  threshold:(float)threshold
                  msWindow:(NSInteger)msWindow
                  resolver:(RCTPromiseResolveBlock)resolve
                  rejecter:(RCTPromiseRejectBlock)reject)
{
    KeyWordsDetectionWrapper *wrapper = self.instances[instanceId];
    if (wrapper && wrapper.keyWordsDetection) {
        NSError *err = nil;
        BOOL ok = [wrapper.keyWordsDetection setVADParamsWithThreshold:threshold
                                                              msWindow:msWindow
                                                                 error:&err];
        // Check the BOOL return, not the error pointer: per the Cocoa error
        // convention err is undefined on success, so a stale non-nil err must
        // not be treated as failure.
        if (!ok) {
            reject(@"VADParamsError",
                   err ? err.localizedDescription : @"Failed to set VAD params",
                   err);
        } else {
            resolve(@"VAD params updated");
        }
    } else {
        reject(@"InstanceNotFound", @"No instance found", nil);
    }
}

// You can add more methods here as needed, ensuring they use the instanceId

@end
