# apps/wp_invoices/services/llm_client.py
import base64
import json
import importlib

def vision_json(*, model: str, prompt: str, file_bytes: bytes, filename: str, mimetype: str):
    """
    Run a vision + JSON extraction request, preferring the shared client.

    Tries ``shared.openai_manager`` first, probing several common function /
    method names and signatures. If no compatible entry point is found (or the
    shared manager fails), falls back to a direct OpenAI call that requests
    JSON output.

    Args:
        model: Model identifier to pass through to the client.
        prompt: Instruction text for the vision request.
        file_bytes: Raw bytes of the document/image to analyze.
        filename: Original filename (only used by managers that accept it).
        mimetype: MIME type of ``file_bytes`` (e.g. ``image/png``).

    Returns:
        dict: The parsed JSON response.

    Raises:
        Exception: Re-raises the shared manager's error if both the manager
            and the direct fallback fail.
    """
    # Candidate entry points, most specific first. run_vision_json with the
    # full keyword signature is exactly _try_variants' first attempt, so the
    # shared-manager "happy path" behaves as before.
    _FUNCTION_NAMES = ("run_vision_json", "vision_json", "run_vision",
                      "call_vision_json", "run_vision_bytes")
    _METHOD_NAMES = ("run_vision_json", "vision_json", "run_vision",
                     "call_vision_json")

    try:
        mgr = importlib.import_module("shared.openai_manager")
    except ModuleNotFoundError:
        # shared.openai_manager does not exist -> go straight to the fallback.
        return _fallback_openai_call(model, prompt, file_bytes, mimetype)

    try:
        # a) Module-level functions.
        for fname in _FUNCTION_NAMES:
            fn = getattr(mgr, fname, None)
            if callable(fn):
                return _try_variants(fn, model, prompt, file_bytes, filename, mimetype)

        # b) Manager class exposing one of the known methods.
        manager_cls = getattr(mgr, "OpenAIManager", None)
        if manager_cls is not None:
            inst = manager_cls()
            for meth in _METHOD_NAMES:
                fn = getattr(inst, meth, None)
                if callable(fn):
                    return _try_variants(fn, model, prompt, file_bytes, filename, mimetype)
    except Exception as exc:
        # The shared manager failed: try the fallback before re-raising the
        # manager's original error (the fallback's own error is discarded).
        try:
            return _fallback_openai_call(model, prompt, file_bytes, mimetype)
        except Exception:
            raise exc

    # Nothing in the shared manager matched -> direct OpenAI call.
    return _fallback_openai_call(model, prompt, file_bytes, mimetype)


def _to_dict(result):
    if isinstance(result, str):
        return json.loads(result)
    return result


def _try_variants(fn, model, prompt, file_bytes, filename, mimetype):
    """
    Call *fn* with progressively simpler argument combinations until one
    matches its signature; a result returned as a JSON string is parsed.

    Raises:
        RuntimeError: If no known signature fits, so the caller can move on
            to the direct-OpenAI fallback.
    """
    attempts = (
        # 1) Full explicit keyword signature.
        lambda: fn(model=model, prompt=prompt, file_bytes=file_bytes,
                   filename=filename, mimetype=mimetype),
        # 2) Same, but without filename.
        lambda: fn(model=model, prompt=prompt, file_bytes=file_bytes,
                   mimetype=mimetype),
        # 3) Positional (prompt, bytes, mimetype).
        lambda: fn(prompt, file_bytes, mimetype),
        # 4) Positional (prompt, bytes) only.
        lambda: fn(prompt, file_bytes),
    )
    for attempt in attempts:
        try:
            result = attempt()
        except TypeError:
            # Signature mismatch -> try the next combination.
            continue
        return json.loads(result) if isinstance(result, str) else result

    raise RuntimeError("Ninguna firma conocida coincidió en shared.openai_manager")


def _fallback_openai_call(model: str, prompt: str, file_bytes: bytes, mimetype: str):
    """
    Fallback directo usando chat.completions:
    - Sin temperature
    - Primero intenta response_format={"type":"json_object"}
    - Si falla, reintenta sin response_format forzando JSON vía prompt
    """
    try:
        from openai import OpenAI, BadRequestError
    except Exception as e:
        raise ImportError(
            "openai SDK no disponible y no se encontró un manager compatible. Instala openai o implementa run_vision_json."
        ) from e

    import base64, json
    client = OpenAI()
    b64 = base64.b64encode(file_bytes).decode("utf-8")
    data_url = f"data:{mimetype};base64,{b64}"

    content = [
        {"type": "text", "text": prompt},
        {"type": "image_url", "image_url": {"url": data_url}},
    ]

    # 1) Con response_format
    try:
        resp = client.chat.completions.create(
            model=model,
            messages=[{"role": "user", "content": content}],
            response_format={"type": "json_object"},
        )
        text = resp.choices[0].message.content
        return json.loads(text) if isinstance(text, str) else text
    except BadRequestError:
        pass
    except Exception:
        pass

    # 2) Sin response_format, prompt reforzado
    content_fb = [
        {"type": "text", "text": "Responde SOLO con un JSON válido. No agregues texto fuera del JSON.\n\n" + prompt},
        {"type": "image_url", "image_url": {"url": data_url}},
    ]
    resp = client.chat.completions.create(
        model=model,
        messages=[{"role": "user", "content": content_fb}],
    )
    text = resp.choices[0].message.content
    try:
        return json.loads(text) if isinstance(text, str) else text
    except Exception:
        return {"_raw": text}

