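# Gradio demo: classify input text as human-written or AI-generated
# using a fine-tuned sequence-classification model and BiScope test examples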
import gradio as gr
from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline
from datasets import load_dataset

# ✅ Use your fine-tuned model (after running train.py)
MODEL = "yagnik12/AI_Text_Detecter_HanxiGuo_BiScope-Data"

# Load the fine-tuned tokenizer and sequence-classification model from the Hugging Face Hub
tokenizer = AutoTokenizer.from_pretrained(MODEL)
model = AutoModelForSequenceClassification.from_pretrained(MODEL)

# return_all_scores=True makes the pipeline return a score for every label,
# so both the human and AI probabilities can be reported
# (newer transformers versions deprecate this flag in favor of top_k=None)
detector = pipeline("text-classification", model=model, tokenizer=tokenizer, return_all_scores=True)

# Load some BiScope test examples for demo
biscope = load_dataset("HanxiGuo/BiScope_Data", split="test[:20]")

def detect_ai(text):
    # Run the classifier; truncation=True keeps long inputs within the model's max length
    results = detector(text, truncation=True)[0]
    # Map each label to its score; fine-tuned HF models default to LABEL_0 (human) and LABEL_1 (AI)
    scores = {r["label"]: r["score"] for r in results}
    human_score = scores.get("LABEL_0", scores.get("0", 0.0))
    ai_score = scores.get("LABEL_1", scores.get("1", 0.0))
    prediction = "🧑 Human" if human_score > ai_score else "🤖 AI"
    return {
        "Prediction": prediction,
        "Human Probability": round(human_score * 100, 2),
        "AI Probability": round(ai_score * 100, 2)
    }

# Build the Gradio interface: input box, detect button, JSON output, and example texts
with gr.Blocks() as demo:
    gr.Markdown("# AI vs Human Text Detector (BiScope Dataset)")

    with gr.Row():
        inp = gr.Textbox(lines=5, label="Input Text", placeholder="Enter text here...")
        out = gr.JSON(label="Detection Result")
    btn = gr.Button("Detect")
    btn.click(fn=detect_ai, inputs=inp, outputs=out)

    gr.Markdown("### 🔎 Try BiScope Dataset Examples")
    examples = biscope["text"]  # the "text" column of the sampled test rows
    gr.Examples(examples=examples, inputs=inp)

demo.launch()
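# Tip: demo.launch(share=True) creates a temporary public link when running outside Hugging Face Spaces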