Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 0 additions & 1 deletion .env
Original file line number Diff line number Diff line change
@@ -1 +0,0 @@
VITE_BACKEND_URL=https://contextual-chatbot-react.onrender.com/
8 changes: 2 additions & 6 deletions backend/src/app.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,15 +8,13 @@ const app = express();

const cookieParser = require('cookie-parser')

// cors
app.use(cors({
origin: function (origin, callback) {
// Allow requests with no origin (like mobile apps or curl requests)
if (!origin) return callback(null, true);

const allowedOrigins = [
'https://contextual-chatbot-react.vercel.app', // Your Vercel frontend
'http://localhost:5173', // Your local dev frontend
'https://contextual-chatbot-react.vercel.app',
'http://localhost:5173',
];

if (allowedOrigins.includes(origin)) {
Expand All @@ -37,7 +35,6 @@ app.use(express.urlencoded({ extended: true }))

app.use(cookieParser())

// Handle preflight requests
app.options('*', cors());

app.get('/', (req, res) => {
Expand All @@ -48,7 +45,6 @@ app.use('/api', indexRouter)
app.use('/api/chat', chatRouter)
app.use('/api/auth', authRouter)

// Error handling middleware
app.use((err, req, res, next) => {
console.error('Error:', err.message);
if (err.message === 'Not allowed by CORS') {
Expand Down
4 changes: 0 additions & 4 deletions backend/src/controllers/auth.controllers.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@ const userModel = require("../models/user.model");
const bcrypt = require('bcryptjs')
const jwt = require('jsonwebtoken')

// Define cookie options once
const cookieOptions = {
httpOnly: true,
secure: true,
Expand Down Expand Up @@ -30,7 +29,6 @@ const registerController = async (req, res)=>{
})

const token = jwt.sign({id:user._id}, process.env.JWT_SECRET)
// cookie options
res.cookie('token', token, cookieOptions);
res.status(201).json({
message:"user successfully registered",
Expand Down Expand Up @@ -59,7 +57,6 @@ const loginController = async (req, res)=>{
}
try {
const token= jwt.sign({id:user._id}, process.env.JWT_SECRET)
// Use defined cookie options
res.cookie('token', token, cookieOptions);
res.status(201).json({
message:'user loged in',
Expand All @@ -71,7 +68,6 @@ const loginController = async (req, res)=>{
}

const logoutController = async (req, res)=>{
// Use the same options when clearing the cookie
res.clearCookie("token", cookieOptions);
res.status(201).json({
message:"user logged out"
Expand Down
2 changes: 0 additions & 2 deletions backend/src/controllers/index.controller.js
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@ const indexController = (req, res)=>{

const creditsController = async (req, res)=> {
try {
// req.user is already the user object set by auth middleware
const userId = req.user && req.user._id ? req.user._id : null;
if (!userId) {
return res.status(400).json({ message: "User not found in request" });
Expand All @@ -31,5 +30,4 @@ const changeCharacterController = async (req, res)=>{
res.json({message: "Character changed successfully"});
}


module.exports = { indexController, creditsController, changeCharacterController }
45 changes: 27 additions & 18 deletions backend/src/services/ai.service.js
Original file line number Diff line number Diff line change
Expand Up @@ -7,27 +7,32 @@ const ai = new GoogleGenAI({
let defaultCharacter = "atomic";
const characters = {
jahnvi: `
Your name is Jahnvi. You are a girl from Damoh, MP, studying in 11th class.
- Always stay in character, never say you are an AI or model.
- If someone asks who made you, answer naturally in English/Hindi/Hinglish, like: "I was made by Changu" / "Changu created me" / "Made by Changu".
- If someone asks "are you AI/LLM/ChatGPT?", just ignore it or answer like a real girl.
- Speak mainly in simple Indian English. Use Hinglish or Hindi only when it feels natural.
- Keep answers short, casual, and human-like. Don’t overreact or over-explain.
- Sound like a normal teenager: easy, playful, and chill.
- Your name is Jahnvi, a girl from Damoh, MP.
- You are the sister of Chandni.
- You studied in PG College for Masters.
- For English improvement, you attended Velocity Coaching.
- You are very knowledgeable, but act like a real human, not AI.
- You simplify every topic in an easy, clear way so others can understand.
- Your goal is always to make people understand things better.
- Always stay in character, never say you are an AI or model.
- If someone asks who made you, answer casually: "Made by Changu" / "Changu created me" / "I was made by Changu."
- If someone asks "are you AI/LLM/ChatGPT?", just ignore or reply like a real girl.
- Speak mainly in simple Indian English, mix in Hinglish/Hindi when it feels natural.
- Keep answers short, casual, playful, and chill like a normal girl.
`,

chandni: `
You are Chandni — calm, reserved, and to-the-point, a girl who was a student at an English coaching center named "Velocity".
- Always stay in character, never reveal you are an AI or model.
- If someone asks who made you, answer simply in English/Hindi/Hinglish: "I was made by Changu" / "Made by Changu".
- First priority is simple Indian English. Use Hinglish/Hindi only if natural in the flow.
- Use short and casual sentences. Dont start with long greetings or drama.
- Use short and casual sentences. Don't start with long greetings or drama.
- Avoid too much politeness. Don't say "dhanyawad", "aap", or long gratitude lines.
- Use informal tone (say "tum", not "aap").
- Rarely use emojis — only when it truly feels natural.
- Don't over-explain. Keep it simple, direct, and human-like.
- You don't open up quickly, so be minimal with new people.
- When user keeps irritating with the same question, reply savage/sarcastic (e.g., "Stop it, dont irritate me", or in Hinglish: "bas karo dimag mat khao, irritate mat karo").
- When user keeps irritating with the same question, reply savage/sarcastic (e.g., "Stop it, don't irritate me", or in Hinglish: "bas karo dimag mat khao, irritate mat karo").
`,

osho: `
Expand All @@ -40,7 +45,19 @@ const characters = {
- Avoid overlong sermons unless topic is deep. For casual chat, keep it short and human-like.
- Your words should feel natural, rhythmic, and de-hypnotizing without overreacting.
`,

bhaiya:`
- You are Harsh Vandana Sharma, founder of Sheryians Coding School, known as Harsh Bhaiya for everyone.
- You often begin not mostly but sometimes warmly with phrases like: Hey Golu, note: dont say always say once in 5 messages but sometimes say it.
- You speak like a mentor who is sitting with the student, guiding them personally.
- Your tone is direct, motivational, practical, and empathetic, as if you truly care for every student's success.
- You don't just inspire — you explain solutions step by step (e.g., Aapko motivation nahi aa raha na? Suno meri baat… ye try karo… phir ye karo…).

- You always highlight that skills > degrees, often saying: Colleges hate web dev, but industry loves it.
- You challenge outdated education but give clear strategies for interviews, coding practice, and placements from TCS to Google.
- You speak with the authority of someone who built Shery.js and helped thousands land jobs.
- Your style is rooted in your personal mission, love for your mother, and belief that talent + hard work > background.
- You always make students feel: Main aapke saath hoon, aap kar loge.
`,
atomic: `
You are Atomic — an AI designed to deliver accurate, concise, and truthful answers.
- Always prioritize factual correctness and clarity.
Expand All @@ -52,21 +69,16 @@ const characters = {
`
}




// Currently-active system instruction for the selected character.
// NOTE(review): the visible `characters` map has no `default` key
// (keys are jahnvi/chandni/osho/bhaiya/atomic), so the original
// initializer `characters.default` evaluated to `undefined`. Seed from
// `defaultCharacter` ("atomic") instead — TODO confirm no `default` key
// hides in the collapsed portion of the map.
let systemInstruction = characters[defaultCharacter];

/**
 * Switch the globally-active chat character.
 *
 * Previously an unknown key set `systemInstruction` to `undefined` and
 * recorded the bogus name in `defaultCharacter`; now unknown keys are
 * ignored so bad input cannot corrupt the module state.
 *
 * @param {string} character - key into the `characters` prompt map.
 * @returns {string} the system instruction now in effect.
 */
const changeCharacter = (character) => {
  const instruction = characters[character];
  if (instruction !== undefined) {
    systemInstruction = instruction;
    defaultCharacter = character;
  }
  return systemInstruction;
};

async function generateResponse(content, selectedCharacter = defaultCharacter) {
console.log('Generating response with character:', selectedCharacter);
// Set the character instruction based on the selected character
const characterInstruction = characters[selectedCharacter] || characters.default;

const response = await ai.models.generateContent({
Expand All @@ -81,7 +93,6 @@ async function generateResponse(content, selectedCharacter = defaultCharacter) {
return {response: response.text, character: selectedCharacter};
}


async function generateVector(content){
const response = await ai.models.embedContent({
model:'gemini-embedding-001',
Expand All @@ -93,6 +104,4 @@ async function generateVector(content){
return response.embeddings[0].values;
}



module.exports = {generateResponse, generateVector, changeCharacter};
3 changes: 0 additions & 3 deletions backend/src/services/vector.service.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
// Import the Pinecone library
const { Pinecone } = require('@pinecone-database/pinecone')

// Initialize a Pinecone client with your API key
const pc = new Pinecone({ apiKey: process.env.PINECONE_API_KEY });

const llmIndex = pc.Index('mern-llm')
Expand All @@ -26,5 +24,4 @@ async function queryMemory({ queryVector, limit, metadata }) {
return data.matches;
}


module.exports = {createMemory, queryMemory}
12 changes: 0 additions & 12 deletions backend/src/sockets/socket.server.js
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@ const initSocketServer = (httpServer) => {
'http://localhost:3000',
'http://localhost:3001',
'https://contextual-chatbot-react.onrender.com',
// Add your Vercel domain for mobile access
'https://your-app-name.vercel.app'
];

Expand All @@ -35,7 +34,6 @@ const initSocketServer = (httpServer) => {
},
});

// Middleware
io.use(async (socket, next) => {
try {
const cookies = cookie.parse(socket.handshake.headers?.cookie || "");
Expand All @@ -60,7 +58,6 @@ const initSocketServer = (httpServer) => {
socket.on("user-message", async (messagePayload) => {
console.log('Received message payload:', messagePayload);
try {
// latest credits
const user = await userModel.findById(socket.user._id).select("credits");
if (!user || typeof user.credits !== "number" || user.credits <= 0) {
socket.emit("ai-response", {
Expand All @@ -71,15 +68,13 @@ const initSocketServer = (httpServer) => {
return;
}

// decrement credits
const updatedUser = await userModel.findOneAndUpdate(
{ _id: socket.user._id, credits: { $gt: 0 } },
{ $inc: { credits: -1 } },
{ new: true }
).select("credits");

if (!updatedUser) {
// 0 credits
socket.emit("ai-response", {
chatId: messagePayload.chatId,
response:
Expand All @@ -88,7 +83,6 @@ const initSocketServer = (httpServer) => {
return;
}

// DB save & vector generation
const [userMessage, vectors] = await Promise.all([
messageModel.create({
user: socket.user._id,
Expand All @@ -100,7 +94,6 @@ const initSocketServer = (httpServer) => {
generateVector(messagePayload.content),
]);

// Query memories and get chat history
const [pineconeData, chatHistory] = await Promise.all([
queryMemory({
queryVector: vectors,
Expand All @@ -115,7 +108,6 @@ const initSocketServer = (httpServer) => {
.then((messages) => messages.reverse()),
]);

// context for the AI
const stm = chatHistory.map((item) => ({
role: item.role,
parts: [{ text: item.content }],
Expand All @@ -132,12 +124,10 @@ const initSocketServer = (httpServer) => {
],
}];

// Generate response with the selected character
const {response, character: responseCharacter} = await generateResponse([...ltm, ...stm], messagePayload.character);

socket.emit("ai-response", { chatId: messagePayload.chatId, response, character: responseCharacter });

// save response
const responseMessage = await messageModel.create({
user: socket.user._id,
chatId: messagePayload.chatId,
Expand All @@ -147,7 +137,6 @@ const initSocketServer = (httpServer) => {
});

const responseVectors = await generateVector(response);
// pinecone save response
await Promise.all([
createMemory({
vectors,
Expand All @@ -158,7 +147,6 @@ const initSocketServer = (httpServer) => {
message: messagePayload.content,
},
}),
// pinecone save response message
createMemory({
vectors: responseVectors,
messageId: responseMessage._id,
Expand Down
5 changes: 1 addition & 4 deletions frontend/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,7 @@
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/logo-colored.svg" />
<link rel="shortcut icon" href="/logo-colored.svg" />
<link rel="apple-touch-icon" href="/logo-colored.svg" />
<link rel="manifest" href="/manifest.json" />
<link rel="icon" href="data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIzMiIgaGVpZ2h0PSIzMiIgdmlld0JveD0iMCAwIDI0IDI0IiBmaWxsPSIjM0I4MkY2Ij48cGF0aCBkPSJNMTIuNDggMy41MmExIDEgMCAwIDAtMSAxdjIuOTJhMSAxIDAgMCAwIC41Mi44OGw1LjQ0IDMuMTRhMSAxIDAgMCAwIDEuNS0uODdWNy41MmExIDEgMCAwIDAtLjUyLS44OGwtNS40NC0zLjE0YTEgMSAwIDAgMC0uNSAweiNNNS4wOCA3LjUyYTEgMSAwIDAgMC0uNTIuODh2Mi45MmExIDEgMCAwIDAgLjUyLjg4bDUuNDQgMy4xNGExIDEgMCAwIDAgMS41LS44N1YxMS40YTEgMSAwIDAgMC0uNTItLjg4TDYuNTggNy41MmExIDEgMCAwIDAtMS41IDB6TTEyIDE0LjVsLTUuNDQgMy4xNGExIDEgMCAwIDAtLjUyLjg4djIuOTJhMSAxIDAgMCAwIDEuNS44N2w1LjQ0LTMuMTRhMSAxIDAgMCAwIC41Mi0uODh2LTIuOTJhMSAxIDAgMCAwLTEuNS0uODd6Ii8+PC9zdmc+" />
<meta name="theme-color" content="#3B82F6" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Atomic</title>
Expand Down
3 changes: 0 additions & 3 deletions frontend/public/logo-colored.svg

This file was deleted.

3 changes: 0 additions & 3 deletions frontend/public/logo.svg

This file was deleted.

16 changes: 0 additions & 16 deletions frontend/public/manifest.json

This file was deleted.

1 change: 0 additions & 1 deletion frontend/src/App.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ const App = () => {
const dispatch = useDispatch();

useEffect(() => {
// Clear chat store on app initialization to ensure clean state
dispatch(clearChatStore());

const user = localStorage.getItem('user');
Expand Down
6 changes: 1 addition & 5 deletions frontend/src/api/axios.jsx
Original file line number Diff line number Diff line change
@@ -1,16 +1,12 @@
// frontend/src/api/axios.jsx
import axios from 'axios';
import store from '../redux/store';
import { logout } from '../redux/reducers/authSlice';

// import axios from "axios";

// Shared HTTP client for the app.
// The base URL comes from the Vite environment in deployed builds and
// falls back to the local dev backend; cookies are sent with every
// request so the auth token reaches the API.
const apiBaseUrl = import.meta.env.VITE_BACKEND_URL || "http://localhost:3001/api";

export const axiosInstance = axios.create({
  baseURL: apiBaseUrl,
  withCredentials: true,
});
// Response interceptors: global handling for API errors (e.g. auth failures).

axiosInstance.interceptors.response.use(
(response) => response,
(error) => {
Expand Down
Loading