+
MayaCode Login Verification
+
Your login OTP is:
+
${otp}
+
This OTP will expire in 10 minutes.
+
If you did not request this OTP, please ignore this email.
+
+
This is an automated message from MayaCode.
Please do not reply to this email as this inbox is not monitored.
+
+ `,
+ });
+
+ if (error) {
+ console.error('❌ Resend error:', error);
+ return res.status(500).json({
+ success: false,
+ message: "Failed to send email. Please try again."
+ });
+ }
+
+ console.log('✅ Email sent via Resend:', data);
+
+ res.json({
+ success: true,
+ message: "OTP sent to your email successfully"
+ });
+ } catch (emailError) {
+ console.error('❌ Email sending error:', emailError);
+
+ return res.status(500).json({
+ success: false,
+ message: "Failed to send OTP. Please try again."
+ });
+ }
+ } catch (error) {
+ console.error('Request OTP Error:', error);
+ res.status(500).json({
+ success: false,
+ message: "Internal server error"
+ });
+ }
+};
+
/**
 * Verify an emailed OTP, auto-provisioning a minimal account on first login,
 * and issue a 7-day JWT on success.
 *
 * Fixes: the OTP was compared with `!==` without coercion, so a client that
 * sends the OTP as a JSON number never matched the stored string. The email
 * is also normalized once instead of lower-casing it at every query.
 */
const verifyOtp = async (req, res) => {
  try {
    const { email, otp } = req.body;

    if (!email || !otp) {
      return res.status(400).json({
        success: false,
        message: "Email and OTP are required"
      });
    }

    // Normalize once so every lookup/delete uses the same key.
    const normalizedEmail = String(email).toLowerCase();

    // Find OTP in database
    const otpRecord = await Otp.findOne({ email: normalizedEmail });
    if (!otpRecord) {
      return res.status(400).json({
        success: false,
        message: "No OTP found for this email"
      });
    }

    // Expired OTPs are deleted so they cannot be retried later.
    if (new Date() > otpRecord.expiresAt) {
      await Otp.deleteOne({ email: normalizedEmail });
      return res.status(400).json({
        success: false,
        message: "OTP has expired. Please request a new one."
      });
    }

    // Coerce both sides to string: clients may send the OTP as a number.
    if (String(otpRecord.otp) !== String(otp)) {
      return res.status(400).json({
        success: false,
        message: "Invalid OTP"
      });
    }

    // One-time use: delete OTP after successful verification.
    await Otp.deleteOne({ email: normalizedEmail });

    // Check if user exists, if not create one with minimal required fields.
    let user = await UserProfile.findOne({ email: normalizedEmail });
    if (!user) {
      try {
        const userData = {
          email: normalizedEmail,
          name: 'Guest User',
          userType: 'Other' // Default user type
        };

        console.log('Creating user with data:', userData);
        user = await UserProfile.create(userData);
        console.log('User created successfully:', user._id);
      } catch (createError) {
        console.error('User creation error:', createError);
        return res.status(500).json({
          success: false,
          message: "Account creation failed. Please contact support."
        });
      }
    }

    const token = jwt.sign(
      { email: user.email, id: user._id },
      process.env.JWT_SECRET_VERIFY,
      { expiresIn: '7d' }
    );

    return res.json({
      success: true,
      message: "Email verified successfully",
      token: token,
      user: {
        email: user.email,
        id: user._id,
        name: user.name,
        userType: user.userType
      }
    });
  } catch (error) {
    console.error('Verify OTP Error:', error);
    res.status(500).json({
      success: false,
      message: "Internal server error"
    });
  }
};
+
// Update user profile after email verification.
// The user id comes from the JWT payload attached by the verifyToken
// middleware; only `name` is mandatory, other fields keep their stored
// value when omitted from the request.
const updateProfile = async (req, res) => {
  try {
    const { name, age, location, userType, languages } = req.body;
    const userId = req.user.id; // Set by verifyToken from the JWT payload

    if (!name) {
      return res.status(400).json({ message: 'Name is required' });
    }

    const user = await UserProfile.findById(userId);
    if (!user) {
      return res.status(404).json({ message: 'User not found' });
    }

    // Use ?? instead of || so valid falsy inputs (e.g. an empty languages
    // array is truthy, but age 0 or location '' are not) are not silently
    // replaced with the previously stored values.
    const updatedUser = await UserProfile.findByIdAndUpdate(
      userId,
      {
        name,
        age: age ?? user.age,
        location: location ?? user.location,
        userType: userType ?? user.userType,
        languages: languages ?? user.languages
      },
      { new: true, runValidators: true } // validate updates like the other handlers do
    );

    res.json({
      message: 'Profile updated successfully',
      user: {
        id: updatedUser._id,
        email: updatedUser.email,
        name: updatedUser.name,
        age: updatedUser.age,
        location: updatedUser.location,
        userType: updatedUser.userType,
        languages: updatedUser.languages,
        isProfileComplete: true
      }
    });
  } catch (err) {
    console.error('updateProfile error:', err);
    res.status(500).json({ message: 'Failed to update profile' });
  }
};
+
// Express middleware: validates the Bearer token from the Authorization
// header and attaches the decoded JWT payload to req.user before passing
// control to the next handler. Responds 401 on a missing or invalid token.
const verifyToken = async (req, res, next) => {
  try {
    const authHeader = req.header('Authorization');
    const token = authHeader?.replace('Bearer ', '');

    if (!token) {
      return res.status(401).json({ message: 'No token provided' });
    }

    req.user = jwt.verify(token, process.env.JWT_SECRET_VERIFY);
    next();
  } catch (err) {
    console.error('Token verification error:', err);
    res.status(401).json({ message: 'Invalid token' });
  }
};
+
// Verify token endpoint: decodes the Bearer token, loads the matching
// profile, and returns a trimmed user object. 401 on bad/missing token,
// 404 when the token is valid but no profile exists.
const verifyTokenEndpoint = async (req, res) => {
  try {
    const rawHeader = req.header('Authorization');
    const token = rawHeader?.replace('Bearer ', '');

    if (!token) {
      return res.status(401).json({ message: 'No token provided' });
    }

    const payload = jwt.verify(token, process.env.JWT_SECRET_VERIFY);
    const profile = await UserProfile.findById(payload.id);

    if (!profile) {
      return res.status(404).json({ message: 'User not found' });
    }

    const { _id, email, name, userType, age, location, languages } = profile;
    res.json({
      user: { id: _id, email, name, userType, age, location, languages }
    });
  } catch (err) {
    console.error('Token verification error:', err);
    res.status(401).json({ message: 'Invalid token' });
  }
};
+
// Public controller API: email-check, OTP request/verify flow, profile
// updates, the JWT middleware, and the token-verification endpoint.
module.exports = {
  checkEmailInUse,
  requestOtp,
  verifyOtp,
  updateProfile,
  verifyToken,
  verifyTokenEndpoint
};
diff --git a/Backend/src/controllers/imageController.js b/Backend/src/controllers/imageController.js
new file mode 100644
index 0000000..1b1796b
--- /dev/null
+++ b/Backend/src/controllers/imageController.js
@@ -0,0 +1,83 @@
+const path = require('path');
+const fs = require('fs');
+
+// Serve images by category and number
+exports.getImage = async (req, res) => {
+ try {
+ const { category, number } = req.params;
+
+ // Define image categories and their file paths
+ const imageCategories = {
+ 'help-posts': 'help-posts',
+ 'stories': 'stories',
+ 'unity': 'unity',
+ 'inspirational': 'help-posts' // Can use help-posts placements
+ };
+
+ // Get the category folder
+ const categoryFolder = imageCategories[category];
+ if (!categoryFolder) {
+ return res.status(404).json({ message: 'Category not found' });
+ }
+
+ // Construct the image path - assuming files are named like help-posts2.png for number 2
+ let imagePath;
+ if (number === 1) {
+ imagePath = path.join(__dirname, '..', '..', 'public', 'images', `${categoryFolder}.png`);
+ } else {
+ imagePath = path.join(__dirname, '..', '..', 'public', 'images', `${categoryFolder}${number}.png`);
+ }
+
+ // Check if image exists
+ if (!fs.existsSync(imagePath)) {
+ return res.status(404).json({ message: 'Image not found', path: imagePath });
+ }
+
+ // Send the image without any transformation
+ res.type('image/png');
+ res.sendFile(imagePath);
+ } catch (error) {
+ console.error('Error serving image:', error);
+ res.status(500).json({ message: 'Error serving image' });
+ }
+};
+
// List available images for a category.
// Scans public/images/<folder> and returns { images, category, count };
// a missing folder is treated as an empty category, not an error.
exports.listImages = async (req, res) => {
  try {
    const { category } = req.params;

    // Maps the public category name to the folder on disk; 'inspirational'
    // shares the help-posts assets.
    const imageCategories = {
      'help-posts': 'help-posts',
      'stories': 'stories',
      'unity': 'unity',
      'inspirational': 'help-posts'
    };

    const categoryFolder = imageCategories[category];
    if (!categoryFolder) {
      return res.status(404).json({ message: 'Category not found' });
    }

    const categoryPath = path.join(__dirname, '..', '..', 'public', 'images', categoryFolder);

    if (!fs.existsSync(categoryPath)) {
      return res.json({ images: [], category, count: 0 });
    }

    // Read directory and filter for images
    const files = fs.readdirSync(categoryPath);
    const images = files
      .filter(file => file.endsWith('.png') || file.endsWith('.jpg') || file.endsWith('.jpeg'))
      .map(file => ({
        filename: file,
        // NOTE(review): the URL strips the extension, but getImage above only
        // resolves flat `<folder><number>.png` files — entries listed here
        // (per-category subfolders, .jpg/.jpeg) may produce dead URLs. Confirm
        // against the actual public/images layout.
        url: `/api/images/${category}/${file.replace(/\.(png|jpg|jpeg)$/i, '')}`
      }));

    res.json({ images, category, count: images.length });
  } catch (error) {
    console.error('Error listing images:', error);
    res.status(500).json({ message: 'Error listing images' });
  }
};
+
diff --git a/Backend/src/controllers/postController.js b/Backend/src/controllers/postController.js
new file mode 100644
index 0000000..b3c65d4
--- /dev/null
+++ b/Backend/src/controllers/postController.js
@@ -0,0 +1,162 @@
+const Post = require('../models/Post');
+const path = require('path');
+const fs = require('fs');
+const https = require('https');
+const http = require('http');
+const { uploadImagesToCloudinary } = require('../utils/cloudinaryUploader');
+
// Create a new post.
// Images may arrive two ways: multer file uploads (req.files, current
// multipart clients) or base64 data URLs in the JSON body (body.images).
// Note the deliberately asymmetric error handling: a Cloudinary failure
// aborts the request on the file-upload path but falls back to storing
// the original base64 data on the JSON path.
exports.createPost = async (req, res) => {
  console.log(`📝 Creating post: ${req.body.title || 'Untitled'}`);

  try {
    let body = { ...req.body };

    // Handle images - either from base64 (current frontend) or file uploads (multer)
    if (req.files && req.files.length > 0) {
      console.log(`🖼️ Processing ${req.files.length} uploaded files...`);

      // Convert each multer buffer into a data URL for the Cloudinary helper.
      const base64Images = req.files.map(file => {
        const base64 = file.buffer.toString('base64');
        return `data:${file.mimetype};base64,${base64}`;
      });

      try {
        body.images = await uploadImagesToCloudinary(base64Images);
        console.log('✅ Images uploaded to Cloudinary');
      } catch (uploadError) {
        // File-upload path: no usable fallback, so fail the whole request.
        console.error('❌ Error uploading to Cloudinary:', uploadError.message);
        throw uploadError;
      }
    } else if (body.images && Array.isArray(body.images) && body.images.length > 0) {
      console.log(`🖼️ Processing ${body.images.length} base64 images...`);

      try {
        body.images = await uploadImagesToCloudinary(body.images);
        console.log('✅ Images processed successfully');
      } catch (uploadError) {
        console.error('❌ Error processing images:', uploadError.message || uploadError);
        // Don't throw - continue with original images
        console.log('⚠️ Continuing with original image data');
      }
    }

    const post = new Post(body);
    await post.save();
    console.log('✅ Post created successfully');
    res.status(201).json(post);
  } catch (error) {
    console.error('❌ Error creating post:', error.message);
    // Mongoose validation problems are client errors; everything else is a 500.
    if (error.name === 'ValidationError') {
      res.status(400).json({ message: error.message });
    } else {
      res.status(500).json({ message: error.message || 'Something went wrong!' });
    }
  }
};
+
+// Get all posts
+exports.getPosts = async (req, res) => {
+ console.log(`HTTP ${req.method} ${req.url} - Get Posts`, req.query);
+ try {
+ const { type } = req.query; // Get the type from query parameters
+ let query = {};
+
+ if (type) {
+ // Add type filter to the query if type is provided
+ query.type = type;
+ }
+
+ const posts = await Post.find(query).sort({ createdAt: -1 }); // Apply the query
+ res.json(posts);
+ } catch (error) {
+ res.status(500).json({ message: error.message });
+ }
+};
+
+// Get a single post
+exports.getPost = async (req, res) => {
+ console.log(`HTTP ${req.method} ${req.url} - Get Post`, req.params);
+ try {
+ const { id } = req.params;
+
+ const post = await Post.findById(id);
+ if (!post) {
+ return res.status(404).json({ message: 'Post not found' });
+ }
+
+ res.json(post);
+ } catch (error) {
+ res.status(500).json({ message: error.message });
+ }
+};
+
+// Update a post
+exports.updatePost = async (req, res) => {
+ console.log(`HTTP ${req.method} ${req.url} - Update Post`, req.params, req.body);
+ try {
+ const { id } = req.params;
+
+ const post = await Post.findByIdAndUpdate(
+ id,
+ req.body,
+ { new: true, runValidators: true }
+ );
+
+ if (!post) {
+ return res.status(404).json({ message: 'Post not found' });
+ }
+
+ res.json(post);
+ } catch (error) {
+ res.status(400).json({ message: error.message });
+ }
+};
+
+// Delete a post
+exports.deletePost = async (req, res) => {
+ console.log(`HTTP ${req.method} ${req.url} - Delete Post`, req.params);
+ try {
+ const { id } = req.params;
+
+ const post = await Post.findByIdAndDelete(id);
+ if (!post) {
+ return res.status(404).json({ message: 'Post not found' });
+ }
+
+ res.json({ message: 'Post deleted successfully' });
+ } catch (error) {
+ res.status(500).json({ message: error.message });
+ }
+};
+
+// Get posts by phone
+exports.getUserPosts = async (req, res) => {
+ console.log(`HTTP ${req.method} ${req.url} - Get User Posts`, req.params);
+ try {
+ const { phone } = req.params;
+
+ const posts = await Post.find({ phone }).sort({ createdAt: -1 });
+ res.json(posts);
+ } catch (error) {
+ res.status(500).json({ message: error.message });
+ }
+};
+
+// Get post images (lazy loading support)
+exports.getPostImages = async (req, res) => {
+ console.log(`HTTP ${req.method} ${req.url} - Get Post Images`, req.params);
+ try {
+ const { id } = req.params;
+
+ const post = await Post.findById(id);
+ if (!post) {
+ return res.status(404).json({ message: 'Post not found' });
+ }
+
+ res.json({ images: post.images || [] });
+ } catch (error) {
+ res.status(500).json({ message: error.message });
+ }
+};
\ No newline at end of file
diff --git a/Backend/controllers/userController.js b/Backend/src/controllers/userController.js
similarity index 55%
rename from Backend/controllers/userController.js
rename to Backend/src/controllers/userController.js
index 6d97e17..d3090c8 100644
--- a/Backend/controllers/userController.js
+++ b/Backend/src/controllers/userController.js
@@ -1,14 +1,28 @@
const UserProfile = require('../models/User');
+const jwt = require('jsonwebtoken');
-// Get user profile by phone
-exports.getUserByPhone = async (req, res) => {
// Extract and verify the JWT carried in the Authorization header.
// Returns the decoded payload; throws 'No authorization token provided'
// when the header is absent or not a Bearer token (callers map this to 401).
const getUserFromToken = (req) => {
  const header = req.headers.authorization;
  const isBearer = header?.startsWith('Bearer ');

  if (!isBearer) {
    throw new Error('No authorization token provided');
  }

  const rawToken = header.slice('Bearer '.length);
  return jwt.verify(rawToken, process.env.JWT_SECRET_VERIFY);
};
+
+// Get user profile by email
+exports.getUserByEmail = async (req, res) => {
+ console.log(`HTTP ${req.method} ${req.url} - Get User By Email`, req.params);
try {
- const { phone } = req.params;
- console.log('Get User - Request:', { phone });
+ const { email } = req.params;
+ console.log('Get User - Request:', { email });
- const user = await UserProfile.findOne({ phone });
+ const user = await UserProfile.findOne({ email });
if (!user) {
- console.log('Get User - User not found:', phone);
+ console.log('Get User - User not found:', email);
return res.status(404).json({ message: 'User not found' });
}
@@ -24,8 +38,8 @@ exports.getUserByPhone = async (req, res) => {
exports.createOrUpdateUser = async (req, res) => {
try {
console.log('Create/Update User - Starting process');
- const { phone } = req.params;
- console.log('Create/Update User - Phone:', phone);
+ const { email } = req.params;
+ console.log('Create/Update User - Email:', email);
// Get values from request body
const updates = {
@@ -34,6 +48,7 @@ exports.createOrUpdateUser = async (req, res) => {
age: req.body.age,
languages: req.body.languages || [],
profileImage: req.body.profileImage,
+ location: req.body.location,
lastActive: new Date()
};
@@ -56,12 +71,13 @@ exports.createOrUpdateUser = async (req, res) => {
// Clean up updates
console.log('Create/Update User - Cleaning updates');
const cleanedUpdates = {
- phone, // Add phone to the updates
+ email, // Add email to the updates
name: updates.name,
age: updates.age,
userType: updates.userType,
languages: updates.languages,
profileImage: updates.profileImage,
+ location: updates.location,
lastActive: new Date()
};
@@ -73,7 +89,7 @@ exports.createOrUpdateUser = async (req, res) => {
console.log('Create/Update User - Attempting database operation');
const user = await UserProfile.findOneAndUpdate(
- { phone },
+ { email },
{ $set: cleanedUpdates },
{
new: true,
@@ -101,12 +117,12 @@ exports.createOrUpdateUser = async (req, res) => {
// Delete user profile
exports.deleteUser = async (req, res) => {
try {
- const { phone } = req.params;
- console.log('Delete User - Request:', { phone });
+ const { email } = req.params;
+ console.log('Delete User - Request:', { email });
- const user = await UserProfile.findOneAndDelete({ phone });
+ const user = await UserProfile.findOneAndDelete({ email });
if (!user) {
- console.log('Delete User - User not found:', phone);
+ console.log('Delete User - User not found:', email);
return res.status(404).json({ message: 'User not found' });
}
@@ -121,18 +137,18 @@ exports.deleteUser = async (req, res) => {
// Update user stats
exports.updateUserStats = async (req, res) => {
try {
- const { phone } = req.params;
+ const { email } = req.params;
const { stats } = req.body;
- console.log('Update Stats - Request:', { phone, stats });
+ console.log('Update Stats - Request:', { email, stats });
const user = await UserProfile.findOneAndUpdate(
- { phone },
+ { email },
{ $set: { stats } },
{ new: true, runValidators: true }
);
if (!user) {
- console.log('Update Stats - User not found:', phone);
+ console.log('Update Stats - User not found:', email);
return res.status(404).json({ message: 'User not found' });
}
@@ -147,9 +163,9 @@ exports.updateUserStats = async (req, res) => {
// Add created post
exports.addCreatedPost = async (req, res) => {
try {
- const { phone } = req.params;
+ const { email } = req.params;
const { postId, postType } = req.body;
- console.log('Add Post - Request:', { phone, postId, postType });
+ console.log('Add Post - Request:', { email, postId, postType });
if (!postId || !postType) {
return res.status(400).json({
@@ -172,7 +188,7 @@ exports.addCreatedPost = async (req, res) => {
}
const user = await UserProfile.findOneAndUpdate(
- { phone },
+ { email },
{
$addToSet: { [updateField]: postId },
$inc: { [`stats.${updateField.replace('created', '').toLowerCase()}Count`]: 1 }
@@ -181,7 +197,7 @@ exports.addCreatedPost = async (req, res) => {
);
if (!user) {
- console.log('Add Post - User not found:', phone);
+ console.log('Add Post - User not found:', email);
return res.status(404).json({ message: 'User not found' });
}
@@ -196,17 +212,17 @@ exports.addCreatedPost = async (req, res) => {
// Get user preferences
exports.getPreferences = async (req, res) => {
try {
- const { phone } = req.params;
+ const { email } = req.params;
console.log('Get Preferences - Request:', {
- phone,
+ email,
params: req.params
});
- const user = await UserProfile.findOne({ phone });
+ const user = await UserProfile.findOne({ email });
console.log('Get Preferences - User Check:', user);
if (!user) {
- console.log('Get Preferences - User not found:', phone);
+ console.log('Get Preferences - User not found:', email);
return res.status(404).json({ message: 'User profile not found' });
}
@@ -221,26 +237,26 @@ exports.getPreferences = async (req, res) => {
// Update user preferences
exports.updatePreferences = async (req, res) => {
try {
- const { phone } = req.params;
+ const { email } = req.params;
const { preferences } = req.body;
console.log('Update Preferences - Request:', {
- phone,
+ email,
preferences,
params: req.params,
body: req.body
});
// First check if user exists
- const existingUser = await UserProfile.findOne({ phone });
+ const existingUser = await UserProfile.findOne({ email });
console.log('Update Preferences - Existing User Check:', existingUser);
if (!existingUser) {
- console.log('Update Preferences - User not found:', phone);
+ console.log('Update Preferences - User not found:', email);
return res.status(404).json({ message: 'User profile not found. Please create a profile first.' });
}
const profile = await UserProfile.findOneAndUpdate(
- { phone },
+ { email },
{ $set: { preferences } },
{ new: true, runValidators: true }
);
@@ -268,4 +284,114 @@ exports.getAllUsers = async (req, res) => {
console.error('❌ Get All Users - Error:', error);
res.status(500).json({ message: error.message });
}
+};
+
+// Get user profile using JWT token
+exports.getProfile = async (req, res) => {
+ try {
+ console.log('Get Profile - Request received');
+
+ const userInfo = getUserFromToken(req);
+ console.log('Get Profile - User info from token:', userInfo);
+
+ const user = await UserProfile.findOne({ email: userInfo.email });
+ if (!user) {
+ console.log('Get Profile - User not found:', userInfo.email);
+ return res.status(404).json({ message: 'User profile not found' });
+ }
+
+ console.log('Get Profile - Success:', user);
+ res.json(user);
+ } catch (error) {
+ console.error('❌ Get Profile - Error:', error);
+ if (error.message === 'No authorization token provided') {
+ return res.status(401).json({ message: 'Authorization token required' });
+ }
+ res.status(500).json({ message: error.message });
+ }
+};
+
// Update user profile using JWT token.
// The email comes from the verified token, never from the request body,
// so a user can only ever modify their own profile. The write is an
// upsert: a profile is created if none exists yet for this email.
exports.updateProfile = async (req, res) => {
  try {
    console.log('Update Profile - Request received');
    console.log('Update Profile - Request body:', req.body);

    // Throws 'No authorization token provided' when the header is missing.
    const userInfo = getUserFromToken(req);
    console.log('Update Profile - User info from token:', userInfo);

    // Get values from request body
    const updates = {
      name: req.body.name,
      userType: req.body.userType,
      age: req.body.age,
      languages: req.body.languages || [],
      profileImage: req.body.profileImage,
      location: req.body.location,
      lastActive: new Date()
    };

    console.log('Update Profile - Updates:', updates);

    // Validate required fields
    if (!updates.name || !updates.userType) {
      console.log('Update Profile - Missing required fields:', {
        hasName: !!updates.name,
        hasUserType: !!updates.userType,
        body: req.body
      });
      return res.status(400).json({
        message: 'Missing required fields',
        required: ['name', 'userType']
      });
    }

    // Clean up updates
    const cleanedUpdates = {
      email: userInfo.email, // Add email to the updates
      name: updates.name,
      age: updates.age,
      userType: updates.userType,
      languages: updates.languages,
      profileImage: updates.profileImage,
      location: updates.location,
      lastActive: new Date()
    };

    // Remove undefined values so $set does not overwrite stored fields
    // with undefined when the client omits them.
    Object.keys(cleanedUpdates).forEach(key =>
      cleanedUpdates[key] === undefined && delete cleanedUpdates[key]
    );
    console.log('Update Profile - Final updates:', cleanedUpdates);

    console.log('Update Profile - Attempting database operation');
    // upsert + setDefaultsOnInsert: creates a schema-defaulted profile on
    // first write; runValidators enforces the schema on updates too.
    const user = await UserProfile.findOneAndUpdate(
      { email: userInfo.email },
      { $set: cleanedUpdates },
      {
        new: true,
        upsert: true,
        runValidators: true,
        setDefaultsOnInsert: true
      }
    );

    console.log('Update Profile - Success:', user);
    res.json(user);
  } catch (error) {
    console.error('❌ Update Profile - Error:', {
      message: error.message,
      stack: error.stack,
      name: error.name
    });

    // Exact message thrown by getUserFromToken for a missing header.
    if (error.message === 'No authorization token provided') {
      return res.status(401).json({ message: 'Authorization token required' });
    }

    res.status(500).json({
      message: 'Error updating user profile',
      error: error.message
    });
  }
};
\ No newline at end of file
diff --git a/Backend/src/index.js b/Backend/src/index.js
new file mode 100644
index 0000000..f0e684b
--- /dev/null
+++ b/Backend/src/index.js
@@ -0,0 +1,329 @@
// CRITICAL: Wrap entire file execution to catch ANY errors
// This ensures we can log errors even if something fails before error handlers are set up
(function() {
  try {
    // IMMEDIATE STARTUP LOGGING - Write to stderr immediately to ensure iisnode captures it
    // Use process.stderr.write for immediate output that iisnode can capture
    process.stderr.write("==================================================\n");
    process.stderr.write("🚀 MayaCode Backend - Starting...\n");
    process.stderr.write("==================================================\n");
    process.stderr.write("📋 Node.js Version: " + process.version + "\n");
    process.stderr.write("📋 Process PID: " + process.pid + "\n");
    process.stderr.write("📋 Working Directory: " + process.cwd() + "\n");
    process.stderr.write("📋 __dirname: " + __dirname + "\n");
    process.stderr.write("==================================================\n");

    // Also write to console (stderr and console may be captured separately)
    console.log("==================================================");
    console.log("🚀 MayaCode Backend - Starting...");
    console.log("==================================================");
    console.log("📋 Node.js Version:", process.version);
    console.log("📋 Process PID:", process.pid);
    console.log("📋 Working Directory:", process.cwd());
    console.log("📋 __dirname:", __dirname);
    console.log("==================================================");

    // Check Node.js version (using basic syntax for compatibility)
    // e.g. "v20.11.1" -> 20; exits early with guidance on pre-18 runtimes.
    var nodeVersion = process.version;
    var majorVersion = parseInt(nodeVersion.split('.')[0].substring(1), 10);

    if (majorVersion < 18) {
      console.error("==========================================");
      console.error("❌ FATAL ERROR: Node.js version too old!");
      console.error("Current version: " + nodeVersion);
      console.error("Required version: Node.js 18+ or 20+");
      console.error("");
      console.error("This application requires modern Node.js features.");
      console.error("Please set WEBSITE_NODE_DEFAULT_VERSION=20.11.1");
      console.error("And ensure Stack Settings use Node.js 20.x");
      console.error("==========================================");
      process.exit(1);
    }
  } catch (e) {
    // Last resort - try to write error to a file or use basic console
    try {
      const fs = require("fs");
      const path = require("path");
      const errorLogPath = path.join(__dirname, "../startup-error.log");
      fs.writeFileSync(errorLogPath, "Failed to write startup logs: " + e.toString() + "\n" + e.stack);
    } catch (fileError) {
      // If even file writing fails, we're in deep trouble
      // This should never happen, but it's a safety net
    }
  }
})();
+
// Add error handling for missing dependencies
// Check if node_modules exists (warn but don't exit - actual requires will fail if missing)
// This is a diagnostic only: it never blocks startup, because Azure may
// install dependencies after this script is first invoked.
try {
  const fs = require("fs");
  const path = require("path");

  // Check both possible locations (../node_modules for Azure deployment, ../../node_modules for local)
  const nodeModulesPath1 = path.join(__dirname, "../node_modules");
  const nodeModulesPath2 = path.join(__dirname, "../../node_modules");
  const cwdNodeModules = path.join(process.cwd(), "node_modules");

  const nodeModulesExists = fs.existsSync(nodeModulesPath1) ||
    fs.existsSync(nodeModulesPath2) ||
    fs.existsSync(cwdNodeModules);

  if (!nodeModulesExists) {
    console.warn("⚠️ WARNING: node_modules directory not found in expected locations!");
    console.warn("Checked paths:");
    console.warn(" - " + nodeModulesPath1);
    console.warn(" - " + nodeModulesPath2);
    console.warn(" - " + cwdNodeModules);
    console.warn("Current working directory: " + process.cwd());
    console.warn("__dirname: " + __dirname);
    console.warn("⚠️ Continuing anyway - Azure may install dependencies during deployment");
    console.warn("If modules are missing, require() calls will fail with clear error messages");
  } else {
    console.log("✅ node_modules found");
  }
} catch (checkError) {
  // Even the existence check is best-effort; never crash here.
  console.warn("⚠️ Warning: Error checking for node_modules:", checkError.message);
  console.warn("Continuing anyway...");
}
+
// Register global error handlers BEFORE loading modules, so failures during
// module load are still logged. Fatal path: log to stderr (for iisnode) and
// console, then exit after a grace period so the logs can flush.
process.on('uncaughtException', (error) => {
  const divider = "=".repeat(50);
  const lines = [
    divider,
    "❌ UNCAUGHT EXCEPTION - Application will exit",
    divider,
    `Error: ${error.message}`,
    `Stack: ${error.stack}`,
    divider
  ];
  // stderr.write first: iisnode captures it even when console is buffered.
  process.stderr.write(lines.join("\n") + "\n");
  console.error(divider);
  console.error("❌ UNCAUGHT EXCEPTION - Application will exit");
  console.error(divider);
  console.error("Error:", error.message);
  console.error("Stack:", error.stack);
  console.error(divider);
  // Give time for logs to flush before exiting.
  setTimeout(() => process.exit(1), 2000);
});
+
// Non-fatal path: an unhandled promise rejection is logged in full but the
// process keeps running (a single bad request should not take the app down).
process.on('unhandledRejection', (reason, rejectedPromise) => {
  const divider = "=".repeat(50);
  console.error(divider);
  console.error("❌ UNHANDLED PROMISE REJECTION");
  console.error(divider);
  console.error("Reason:", reason);
  console.error("Promise:", rejectedPromise);
  if (reason && reason.stack) {
    console.error("Stack:", reason.stack);
  }
  console.error(divider);
  // Deliberately no process.exit here.
});
+
const http = require("http");
let app, setupSocket, initializeProducer, initializeConsumer, kafkaConsumerService;

// Load every application module inside one try/catch so a missing
// dependency or syntax error produces a readable startup log instead of
// an opaque iisnode crash. Each step logs to stderr AND console.
try {
  process.stderr.write("📦 Loading application modules...\n");
  console.log("📦 Loading application modules...");

  app = require("./app.js");
  process.stderr.write("✅ app.js loaded\n");
  console.log("✅ app.js loaded");

  setupSocket = require("./sockets/index.js").setupSocket;
  process.stderr.write("✅ sockets/index.js loaded\n");
  console.log("✅ sockets/index.js loaded");

  initializeProducer = require("./config/kafka").initializeProducer;
  initializeConsumer = require("./config/kafka").initializeConsumer;
  process.stderr.write("✅ kafka config loaded\n");
  console.log("✅ kafka config loaded");

  kafkaConsumerService = require("./services/kafkaConsumer");
  process.stderr.write("✅ kafkaConsumer service loaded\n");
  console.log("✅ kafkaConsumer service loaded");

  // NOTE(review): dotenv is configured AFTER app.js and the kafka config are
  // required — any module that reads process.env at require time will miss
  // values from .env. Confirm those modules read env lazily.
  require("dotenv").config();
  process.stderr.write("✅ dotenv configured\n");
  console.log("✅ dotenv configured");

  // Initialize Cloudinary (require is enough; the module configures itself)
  require("./config/cloudinary");
  process.stderr.write("✅ cloudinary config loaded\n");
  console.log("✅ cloudinary config loaded");

  process.stderr.write("✅ All modules loaded successfully\n");
  console.log("✅ All modules loaded successfully");
} catch (requireError) {
  // Write to stderr immediately so iisnode captures it
  process.stderr.write("=".repeat(50) + "\n");
  process.stderr.write("❌ ERROR: Failed to load required modules!\n");
  process.stderr.write("=".repeat(50) + "\n");
  process.stderr.write(`Error message: ${requireError.message}\n`);
  process.stderr.write(`Error name: ${requireError.name}\n`);
  process.stderr.write(`Error code: ${requireError.code}\n`);
  process.stderr.write(`\nStack trace:\n${requireError.stack}\n`);
  process.stderr.write("=".repeat(50) + "\n");
  console.error("=".repeat(50));
  console.error("❌ ERROR: Failed to load required modules!");
  console.error("=".repeat(50));
  console.error("Error message:", requireError.message);
  console.error("Error name:", requireError.name);
  console.error("Error code:", requireError.code);
  console.error("");
  console.error("Stack trace:");
  console.error(requireError.stack);
  console.error("");
  console.error("This usually means:");
  console.error("1. node_modules is missing - run 'npm install --production'");
  console.error("2. A dependency is missing from package.json");
  console.error("3. There's a syntax error in the code");
  console.error("4. Environment variables are missing (check Azure App Settings)");
  console.error("=".repeat(50));
  // Give time for logs to flush before exiting
  setTimeout(() => process.exit(1), 2000);
}
+
+// For iisnode, PORT is automatically set by Azure/IIS via environment variable
+// Use default 8000 only for local development (should never happen in Azure)
+let PORT = process.env.PORT || process.env.IISNODE_HTTP_PORT || 8000;
+
+// Validate PORT is a number
+PORT = parseInt(PORT, 10);
+if (isNaN(PORT) || PORT <= 0 || PORT > 65535) {
+ process.stderr.write("❌ ERROR: Invalid PORT value: " + (process.env.PORT || process.env.IISNODE_HTTP_PORT || "8000") + "\n");
+ process.stderr.write("PORT must be a number between 1 and 65535\n");
+ console.error("❌ ERROR: Invalid PORT value:", process.env.PORT || process.env.IISNODE_HTTP_PORT || "8000");
+ console.error("PORT must be a number between 1 and 65535");
+ process.exit(1);
+}
+
+process.stderr.write("📋 Using PORT: " + PORT + "\n");
+console.log("📋 Using PORT:", PORT);
+
+// Log startup information for debugging
+// Use both stderr and console for maximum visibility
+const startupInfo = [
+ "=".repeat(50),
+ "🚀 Starting MayaCode Backend Server",
+ "=".repeat(50),
+ `📋 PORT: ${PORT}`,
+ `📋 NODE_ENV: ${process.env.NODE_ENV || 'not set'}`,
+ `📋 Working Directory: ${process.cwd()}`,
+ `📋 __dirname: ${__dirname}`,
+ `📋 Node.js Version: ${process.version}`,
+ `📋 Process PID: ${process.pid}`,
+ "=".repeat(50)
+].join("\n");
+
+process.stderr.write(startupInfo + "\n");
+console.log(startupInfo);
+
+// Wrap the Express app in a plain HTTP server so Socket.IO can attach to it later.
+const server = http.createServer(app);
+
+// Add error handling for server startup
+// NOTE(review): server.listen() further below attaches a second 'error'
+// listener that exits the process; this one only logs. Confirm both listeners
+// are intentional (both will fire for the same error).
+server.on('error', (error) => {
+ console.error("❌ Server error:", error);
+ if (error.code === 'EADDRINUSE') {
+ console.error("Port", PORT, "is already in use");
+ }
+});
+
+// Initialize services with fallback for Kafka/Redis
+async function initializeServices() {
+ try {
+ console.log("🚀 Initializing MayaCode Services...");
+
+ // Try to initialize Kafka (with fallback)
+ if (initializeProducer && initializeConsumer && kafkaConsumerService) {
+ try {
+ await initializeProducer();
+ await initializeConsumer();
+ await kafkaConsumerService.startConsuming();
+
+ const messageService = require("./services/messageService");
+ messageService.startBufferFlushing();
+
+ console.log("✅ Kafka services initialized successfully");
+ } catch (kafkaError) {
+ console.log("⚠️ Kafka services unavailable, continuing without them...");
+ console.log("💡 To enable Kafka: Create topics 'chat-messages' and 'message-persistence'");
+ }
+ } else {
+ console.log("⚠️ Kafka modules not available, skipping Kafka initialization");
+ }
+
+ console.log("✅ Core services initialized");
+ console.log("📧 Email OTP Authentication is ready!");
+ console.log("🔗 Test endpoints:");
+ console.log(" POST /auth/request-otp");
+ console.log(" POST /auth/verify-otp");
+ console.log(" GET /auth/verify-token");
+
+ // Setup socket (will work even without Redis)
+ if (setupSocket) {
+ setupSocket(server);
+ }
+
+ } catch (error) {
+ console.error("❌ Failed to initialize services:", error);
+ // Don't exit, let authentication work
+ console.log("⚠️ Continuing with limited functionality...");
+ }
+}
+
+// Graceful shutdown
+process.on('SIGINT', async () => {
+ console.log("\n🛑 Shutting down gracefully...");
+
+ try {
+ // Stop Kafka consumer if available
+ if (kafkaConsumerService && typeof kafkaConsumerService.stopConsuming === 'function') {
+ await kafkaConsumerService.stopConsuming();
+ }
+
+ // Flush any remaining messages if available
+ try {
+ const messageService = require("./services/messageService");
+ if (messageService && typeof messageService.flushBuffer === 'function') {
+ await messageService.flushBuffer();
+ }
+ } catch (msgError) {
+ console.warn("⚠️ Could not flush messages:", msgError.message);
+ }
+
+ console.log("✅ Graceful shutdown completed");
+ process.exit(0);
+ } catch (error) {
+ console.error("❌ Error during shutdown:", error);
+ process.exit(1);
+ }
+});
+
+// Start the server
+// For iisnode, listen on the PORT provided by IIS (no host binding needed)
+server.listen(PORT, () => {
+ const successMsg = [
+ `🌐 Server running on PORT: ${PORT}`,
+ `✅ Node.js process started successfully`,
+ `📋 Environment: ${process.env.NODE_ENV || 'development'}`,
+ `📁 Working directory: ${process.cwd()}`
+ ].join("\n");
+
+ process.stderr.write(successMsg + "\n");
+ console.log(successMsg);
+}).on('error', (error) => {
+ const errorMsg = [
+ "❌ Failed to start server:",
+ `Error code: ${error.code}`,
+ `Error message: ${error.message}`,
+ `Stack: ${error.stack}`
+ ].join("\n");
+
+ process.stderr.write(errorMsg + "\n");
+ console.error(errorMsg);
+ process.exit(1);
+});
+
+// Initialize all services
+initializeServices();
diff --git a/Backend/middleware/errorMiddleware.js b/Backend/src/middleware/errorMiddleware.js
similarity index 74%
rename from Backend/middleware/errorMiddleware.js
rename to Backend/src/middleware/errorMiddleware.js
index 28bd9c2..fcfbf09 100644
--- a/Backend/middleware/errorMiddleware.js
+++ b/Backend/src/middleware/errorMiddleware.js
@@ -1,5 +1,3 @@
-const { logger } = require('../utils/logger');
-
// Custom error class
class AppError extends Error {
constructor(message, statusCode) {
@@ -17,18 +15,6 @@ const errorHandler = (err, req, res, next) => {
err.statusCode = err.statusCode || 500;
err.status = err.status || 'error';
- // Log error
- logger.error('Error:', {
- message: err.message,
- stack: err.stack,
- statusCode: err.statusCode,
- path: req.originalUrl,
- method: req.method,
- body: req.body,
- params: req.params,
- query: req.query
- });
-
// Development error response
if (process.env.NODE_ENV === 'development') {
res.status(err.statusCode).json({
@@ -49,7 +35,6 @@ const errorHandler = (err, req, res, next) => {
}
// Programming or other unknown error: don't leak error details
else {
- logger.error('ERROR 💥', err);
res.status(500).json({
status: 'error',
message: 'Something went wrong!'
@@ -58,7 +43,4 @@ const errorHandler = (err, req, res, next) => {
}
};
-module.exports = {
- AppError,
- errorHandler
-};
\ No newline at end of file
+module.exports = { AppError, errorHandler };
\ No newline at end of file
diff --git a/Backend/src/models/Message.js b/Backend/src/models/Message.js
new file mode 100644
index 0000000..e4f0e2b
--- /dev/null
+++ b/Backend/src/models/Message.js
@@ -0,0 +1,62 @@
+const mongoose = require('mongoose');
+
+const messageSchema = new mongoose.Schema({
+ messageId: {
+ type: String,
+ required: true,
+ unique: true,
+ index: true
+ },
+ roomId: {
+ type: String,
+ required: true,
+ index: true
+ },
+ senderId: {
+ type: String,
+ required: true,
+ index: true
+ },
+ content: {
+ type: String,
+ required: true
+ },
+ messageType: {
+ type: String,
+ enum: ['text', 'image', 'file', 'audio'],
+ default: 'text'
+ },
+ status: {
+ type: String,
+ enum: ['pending', 'sent', 'delivered', 'read', 'failed'],
+ default: 'pending'
+ },
+ recipients: [{
+ type: String,
+ index: true
+ }],
+ metadata: {
+ requiresDelivery: {
+ type: Boolean,
+ default: true
+ },
+ priority: {
+ type: String,
+ enum: ['normal', 'high', 'urgent'],
+ default: 'normal'
+ }
+ }
+}, {
+ timestamps: true
+});
+
+// Indexes for better query performance
+messageSchema.index({ roomId: 1, createdAt: -1 });
+messageSchema.index({ senderId: 1, createdAt: -1 });
+messageSchema.index({ status: 1, createdAt: -1 });
+messageSchema.index({ recipients: 1, status: 1 });
+
+// TTL index to automatically delete old messages (90 days)
+messageSchema.index({ createdAt: 1 }, { expireAfterSeconds: 7776000 });
+
+module.exports = mongoose.model('Message', messageSchema);
\ No newline at end of file
diff --git a/Backend/src/models/Otp.js b/Backend/src/models/Otp.js
new file mode 100644
index 0000000..9a991ea
--- /dev/null
+++ b/Backend/src/models/Otp.js
@@ -0,0 +1,30 @@
+const mongoose = require('mongoose');
+
+const otpSchema = new mongoose.Schema({
+ email: {
+ type: String,
+ required: true,
+ lowercase: true,
+ trim: true
+ },
+ otp: {
+ type: String,
+ required: true
+ },
+ expiresAt: {
+ type: Date,
+ required: true,
+ default: Date.now,
+ expires: 600 // 10 minutes in seconds
+ },
+ createdAt: {
+ type: Date,
+ default: Date.now
+ }
+});
+
+// Index for faster queries
+otpSchema.index({ email: 1 });
+otpSchema.index({ expiresAt: 1 }, { expireAfterSeconds: 0 });
+
+module.exports = mongoose.model('Otp', otpSchema);
diff --git a/Backend/models/Post.js b/Backend/src/models/Post.js
similarity index 99%
rename from Backend/models/Post.js
rename to Backend/src/models/Post.js
index 91bdbeb..65481d6 100644
--- a/Backend/models/Post.js
+++ b/Backend/src/models/Post.js
@@ -13,7 +13,7 @@ const locationSchema = new mongoose.Schema({
});
const postSchema = new mongoose.Schema({
- phone: {
+ email: {
type: String,
required: true
},
diff --git a/Backend/models/User.js b/Backend/src/models/User.js
similarity index 84%
rename from Backend/models/User.js
rename to Backend/src/models/User.js
index de0c7eb..ea068e1 100644
--- a/Backend/models/User.js
+++ b/Backend/src/models/User.js
@@ -1,11 +1,17 @@
const mongoose = require('mongoose');
const userProfileSchema = new mongoose.Schema({
- phone: {
+ email: {
type: String,
required: true,
unique: true,
- index: true
+ lowercase: true,
+ trim: true
+ },
+ phone: {
+ type: String,
+ unique: true,
+ sparse: true
},
name: {
type: String,
@@ -55,8 +61,8 @@ const userProfileSchema = new mongoose.Schema({
timestamps: true
});
-// Drop the userId index if it exists
-userProfileSchema.index({ userId: 1 }, { unique: true, sparse: true });
-userProfileSchema.index({ phone: 1 }, { unique: true });
+// Indexes for faster queries
+userProfileSchema.index({ email: 1 });
+userProfileSchema.index({ phone: 1 });
module.exports = mongoose.model('UserProfile', userProfileSchema);
\ No newline at end of file
diff --git a/Backend/src/routes/authRoutes.js b/Backend/src/routes/authRoutes.js
new file mode 100644
index 0000000..1ece878
--- /dev/null
+++ b/Backend/src/routes/authRoutes.js
@@ -0,0 +1,21 @@
+const express = require('express');
+const router = express.Router();
+const {
+ checkEmailInUse,
+ requestOtp,
+ verifyOtp,
+ updateProfile,
+ verifyToken,
+ verifyTokenEndpoint
+} = require('../controllers/authController');
+
+// Public routes
+router.post('/check-email', checkEmailInUse);
+router.post('/request-otp', requestOtp);
+router.post('/verify-otp', verifyOtp);
+
+// Protected routes (require authentication)
+router.post('/update-profile', verifyToken, updateProfile);
+router.get('/verify-token', verifyTokenEndpoint);
+
+module.exports = router;
\ No newline at end of file
diff --git a/Backend/src/routes/imageRoutes.js b/Backend/src/routes/imageRoutes.js
new file mode 100644
index 0000000..1195d8a
--- /dev/null
+++ b/Backend/src/routes/imageRoutes.js
@@ -0,0 +1,12 @@
+const express = require('express');
+const router = express.Router();
+const imageController = require('../controllers/imageController');
+
+// Get image by category and number
+router.get('/:category/:number', imageController.getImage);
+
+// List all images for a category
+router.get('/:category', imageController.listImages);
+
+module.exports = router;
+
diff --git a/Backend/src/routes/messageRoutes.js b/Backend/src/routes/messageRoutes.js
new file mode 100644
index 0000000..0fc4796
--- /dev/null
+++ b/Backend/src/routes/messageRoutes.js
@@ -0,0 +1,171 @@
+const express = require('express');
+const router = express.Router();
+const messageService = require('../services/messageService');
+
+// Get messages for a specific room
+router.get('/room/:roomId', async (req, res) => {
+ try {
+ const { roomId } = req.params;
+ const { limit = 50, offset = 0 } = req.query;
+
+ console.log(`📥 Getting messages for room: ${roomId}, limit: ${limit}, offset: ${offset}`);
+
+ const messages = await messageService.getMessagesByRoom(
+ roomId,
+ parseInt(limit),
+ parseInt(offset)
+ );
+
+ res.json({
+ success: true,
+ data: messages,
+ pagination: {
+ limit: parseInt(limit),
+ offset: parseInt(offset),
+ count: messages.length
+ }
+ });
+
+ } catch (error) {
+ console.error('❌ Error getting room messages:', error);
+ res.status(500).json({
+ success: false,
+ message: 'Failed to get messages',
+ error: error.message
+ });
+ }
+});
+
+// Get unread messages for a user
+router.get('/unread/:userId', async (req, res) => {
+ try {
+ const { userId } = req.params;
+ const { limit = 100 } = req.query;
+
+ console.log(`📥 Getting unread messages for user: ${userId}`);
+
+ const messages = await messageService.getUnreadMessages(userId, parseInt(limit));
+
+ res.json({
+ success: true,
+ data: messages,
+ count: messages.length
+ });
+
+ } catch (error) {
+ console.error('❌ Error getting unread messages:', error);
+ res.status(500).json({
+ success: false,
+ message: 'Failed to get unread messages',
+ error: error.message
+ });
+ }
+});
+
+// Mark messages as delivered for a user
+router.post('/delivered', async (req, res) => {
+ try {
+ const { userId, messageIds } = req.body;
+
+ if (!userId || !messageIds || !Array.isArray(messageIds)) {
+ return res.status(400).json({
+ success: false,
+ message: 'userId and messageIds array are required'
+ });
+ }
+
+ console.log(`📝 Marking ${messageIds.length} messages as delivered for user: ${userId}`);
+
+ await messageService.markMessagesAsDelivered(userId, messageIds);
+
+ res.json({
+ success: true,
+ message: `Marked ${messageIds.length} messages as delivered`
+ });
+
+ } catch (error) {
+ console.error('❌ Error marking messages as delivered:', error);
+ res.status(500).json({
+ success: false,
+ message: 'Failed to mark messages as delivered',
+ error: error.message
+ });
+ }
+});
+
+// Update message status
+router.put('/:messageId/status', async (req, res) => {
+ try {
+ const { messageId } = req.params;
+ const { status } = req.body;
+
+ if (!status) {
+ return res.status(400).json({
+ success: false,
+ message: 'status is required'
+ });
+ }
+
+ console.log(`📝 Updating message status: ${messageId} -> ${status}`);
+
+ await messageService.updateMessageStatus(messageId, status);
+
+ res.json({
+ success: true,
+ message: 'Message status updated successfully'
+ });
+
+ } catch (error) {
+ console.error('❌ Error updating message status:', error);
+ res.status(500).json({
+ success: false,
+ message: 'Failed to update message status',
+ error: error.message
+ });
+ }
+});
+
+// Get message statistics
+router.get('/stats/:roomId', async (req, res) => {
+ try {
+ const { roomId } = req.params;
+ const Message = require('../models/Message');
+
+ const stats = await Message.aggregate([
+ { $match: { roomId } },
+ {
+ $group: {
+ _id: null,
+ totalMessages: { $sum: 1 },
+ totalDelivered: {
+ $sum: { $cond: [{ $eq: ['$status', 'delivered'] }, 1, 0] }
+ },
+ totalRead: {
+ $sum: { $cond: [{ $eq: ['$status', 'read'] }, 1, 0] }
+ }
+ }
+ }
+ ]);
+
+ const result = stats[0] || {
+ totalMessages: 0,
+ totalDelivered: 0,
+ totalRead: 0
+ };
+
+ res.json({
+ success: true,
+ data: result
+ });
+
+ } catch (error) {
+ console.error('❌ Error getting message stats:', error);
+ res.status(500).json({
+ success: false,
+ message: 'Failed to get message statistics',
+ error: error.message
+ });
+ }
+});
+
+module.exports = router;
\ No newline at end of file
diff --git a/Backend/src/routes/postRoutes.js b/Backend/src/routes/postRoutes.js
new file mode 100644
index 0000000..9085008
--- /dev/null
+++ b/Backend/src/routes/postRoutes.js
@@ -0,0 +1,47 @@
+const express = require('express');
+const multer = require('multer');
+const router = express.Router();
+const postController = require('../controllers/postController');
+
+// Configure multer for handling file uploads
+const upload = multer({
+ storage: multer.memoryStorage(), // Store files in memory for Cloudinary upload
+ limits: {
+ fileSize: 10 * 1024 * 1024, // 10MB limit
+ },
+ fileFilter: (req, file, cb) => {
+ // Accept only image files
+ if (file.mimetype.startsWith('image/')) {
+ cb(null, true);
+ } else {
+ cb(new Error('Only image files are allowed'), false);
+ }
+ }
+});
+
+// Add basic logging middleware
+router.use((req, res, next) => {
+ console.log(`📨 ${req.method} ${req.url}`);
+ next();
+});
+
+// Error handling middleware
+router.use((err, req, res, next) => {
+ res.status(500).json({
+ message: 'Internal server error',
+ error: err.message
+ });
+});
+
+// Post routes
+router.post('/', postController.createPost); // Remove multer middleware for base64 JSON requests
+router.get('/', postController.getPosts);
+router.get('/:id/images', postController.getPostImages); // Lazy load images endpoint (must be before /:id)
+router.get('/:id', postController.getPost);
+router.put('/:id', postController.updatePost);
+router.delete('/:id', postController.deletePost);
+
+// User posts route
+router.get('/phone/:phone', postController.getUserPosts);
+
+module.exports = router;
\ No newline at end of file
diff --git a/Backend/routes/userRoutes.js b/Backend/src/routes/userRoutes.js
similarity index 59%
rename from Backend/routes/userRoutes.js
rename to Backend/src/routes/userRoutes.js
index 07a1d2f..cb9b4fc 100644
--- a/Backend/routes/userRoutes.js
+++ b/Backend/src/routes/userRoutes.js
@@ -26,19 +26,23 @@ router.use((err, req, res, next) => {
});
// User profile routes
-router.get('/phone/:phone', userController.getUserByPhone);
-router.put('/phone/:phone', userController.createOrUpdateUser);
-router.delete('/phone/:phone', userController.deleteUser);
+router.get('/email/:email', userController.getUserByEmail);
+router.put('/email/:email', userController.createOrUpdateUser);
+router.delete('/email/:email', userController.deleteUser);
+
+// Profile routes (using JWT token)
+router.get('/profile', userController.getProfile);
+router.put('/profile', userController.updateProfile);
// User stats routes
-router.put('/phone/:phone/stats', userController.updateUserStats);
+router.put('/email/:email/stats', userController.updateUserStats);
// User posts routes
-router.post('/phone/:phone/posts', userController.addCreatedPost);
+router.post('/email/:email/posts', userController.addCreatedPost);
// Preferences routes
-router.put('/phone/:phone/preferences', userController.updatePreferences);
-router.get('/phone/:phone/preferences', userController.getPreferences);
+router.put('/email/:email/preferences', userController.updatePreferences);
+router.get('/email/:email/preferences', userController.getPreferences);
// Get all users
router.get('/', userController.getAllUsers);
diff --git a/Backend/scripts/dropIndexes.js b/Backend/src/scripts/dropIndexes.js
similarity index 100%
rename from Backend/scripts/dropIndexes.js
rename to Backend/src/scripts/dropIndexes.js
diff --git a/Backend/src/services/gmailService.js b/Backend/src/services/gmailService.js
new file mode 100644
index 0000000..1842ca1
--- /dev/null
+++ b/Backend/src/services/gmailService.js
@@ -0,0 +1,110 @@
+const nodemailer = require('nodemailer');
+const { google } = require('googleapis');
+
+// OAuth2 configuration for Gmail. Requires GMAIL_CLIENT_ID, GMAIL_CLIENT_SECRET
+// and GMAIL_REFRESH_TOKEN in the environment; the redirect URI defaults to
+// Google's OAuth playground (the URI typically used when generating the
+// refresh token).
+const oAuth2Client = new google.auth.OAuth2(
+ process.env.GMAIL_CLIENT_ID,
+ process.env.GMAIL_CLIENT_SECRET,
+ process.env.GMAIL_REDIRECT_URI || 'https://developers.google.com/oauthplayground'
+);
+
+// Long-lived refresh token used to mint short-lived access tokens on demand.
+oAuth2Client.setCredentials({
+ refresh_token: process.env.GMAIL_REFRESH_TOKEN
+});
+
+// Lazily-created nodemailer transporter, cached at module scope
+// (see createTransporter below).
+let transporter;
+
+async function createTransporter() {
+ try {
+ const accessToken = await oAuth2Client.getAccessToken();
+
+ transporter = nodemailer.createTransporter({
+ service: 'gmail',
+ auth: {
+ type: 'OAuth2',
+ user: process.env.GMAIL_USER, // Your Gmail address
+ clientId: process.env.GMAIL_CLIENT_ID,
+ clientSecret: process.env.GMAIL_CLIENT_SECRET,
+ refreshToken: process.env.GMAIL_REFRESH_TOKEN,
+ accessToken: accessToken.token,
+ },
+ });
+
+ console.log('✅ Gmail transporter created successfully');
+ return transporter;
+ } catch (error) {
+ console.error('❌ Error creating Gmail transporter:', error);
+ throw error;
+ }
+}
+
+// Send email function
+async function sendEmail({ to, subject, html, text }) {
+ try {
+ if (!transporter) {
+ await createTransporter();
+ }
+
+ const mailOptions = {
+ from: `MayaCode <${process.env.GMAIL_USER}>`,
+ to,
+ subject,
+ html,
+ text,
+ };
+
+ console.log(`📧 Sending email to: ${to}`);
+ const result = await transporter.sendMail(mailOptions);
+ console.log('✅ Email sent successfully:', result.messageId);
+
+ return {
+ success: true,
+ messageId: result.messageId,
+ message: 'Email sent successfully'
+ };
+ } catch (error) {
+ console.error('❌ Error sending email:', error);
+ return {
+ success: false,
+ error: error.message,
+ message: 'Failed to send email'
+ };
+ }
+}
+
+// Send OTP email
+async function sendOTPEmail(email, otp) {
+ const subject = 'MayaCode - Your OTP Code';
+ const html = `
+
+
MayaCode OTP Verification
+
Hello!
+
Your OTP code for MayaCode is:
+
+
${otp}
+
+
This code will expire in 10 minutes.
+
If you didn't request this code, please ignore this email.
+
+
MayaCode - Building Stronger Communities
+
+ `;
+
+ const text = `MayaCode OTP Verification\n\nYour OTP code is: ${otp}\n\nThis code will expire in 10 minutes.\n\nIf you didn't request this code, please ignore this email.\n\nMayaCode - Building Stronger Communities`;
+
+ return await sendEmail({
+ to: email,
+ subject,
+ html,
+ text
+ });
+}
+
+module.exports = {
+ sendEmail,
+ sendOTPEmail,
+ createTransporter
+};
+
diff --git a/Backend/src/services/kafkaConsumer.js b/Backend/src/services/kafkaConsumer.js
new file mode 100644
index 0000000..27da3c0
--- /dev/null
+++ b/Backend/src/services/kafkaConsumer.js
@@ -0,0 +1,132 @@
+const { consumer, TOPICS } = require('../config/kafka');
+const messageService = require('./messageService');
+
+class KafkaConsumerService {
+ constructor() {
+ this.isRunning = false;
+ this.messageBuffer = [];
+ this.batchSize = 50;
+ this.flushInterval = 2000; // 2 seconds
+ }
+
+ // Start consuming messages from Kafka
+ async startConsuming() {
+ if (this.isRunning) {
+ console.log('⚠️ Kafka consumer is already running');
+ return;
+ }
+
+ if (!consumer) {
+ throw new Error('Kafka consumer not configured. Set KAFKA_BROKERS environment variable.');
+ }
+
+ try {
+ // Subscribe to chat messages topic
+ await consumer.subscribe({
+ topic: TOPICS.CHAT_MESSAGES,
+ fromBeginning: false // Start from latest messages
+ });
+
+ console.log(`📥 Subscribed to Kafka topic: ${TOPICS.CHAT_MESSAGES}`);
+
+ // Start consuming messages
+ await consumer.run({
+ eachMessage: async ({ topic, partition, message }) => {
+ try {
+ console.log(`📥 Raw Kafka message received:`, message.value.toString());
+ const messageData = JSON.parse(message.value.toString());
+ console.log(`📥 Parsed message from Kafka: ${messageData.id}`);
+
+ // Add to buffer for batch processing
+ this.addToBuffer(messageData);
+ } catch (error) {
+ console.error('❌ Error processing Kafka message:', error);
+ }
+ }
+ });
+
+ this.isRunning = true;
+ console.log('✅ Kafka consumer started successfully');
+
+ // Start periodic buffer flushing
+ this.startBufferFlushing();
+
+ } catch (error) {
+ console.error('❌ Failed to start Kafka consumer:', error);
+ throw error;
+ }
+ }
+
+ // Stop consuming messages
+ async stopConsuming() {
+ if (!this.isRunning) {
+ console.log('⚠️ Kafka consumer is not running');
+ return;
+ }
+
+ try {
+ // Flush any remaining messages
+ await this.flushBuffer();
+
+ // Disconnect consumer
+ await consumer.disconnect();
+
+ this.isRunning = false;
+ console.log('✅ Kafka consumer stopped successfully');
+ } catch (error) {
+ console.error('❌ Failed to stop Kafka consumer:', error);
+ throw error;
+ }
+ }
+
+ // Add message to buffer
+ addToBuffer(messageData) {
+ this.messageBuffer.push(messageData);
+
+ // Flush if buffer is full
+ if (this.messageBuffer.length >= this.batchSize) {
+ this.flushBuffer();
+ }
+ }
+
+ // Flush message buffer to MongoDB
+ async flushBuffer() {
+ if (this.messageBuffer.length === 0) return;
+
+ const messagesToProcess = [...this.messageBuffer];
+ this.messageBuffer = [];
+
+ try {
+ console.log(`💾 Processing ${messagesToProcess.length} messages from buffer`);
+
+ // Use batch processing for better performance
+ await messageService.batchProcessMessages(messagesToProcess);
+
+ console.log(`✅ Successfully processed ${messagesToProcess.length} messages`);
+ } catch (error) {
+ console.error('❌ Failed to process message buffer:', error);
+
+ // Could implement retry logic or dead letter queue here
+ // For now, we'll log the error and continue
+ }
+ }
+
+ // Start periodic buffer flushing
+ startBufferFlushing() {
+ setInterval(() => {
+ this.flushBuffer();
+ }, this.flushInterval);
+ }
+
+ // Get consumer status
+ getStatus() {
+ return {
+ isRunning: this.isRunning,
+ bufferSize: this.messageBuffer.length,
+ batchSize: this.batchSize,
+ flushInterval: this.flushInterval
+ };
+ }
+}
+
+module.exports = new KafkaConsumerService();
\ No newline at end of file
diff --git a/Backend/src/services/messageService.js b/Backend/src/services/messageService.js
new file mode 100644
index 0000000..2a69b0a
--- /dev/null
+++ b/Backend/src/services/messageService.js
@@ -0,0 +1,230 @@
+const Message = require('../models/Message');
+const { sendMessage, TOPICS } = require('../config/kafka');
+
+class MessageService {
+ constructor() {
+ this.messageBuffer = [];
+ this.batchSize = 50;
+ this.flushInterval = 2000; // 2 seconds
+ }
+
+ // Send message to Kafka for processing
+ async sendMessageToKafka(messageData) {
+ try {
+ const message = {
+ id: this.generateMessageId(),
+ roomId: messageData.roomId || 'general',
+ senderId: messageData.senderId || 'anonymous',
+ content: messageData.message,
+ messageType: messageData.messageType || 'text',
+ status: 'pending',
+ recipients: messageData.recipients || [],
+ metadata: {
+ requiresDelivery: true,
+ priority: messageData.priority || 'normal'
+ },
+ timestamp: new Date().toISOString()
+ };
+
+ // Send to Kafka for persistence
+ await sendMessage(TOPICS.CHAT_MESSAGES, message, message.roomId);
+
+ console.log(`📤 Message sent to Kafka: ${message.id}`);
+ return message;
+ } catch (error) {
+ console.error('❌ Failed to send message to Kafka:', error);
+ throw error;
+ }
+ }
+
+ // Process messages from Kafka and store in MongoDB
+ async processMessageFromKafka(messageData) {
+ try {
+ console.log(`💾 Processing message for MongoDB:`, messageData);
+ const message = new Message({
+ _id: messageData.id,
+ roomId: messageData.roomId,
+ senderId: messageData.senderId,
+ content: messageData.content,
+ messageType: messageData.messageType,
+ status: messageData.status,
+ recipients: messageData.recipients,
+ metadata: messageData.metadata
+ });
+
+ await message.save();
+ console.log(`💾 Message saved to MongoDB: ${messageData.id}`);
+ return message;
+ } catch (error) {
+ console.error('❌ Failed to save message to MongoDB:', error);
+ throw error;
+ }
+ }
+
+ // Batch process messages for better performance
+ async batchProcessMessages(messages) {
+ if (messages.length === 0) return;
+
+ try {
+ console.log(`💾 Starting batch processing for ${messages.length} messages`);
+ console.log(`💾 First message data:`, messages[0]);
+
+ const bulkOps = messages.map(msg => ({
+ insertOne: {
+ document: {
+ messageId: msg.id, // Use messageId instead of _id
+ roomId: msg.roomId,
+ senderId: msg.senderId,
+ content: msg.content,
+ messageType: msg.messageType,
+ status: msg.status,
+ recipients: msg.recipients,
+ metadata: msg.metadata,
+ createdAt: new Date(msg.timestamp),
+ updatedAt: new Date(msg.timestamp)
+ }
+ }
+ }));
+
+ console.log(`💾 Bulk operations prepared:`, bulkOps.length);
+
+ const result = await Message.bulkWrite(bulkOps, { ordered: false });
+ console.log(`💾 Batch write result:`, result);
+
+ // Check for validation errors
+ if (result.mongoose && result.mongoose.validationErrors) {
+ console.error('❌ Validation errors:', result.mongoose.validationErrors);
+ }
+
+ if (result.insertedCount === 0) {
+ console.error('❌ No messages were inserted!');
+ // Try individual save to see the exact error
+ try {
+ const testMessage = new Message({
+ messageId: messages[0].id,
+ roomId: messages[0].roomId,
+ senderId: messages[0].senderId,
+ content: messages[0].content,
+ messageType: messages[0].messageType,
+ status: messages[0].status,
+ recipients: messages[0].recipients,
+ metadata: messages[0].metadata
+ });
+ await testMessage.save();
+ console.log('✅ Individual save succeeded');
+ } catch (individualError) {
+ console.error('❌ Individual save failed:', individualError.message);
+ }
+ }
+
+ console.log(`💾 Batch saved ${messages.length} messages to MongoDB`);
+ } catch (error) {
+ console.error('❌ Failed to batch save messages:', error);
+ console.error('❌ Error details:', error.message);
+ if (error.writeErrors) {
+ console.error('❌ Write errors:', error.writeErrors);
+ }
+ throw error;
+ }
+ }
+
+ // Get messages for a room
+ async getMessagesByRoom(roomId, limit = 50, offset = 0) {
+ try {
+ const messages = await Message.find({ roomId })
+ .sort({ createdAt: -1 })
+ .skip(offset)
+ .limit(limit)
+ .lean();
+
+ return messages.reverse(); // Return in chronological order
+ } catch (error) {
+ console.error('❌ Failed to get messages by room:', error);
+ throw error;
+ }
+ }
+
+ // Get unread messages for a user
+ async getUnreadMessages(userId, limit = 100) {
+ try {
+ const messages = await Message.find({
+ recipients: userId,
+ status: { $ne: 'delivered' }
+ })
+ .sort({ createdAt: 1 })
+ .limit(limit)
+ .lean();
+
+ return messages;
+ } catch (error) {
+ console.error('❌ Failed to get unread messages:', error);
+ throw error;
+ }
+ }
+
+ // Update message status
+ async updateMessageStatus(messageId, status) {
+ try {
+ await Message.findByIdAndUpdate(messageId, { status });
+ console.log(`📝 Updated message status: ${messageId} -> ${status}`);
+ } catch (error) {
+ console.error('❌ Failed to update message status:', error);
+ throw error;
+ }
+ }
+
+ // Mark messages as delivered for a user
+ async markMessagesAsDelivered(userId, messageIds) {
+ try {
+ await Message.updateMany(
+ {
+ _id: { $in: messageIds },
+ recipients: userId
+ },
+ { status: 'delivered' }
+ );
+ console.log(`📝 Marked ${messageIds.length} messages as delivered for user: ${userId}`);
+ } catch (error) {
+ console.error('❌ Failed to mark messages as delivered:', error);
+ throw error;
+ }
+ }
+
+ // Generate unique message ID
+ generateMessageId() {
+ return `msg_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
+ }
+
+ // Add message to buffer for batch processing
+ addToBuffer(message) {
+ this.messageBuffer.push(message);
+
+ if (this.messageBuffer.length >= this.batchSize) {
+ this.flushBuffer();
+ }
+ }
+
+ // Flush message buffer
+ async flushBuffer() {
+ if (this.messageBuffer.length === 0) return;
+
+ const messagesToProcess = [...this.messageBuffer];
+ this.messageBuffer = [];
+
+ try {
+ await this.batchProcessMessages(messagesToProcess);
+ } catch (error) {
+ console.error('❌ Failed to flush message buffer:', error);
+ // Could implement retry logic or dead letter queue here
+ }
+ }
+
+ // Start periodic buffer flushing
+ startBufferFlushing() {
+ setInterval(() => {
+ this.flushBuffer();
+ }, this.flushInterval);
+ }
+}
+
+module.exports = new MessageService();
\ No newline at end of file
diff --git a/Backend/src/services/socket.js b/Backend/src/services/socket.js
new file mode 100644
index 0000000..d27acf3
--- /dev/null
+++ b/Backend/src/services/socket.js
@@ -0,0 +1,77 @@
+const { Server } = require("socket.io");
+const Redis = require("ioredis");
+// const prismaClient = require("./prisma");
+// const { produceMessage } = require("./kafka");
+
+// Redis (Valkey) pub/sub clients for cross-instance message fan-out.
+// Two connections are used: a publisher and a dedicated subscriber — a Redis
+// connection in subscribe mode cannot issue regular commands like PUBLISH.
+// Only created if VALKEY_HOST is configured; otherwise they stay null and the
+// socket service runs without pub/sub.
+let pub = null;
+let sub = null;
+
+if (process.env.VALKEY_HOST) {
+ pub = new Redis({
+ host: process.env.VALKEY_HOST,
+ port: process.env.VALKEY_PORT,
+ username: process.env.VALKEY_USERNAME,
+ password: process.env.VALKEY_PASSWORD,
+ });
+
+ sub = new Redis({
+ host: process.env.VALKEY_HOST,
+ port: process.env.VALKEY_PORT,
+ username: process.env.VALKEY_USERNAME,
+ password: process.env.VALKEY_PASSWORD,
+ });
+} else {
+ console.warn('⚠️ VALKEY_HOST not configured. Redis pub/sub will not work.');
+}
+
+class SocketService {
+
+ constructor() {
+ console.log("Init Socket Service...");
+ this._io = new Server(
+ {
+ cors: {
+ allowedHeaders: ["*"],
+ origin: "*",
+ },
+ }
+ );
+ if (sub) {
+ sub.subscribe("MESSAGES");
+ }
+ }
+
+ initListeners() {
+ const io = this.io;
+ console.log("Init Socket Listeners...");
+
+ io.on("connect", (socket) => {
+ console.log(`New Socket Connected`, socket.id);
+ socket.on("event:message", async ({ message }) => {
+ console.log("New Message Rec.", message);
+ // publish this message to redis (only if Redis is configured)
+ if (pub) {
+ await pub.publish("MESSAGES", JSON.stringify({ message }));
+ }
+ });
+ });
+
+ if (sub) {
+ sub.on("message", async (channel, message) => {
+ if (channel === "MESSAGES") {
+ console.log("new message from redis", message);
+ io.emit("message", message);
+ // await produceMessage(message);
+ // console.log("Message Produced to Kafka Broker");
+ }
+ });
+ }
+ }
+
+ get io() {
+ return this._io;
+ }
+}
+
+module.exports = SocketService;
\ No newline at end of file
diff --git a/Backend/src/sockets/chat.socket.js b/Backend/src/sockets/chat.socket.js
new file mode 100644
index 0000000..83dc2e0
--- /dev/null
+++ b/Backend/src/sockets/chat.socket.js
@@ -0,0 +1,150 @@
+/**
+ * Chat socket handlers: send, delivery status, room join/leave, presence.
+ * `pub` may be null when Redis/Valkey is not configured; publish calls guard it.
+ */
+const messageService = require('../services/messageService');
+
+module.exports = (io, socket, pub) => {
+  socket.on("chat:send", async (data) => {
+    try {
+      console.log("📨 Received chat message:", data);
+
+      // Create message data with additional context
+      const messageData = {
+        message: data.message,
+        roomId: data.roomId || 'general',
+        senderId: data.senderId || socket.id,
+        messageType: data.messageType || 'text',
+        recipients: data.recipients || [],
+        priority: data.priority || 'normal'
+      };
+
+      // Persist via Kafka; awaited so the delivery receipt below carries the stored id.
+      const kafkaMessage = await messageService.sendMessageToKafka(messageData);
+
+      // Send to Redis for immediate delivery to online users (only if Redis is configured)
+      if (pub) {
+        pub.publish("CHAT_MESSAGES", JSON.stringify({
+          ...data,
+          id: kafkaMessage.id,
+          timestamp: kafkaMessage.timestamp
+        }));
+      }
+
+      // Send delivery confirmation to sender
+      console.log("Message processed and sent to Kafka:", kafkaMessage.id);
+      socket.emit("message:delivered", {
+        id: kafkaMessage.id,
+        message: data.message,
+        timestamp: kafkaMessage.timestamp,
+        status: 'sent'
+      });
+
+    } catch (error) {
+      console.error("Error processing chat message:", error);
+
+      // Send error notification to sender
+      socket.emit("message:error", {
+        message: "Failed to send message",
+        error: error.message
+      });
+    }
+  });
+
+  // Handle message status updates
+  socket.on("message:status", async (data) => {
+    try {
+      const { messageId, status } = data;
+      await messageService.updateMessageStatus(messageId, status);
+      console.log(`Updated message status: ${messageId} -> ${status}`);
+    } catch (error) {
+      console.error("Error updating message status:", error);
+    }
+  });
+
+  // Handle user joining a room
+  socket.on("room:join", async (data) => {
+    try {
+      const { roomId, userId } = data;
+
+      // Join the socket room
+      socket.join(roomId);
+
+      // Store user's room info in socket
+      socket.roomId = roomId;
+      socket.userId = userId;
+
+      console.log(`User ${userId} joined room ${roomId}`);
+
+      // Notify others in the room
+      socket.to(roomId).emit("user:joined", {
+        userId,
+        roomId,
+        timestamp: new Date().toISOString()
+      });
+
+    } catch (error) {
+      console.error("Error joining room:", error);
+    }
+  });
+
+  // Handle user leaving a room
+  socket.on("room:leave", async (data) => {
+    try {
+      const { roomId, userId } = data;
+
+      // Leave the socket room
+      socket.leave(roomId);
+
+      console.log(`User ${userId} left room ${roomId}`);
+
+      // Notify others in the room
+      socket.to(roomId).emit("user:left", {
+        userId,
+        roomId,
+        timestamp: new Date().toISOString()
+      });
+
+    } catch (error) {
+      console.error("Error leaving room:", error);
+    }
+  });
+
+  // Handle user coming online
+  socket.on("user:online", async (data) => {
+    try {
+      const { userId } = data;
+
+      // Store user's online status
+      socket.userId = userId;
+
+      console.log(`User ${userId} is online`);
+
+      // Get unread messages for this user
+      const unreadMessages = await messageService.getUnreadMessages(userId);
+
+      if (unreadMessages.length > 0) {
+        console.log(`Sending ${unreadMessages.length} unread messages to user ${userId}`);
+
+        // Send unread messages to user
+        unreadMessages.forEach(msg => {
+          socket.emit("chat:receive", {
+            id: msg._id,
+            message: msg.content,
+            senderId: msg.senderId,
+            roomId: msg.roomId,
+            timestamp: msg.createdAt,
+            status: 'delivered'
+          });
+        });
+
+        // Mark messages as delivered
+        const messageIds = unreadMessages.map(msg => msg._id);
+        await messageService.markMessagesAsDelivered(userId, messageIds);
+      }
+
+    } catch (error) {
+      console.error("Error handling user online:", error);
+    }
+  });
+};
diff --git a/Backend/src/sockets/index.js b/Backend/src/sockets/index.js
new file mode 100644
index 0000000..4403218
--- /dev/null
+++ b/Backend/src/sockets/index.js
@@ -0,0 +1,79 @@
+const { Server } = require("socket.io");
+const Redis = require("ioredis");
+const chatSocket = require("./chat.socket.js");
+const notificationSocket = require("./notification.socket.js");
+
+// Only create Redis clients if VALKEY_HOST is configured
+let pub = null;
+let sub = null;
+
+if (process.env.VALKEY_HOST) {
+  pub = new Redis({
+    host: process.env.VALKEY_HOST,
+    port: process.env.VALKEY_PORT,
+    username: process.env.VALKEY_USERNAME,
+    password: process.env.VALKEY_PASSWORD,
+  });
+
+  sub = new Redis({
+    host: process.env.VALKEY_HOST,
+    port: process.env.VALKEY_PORT,
+    username: process.env.VALKEY_USERNAME,
+    password: process.env.VALKEY_PASSWORD,
+  });
+} else {
+  console.warn('⚠️ VALKEY_HOST not configured. Redis pub/sub will not work.');
+}
+
+/**
+ * Attach a socket.io server to the given HTTP server and wire up the
+ * chat/notification handlers plus Redis fan-out (when configured).
+ * @param {import('http').Server} httpServer
+ * @returns {Server} the socket.io server instance
+ */
+const setupSocket = (httpServer) => {
+  const io = new Server(httpServer, {
+    cors: {
+      origin: "*",
+      methods: ["GET", "POST"]
+    }
+  });
+
+  // Subscribe to both chat and notification channels (only if Redis is configured)
+  if (sub) {
+    sub.subscribe("CHAT_MESSAGES");
+    sub.subscribe("NOTIFICATION_MESSAGES");
+
+    sub.on("message", (channel, message) => {
+      // Malformed payloads must not crash the subscriber callback.
+      let parsed;
+      try {
+        parsed = JSON.parse(message);
+      } catch (err) {
+        console.error("Ignoring malformed Redis message:", err);
+        return;
+      }
+      if (channel === "CHAT_MESSAGES") {
+        io.emit("chat:receive", parsed);
+      } else if (channel === "NOTIFICATION_MESSAGES") {
+        io.emit("notification:receive", parsed);
+      }
+    });
+  }
+
+  io.on("connection", (socket) => {
+    console.log(`⚡ Socket connected: ${socket.id}`);
+
+    chatSocket(io, socket, pub);
+    notificationSocket(io, socket, pub);
+
+    socket.on("disconnect", () => {
+      console.log(`⚠️ Socket disconnected: ${socket.id}`);
+    });
+  });
+
+  // Return the server so callers can emit events or close it on shutdown.
+  return io;
+};
+
+module.exports = { setupSocket };
diff --git a/Backend/src/sockets/notification.socket.js b/Backend/src/sockets/notification.socket.js
new file mode 100644
index 0000000..f158401
--- /dev/null
+++ b/Backend/src/sockets/notification.socket.js
@@ -0,0 +1,11 @@
+// Registers notification handlers on a connected socket.
+// `pub` is null when VALKEY_HOST is not configured (see sockets/index.js),
+// so publishing is guarded — consistent with chat.socket.js.
+module.exports = (io, socket, pub) => {
+  socket.on("notification:send", (data) => {
+    console.log("Received notification:", data);
+    if (pub) {
+      pub.publish("NOTIFICATION_MESSAGES", JSON.stringify(data));
+    }
+  });
+};
diff --git a/Backend/src/utils/cloudinaryUploader.js b/Backend/src/utils/cloudinaryUploader.js
new file mode 100644
index 0000000..b971d97
--- /dev/null
+++ b/Backend/src/utils/cloudinaryUploader.js
@@ -0,0 +1,62 @@
+/**
+ * Upload base64 image to Cloudinary as-is
+ * @param {string} base64Image - Base64 encoded image with data URI prefix
+ * @returns {Promise