GraphQL Query Depth and Complexity Attacks Causing Resource Exhaustion

High Risk · API Security
graphql · query-depth · complexity-attack · dos · resource-exhaustion · performance · nested-queries

What it is

A high-severity vulnerability where GraphQL APIs lack proper query depth limiting, complexity analysis, and resource controls, allowing attackers to craft deeply nested or computationally expensive queries that can overwhelm servers, exhaust database resources, and cause denial of service. These attacks exploit GraphQL's flexible query structure to create exponentially expensive operations through deep nesting, large result sets, and complex field selections.

// VULNERABLE: GraphQL server without depth or complexity protection
const { ApolloServer, gql } = require('apollo-server-express');

// Dangerous schema with recursive relationships
const typeDefs = gql`
  type User {
    id: ID!
    name: String!
    posts: [Post!]!         # Unbounded list
    followers: [User!]!     # Recursive, unbounded
    following: [User!]!     # Recursive, unbounded
  }

  type Post {
    id: ID!
    title: String!
    author: User!           # Back-reference
    comments: [Comment!]!   # Unbounded list
    likes: [Like!]!         # Unbounded list
  }

  type Comment {
    id: ID!
    content: String!
    author: User!           # Back-reference
    post: Post!             # Back-reference
    replies: [Comment!]!    # Recursive, unbounded
  }

  type Query {
    users: [User!]!         # Returns all users
    posts: [Post!]!         # Returns all posts
  }
`;

// Inefficient resolvers with N+1 problems
const resolvers = {
  Query: {
    users: () => User.find(),  // No limits
    posts: () => Post.find()   // No limits
  },
  User: {
    posts: (user) => Post.find({ authorId: user.id }),
    followers: (user) => User.find({ following: user.id }),
    following: (user) => User.find({ followers: user.id })
  },
  Post: {
    author: (post) => User.findById(post.authorId),
    comments: (post) => Comment.find({ postId: post.id }),
    likes: (post) => Like.find({ postId: post.id })
  },
  Comment: {
    author: (comment) => User.findById(comment.authorId),
    post: (comment) => Post.findById(comment.postId),
    replies: (comment) => Comment.find({ parentId: comment.id })
  }
};

// PROBLEM: No security measures
const server = new ApolloServer({
  typeDefs,
  resolvers
  // Missing: depth limits, complexity analysis, timeouts, pagination
});

// Attack query that can crash the server:
// query MaliciousQuery {
//   users {
//     followers {
//       followers {
//         followers {
//           posts {
//             comments {
//               replies {
//                 author {
//                   followers {
//                     posts { comments { replies } }
//                   }
//                 }
//               }
//             }
//           }
//         }
//       }
//     }
//   }
// }

// SECURE: Comprehensive GraphQL security implementation
const { ApolloServer, gql } = require('apollo-server-express');
const depthLimit = require('graphql-depth-limit');
const costAnalysis = require('graphql-query-complexity').costAnalysisValidator;
const DataLoader = require('dataloader');
const rateLimit = require('express-rate-limit');

// Secure schema with pagination and limits
const typeDefs = gql`
  type PageInfo {
    hasNextPage: Boolean!
    hasPreviousPage: Boolean!
    startCursor: String
    endCursor: String
  }

  type UserEdge {
    node: User!
    cursor: String!
  }

  type UserConnection {
    edges: [UserEdge!]!
    pageInfo: PageInfo!
    totalCount: Int!
  }

  type PostConnection {
    edges: [PostEdge!]!
    pageInfo: PageInfo!
    totalCount: Int!
  }

  type PostEdge {
    node: Post!
    cursor: String!
  }

  type CommentConnection {
    edges: [CommentEdge!]!
    pageInfo: PageInfo!
    totalCount: Int!
  }

  type CommentEdge {
    node: Comment!
    cursor: String!
  }

  type User {
    id: ID!
    name: String!
    # Paginated relationships with strict limits
    posts(first: Int = 10, after: String): PostConnection!
    followers(first: Int = 20, after: String): UserConnection!
    following(first: Int = 20, after: String): UserConnection!
    # Cached counts instead of real-time aggregation
    postCount: Int!
    followerCount: Int!
    followingCount: Int!
  }

  type Post {
    id: ID!
    title: String!
    content: String!
    author: User!
    # Paginated comments with limits
    comments(first: Int = 10, after: String): CommentConnection!
    # Pre-calculated metrics
    likeCount: Int!
    commentCount: Int!
  }

  type Comment {
    id: ID!
    content: String!
    author: User!
    post: Post!
    # Limited reply depth
    replies(first: Int = 5, after: String): CommentConnection!
    replyCount: Int!
  }

  type Query {
    # Paginated queries with strict limits
    users(first: Int = 20, after: String, search: String): UserConnection!
    posts(first: Int = 20, after: String, authorId: ID): PostConnection!
    # Single item queries
    user(id: ID!): User
    post(id: ID!): Post
  }
`;

// Resource management and monitoring
class GraphQLResourceManager {
  constructor() {
    this.activeQueries = new Map();
    this.queryStats = {
      totalQueries: 0,
      timeouts: 0,
      complexityRejections: 0,
      averageExecutionTime: 0
    };
  }

  startQuery(queryId, query) {
    const queryInfo = {
      id: queryId,
      query: query?.substring(0, 200),
      startTime: Date.now(),
      memoryStart: process.memoryUsage().heapUsed,
      timeout: setTimeout(() => this.cancelQuery(queryId, 'timeout'), 30000)
    };
    this.activeQueries.set(queryId, queryInfo);
    this.queryStats.totalQueries++;
    return queryInfo;
  }

  finishQuery(queryId) {
    const queryInfo = this.activeQueries.get(queryId);
    if (!queryInfo) return;
    clearTimeout(queryInfo.timeout);
    const duration = Date.now() - queryInfo.startTime;
    this.queryStats.averageExecutionTime =
      (this.queryStats.averageExecutionTime + duration) / 2;
    if (duration > 5000) {
      console.warn('Slow query detected:', {
        duration: `${duration}ms`,
        query: queryInfo.query
      });
    }
    this.activeQueries.delete(queryId);
  }

  cancelQuery(queryId, reason) {
    const queryInfo = this.activeQueries.get(queryId);
    if (!queryInfo) return;
    if (reason === 'timeout') {
      this.queryStats.timeouts++;
    }
    console.warn('Query cancelled:', {
      queryId,
      reason,
      duration: `${Date.now() - queryInfo.startTime}ms`
    });
    this.finishQuery(queryId);
  }
}

const resourceManager = new GraphQLResourceManager();

// DataLoader factory for efficient batching
class DataLoaderFactory {
  static createUserLoader() {
    return new DataLoader(async (userIds) => {
      const users = await User.find({ _id: { $in: userIds } });
      return userIds.map(id =>
        users.find(user => user._id.toString() === id.toString()) || null
      );
    }, { maxBatchSize: 100 });
  }

  static createPostsByAuthorLoader() {
    return new DataLoader(async (authorIds) => {
      const posts = await Post.find({
        authorId: { $in: authorIds }
      }).sort({ createdAt: -1 }).limit(1000);
      return authorIds.map(authorId =>
        posts.filter(post => post.authorId.toString() === authorId.toString())
          .slice(0, 50) // Limit per author
      );
    });
  }
}

// Pagination helper
class PaginationHelper {
  static validateArgs(first, after) {
    const MAX_PAGE_SIZE = 100;
    if (first && first > MAX_PAGE_SIZE) {
      throw new Error(`Cannot request more than ${MAX_PAGE_SIZE} items`);
    }
    return {
      first: Math.min(first || 20, MAX_PAGE_SIZE),
      after: after || null
    };
  }

  static async paginate(model, filter, options) {
    const { first, after } = options;
    let offset = 0;
    if (after) {
      offset = parseInt(Buffer.from(after, 'base64').toString()) + 1;
    }
    const [items, totalCount] = await Promise.all([
      model.find(filter)
        .sort({ createdAt: -1 })
        .skip(offset)
        .limit(first + 1),
      model.countDocuments(filter)
    ]);
    const hasNextPage = items.length > first;
    if (hasNextPage) items.pop();
    const edges = items.map((item, index) => ({
      node: item,
      cursor: Buffer.from((offset + index).toString()).toString('base64')
    }));
    return {
      edges,
      pageInfo: {
        hasNextPage,
        hasPreviousPage: offset > 0,
        startCursor: edges[0]?.cursor || null,
        endCursor: edges[edges.length - 1]?.cursor || null
      },
      totalCount
    };
  }
}

// Secure resolvers with DataLoader and pagination
const resolvers = {
  Query: {
    users: async (_, args, { checkTimeout }) => {
      checkTimeout();
      const { first, after } = PaginationHelper.validateArgs(args.first, args.after);
      const filter = {};
      if (args.search) {
        if (args.search.length > 50) {
          throw new Error('Search query too long');
        }
        filter.$text = { $search: args.search };
      }
      return await PaginationHelper.paginate(User, filter, { first, after });
    },
    posts: async (_, args, { checkTimeout }) => {
      checkTimeout();
      const { first, after } = PaginationHelper.validateArgs(args.first, args.after);
      const filter = {};
      if (args.authorId) {
        filter.authorId = args.authorId;
      }
      return await PaginationHelper.paginate(Post, filter, { first, after });
    },
    user: async (_, { id }, { userLoader, checkTimeout }) => {
      checkTimeout();
      return await userLoader.load(id);
    },
    post: async (_, { id }, { postLoader, checkTimeout }) => {
      checkTimeout();
      return await postLoader.load(id);
    }
  },
  User: {
    posts: async (user, args, { checkTimeout }) => {
      checkTimeout();
      const { first, after } = PaginationHelper.validateArgs(args.first, args.after);
      return await PaginationHelper.paginate(
        Post,
        { authorId: user._id },
        { first, after }
      );
    },
    followers: async (user, args, { checkTimeout }) => {
      checkTimeout();
      const { first, after } = PaginationHelper.validateArgs(args.first, args.after);
      // Efficient followers lookup
      const followerIds = await Follow.find({ followingId: user._id })
        .select('followerId')
        .limit(first * 2);
      const filter = { _id: { $in: followerIds.map(f => f.followerId) } };
      return await PaginationHelper.paginate(User, filter, { first, after });
    },
    following: async (user, args, { checkTimeout }) => {
      checkTimeout();
      const { first, after } = PaginationHelper.validateArgs(args.first, args.after);
      const followingIds = await Follow.find({ followerId: user._id })
        .select('followingId')
        .limit(first * 2);
      const filter = { _id: { $in: followingIds.map(f => f.followingId) } };
      return await PaginationHelper.paginate(User, filter, { first, after });
    },
    // Use cached counts
    postCount: (user) => user.postCount || 0,
    followerCount: (user) => user.followerCount || 0,
    followingCount: (user) => user.followingCount || 0
  },
  Post: {
    author: async (post, _, { userLoader, checkTimeout }) => {
      checkTimeout();
      return await userLoader.load(post.authorId);
    },
    comments: async (post, args, { checkTimeout }) => {
      checkTimeout();
      const { first, after } = PaginationHelper.validateArgs(args.first, args.after);
      return await PaginationHelper.paginate(
        Comment,
        { postId: post._id, parentId: null },
        { first, after }
      );
    },
    likeCount: (post) => post.likeCount || 0,
    commentCount: (post) => post.commentCount || 0
  },
  Comment: {
    author: async (comment, _, { userLoader, checkTimeout }) => {
      checkTimeout();
      return await userLoader.load(comment.authorId);
    },
    post: async (comment, _, { postLoader, checkTimeout }) => {
      checkTimeout();
      return await postLoader.load(comment.postId);
    },
    replies: async (comment, args, { checkTimeout }) => {
      checkTimeout();
      // Limit reply depth
      const MAX_DEPTH = 3;
      if (comment.depth && comment.depth >= MAX_DEPTH) {
        return {
          edges: [],
          pageInfo: { hasNextPage: false, hasPreviousPage: false },
          totalCount: 0
        };
      }
      const { first, after } = PaginationHelper.validateArgs(args.first, args.after);
      return await PaginationHelper.paginate(
        Comment,
        { parentId: comment._id },
        { first, after }
      );
    },
    replyCount: (comment) => comment.replyCount || 0
  }
};

// Rate limiting for GraphQL
const graphqlLimiter = rateLimit({
  windowMs: 60 * 1000,
  max: 100,
  message: { error: 'Too many GraphQL requests' }
});

// Comprehensive server configuration
const server = new ApolloServer({
  typeDefs,
  resolvers,

  // Security validations
  validationRules: [
    depthLimit(7), // Maximum query depth
    costAnalysis({
      maximumCost: 1000,
      defaultCost: 1,
      scalarCost: 1,
      objectCost: 2,
      listFactor: 10,
      createError: (max, actual) => {
        resourceManager.queryStats.complexityRejections++;
        return new Error(`Query complexity ${actual} exceeds limit ${max}`);
      }
    })
  ],

  // Disable in production
  introspection: process.env.NODE_ENV !== 'production',
  playground: process.env.NODE_ENV !== 'production',

  // Context with DataLoaders and timeout
  context: ({ req }) => {
    const startTime = Date.now();
    return {
      userLoader: DataLoaderFactory.createUserLoader(),
      postsByAuthorLoader: DataLoaderFactory.createPostsByAuthorLoader(),
      startTime,
      checkTimeout: () => {
        if (Date.now() - startTime > 30000) {
          throw new Error('Query timeout after 30 seconds');
        }
      }
    };
  },

  // Monitoring plugins
  plugins: [
    {
      requestDidStart() {
        return {
          didResolveOperation(requestContext) {
            const queryId = `${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
            requestContext.queryId = queryId;
            resourceManager.startQuery(queryId, requestContext.request.query);
          },
          willSendResponse(requestContext) {
            if (requestContext.queryId) {
              resourceManager.finishQuery(requestContext.queryId);
            }
          },
          didEncounterErrors(requestContext) {
            if (requestContext.queryId) {
              resourceManager.finishQuery(requestContext.queryId);
            }
          }
        };
      }
    }
  ],

  formatError: (error) => {
    console.error('GraphQL Error:', error.message);
    if (process.env.NODE_ENV === 'production') {
      if (error.message.includes('complexity') ||
          error.message.includes('timeout') ||
          error.message.includes('depth')) {
        return new Error('Query rejected: Resource limits exceeded');
      }
    }
    return error;
  }
});

// Apply rate limiting
app.use('/graphql', graphqlLimiter);

// Health check with resource stats
app.get('/graphql/health', (req, res) => {
  const stats = resourceManager.queryStats;
  const isHealthy = resourceManager.activeQueries.size < 10;
  res.status(isHealthy ? 200 : 503).json({
    status: isHealthy ? 'healthy' : 'unhealthy',
    stats,
    activeQueries: resourceManager.activeQueries.size
  });
});

💡 Why This Fix Works

The secure implementation comprehensively addresses GraphQL query depth attacks through depth limiting, complexity analysis, efficient DataLoader-based resolvers, pagination with strict limits, query timeouts, resource monitoring, and proper error handling. It prevents exponential query growth while maintaining good performance through batching and caching.

Why it happens

GraphQL APIs without depth limiting allow attackers to create deeply nested queries that can exponentially increase the computational cost and database load. Each level of nesting can multiply the number of database queries or operations, leading to resource exhaustion.
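
As a back-of-the-envelope illustration (the numbers below are hypothetical, not measurements), the work grows multiplicatively with each nested level, so even a short query can fan out into hundreds of millions of field resolutions:

Example – JAVASCRIPT
// Rough fan-out estimate for a nested followers query (illustrative numbers)
const rootUsers = 100;      // users returned by the root field
const avgFollowers = 50;    // assumed average branching factor per level
const depth = 5;            // levels of `followers { ... }` nesting
const fieldResolutions = rootUsers * Math.pow(avgFollowers, depth - 1);
console.log(fieldResolutions); // 625000000 potential resolver calls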

Root causes

Unlimited Query Depth in GraphQL Resolvers

GraphQL APIs without depth limiting allow attackers to create deeply nested queries that can exponentially increase the computational cost and database load. Each level of nesting can multiply the number of database queries or operations, leading to resource exhaustion.

Preview example – JAVASCRIPT
// VULNERABLE: Apollo GraphQL without depth limiting
const { ApolloServer, gql } = require('apollo-server-express');

// Schema with recursive relationships - DANGEROUS without limits
const typeDefs = gql`
  type User {
    id: ID!
    name: String!
    posts: [Post!]!
    followers: [User!]!     # Recursive relationship
    following: [User!]!     # Recursive relationship
    comments: [Comment!]!
  }
  
  type Post {
    id: ID!
    title: String!
    content: String!
    author: User!           # Back-reference to User
    comments: [Comment!]!
    likes: [Like!]!
    tags: [Tag!]!
  }
  
  type Comment {
    id: ID!
    content: String!
    author: User!           # Back-reference to User
    post: Post!             # Back-reference to Post
    replies: [Comment!]!    # Recursive relationship
    likes: [Like!]!
  }
  
  type Query {
    users: [User!]!
    posts: [Post!]!
  }
`;

const resolvers = {
  Query: {
    // PROBLEM: Effectively unbounded result sizes
    users: () => User.find().limit(1000),  // Cap of 1000 users is far too high
    posts: () => Post.find().limit(1000)   // Cap of 1000 posts is far too high
  },
  User: {
    // DANGEROUS: N+1 problem without limits
    posts: (user) => Post.find({ authorId: user.id }),
    followers: (user) => User.find({ following: user.id }),
    following: (user) => User.find({ followers: user.id }),
    comments: (user) => Comment.find({ authorId: user.id })
  },
  Post: {
    author: (post) => User.findById(post.authorId),
    comments: (post) => Comment.find({ postId: post.id }),
    likes: (post) => Like.find({ postId: post.id })
  },
  Comment: {
    author: (comment) => User.findById(comment.authorId),
    post: (comment) => Post.findById(comment.postId),
    replies: (comment) => Comment.find({ parentId: comment.id }),
    likes: (comment) => Like.find({ commentId: comment.id })
  }
};

// PROBLEM: No query analysis or limits
const server = new ApolloServer({
  typeDefs,
  resolvers
  // Missing: depthLimit, costAnalysis, timeout
});

// Attack examples:
// 1. Deeply nested user relationships:
// query {
//   users {
//     followers {
//       followers {
//         followers {
//           followers {
//             posts {
//               comments {
//                 author {
//                   followers {
//                     posts {
//                       comments { ... }
//                     }
//                   }
//                 }
//               }
//             }
//           }
//         }
//       }
//     }
//   }
// }

// 2. Exponential complexity through multiple paths:
// query {
//   posts {
//     author { posts { author { posts { comments { replies } } } } }
//     comments { author { posts { comments { replies } } } }
//   }
// }

Lack of Query Complexity Analysis

GraphQL servers that don't implement query complexity analysis allow attackers to craft queries that request massive amounts of data or perform expensive operations. Without cost analysis, a single query can overwhelm the server by requesting thousands of records or performing complex calculations.

Preview example – JAVASCRIPT
// VULNERABLE: GraphQL without complexity analysis
const { ApolloServer, gql } = require('apollo-server-express');

const typeDefs = gql`
  type User {
    id: ID!
    name: String!
    posts(limit: Int = 100): [Post!]!      # Default limit too high
    analytics: UserAnalytics              # Expensive computation
    recommendations: [Post!]!             # Expensive ML operation
  }
  
  type Post {
    id: ID!
    title: String!
    content: String!
    author: User!
    similarPosts(limit: Int = 50): [Post!]!  # Expensive similarity search
    wordCount: Int!                       # Expensive computation
    readingTime: Float!                   # Expensive computation
    sentimentScore: Float!                # Expensive ML operation
  }
  
  type UserAnalytics {
    totalViews: Int!                      # Expensive aggregation
    engagementRate: Float!                # Complex calculation
    topPosts: [Post!]!                   # Expensive sorting/filtering
    readerDemographics: Demographics!     # Very expensive aggregation
  }
  
  type Query {
    users(limit: Int = 1000): [User!]!    # High default limit
    searchPosts(query: String!, limit: Int = 500): [Post!]!  # Expensive search
  }
`;

const resolvers = {
  Query: {
    // PROBLEM: No complexity validation
    users: (_, { limit }) => {
      // Could request 1000 users with all their data
      return User.find().limit(Math.min(limit, 1000));
    },
    searchPosts: async (_, { query, limit }) => {
      // EXPENSIVE: Full-text search without optimization
      return await Post.find({ $text: { $search: query } })
        .limit(Math.min(limit, 500))
        .populate('author');
    }
  },
  User: {
    posts: async (user, { limit }) => {
      // EXPENSIVE: Could fetch hundreds of posts per user
      return await Post.find({ authorId: user.id }).limit(limit);
    },
    analytics: async (user) => {
      // VERY EXPENSIVE: Complex aggregations
      return await calculateUserAnalytics(user.id);
    },
    recommendations: async (user) => {
      // EXTREMELY EXPENSIVE: ML-based recommendations
      return await getPersonalizedRecommendations(user.id);
    }
  },
  Post: {
    similarPosts: async (post, { limit }) => {
      // EXPENSIVE: Vector similarity search
      return await findSimilarPosts(post.id, limit);
    },
    wordCount: (post) => {
      // EXPENSIVE: Text processing for each post
      return countWords(post.content);
    },
    sentimentScore: async (post) => {
      // EXTREMELY EXPENSIVE: ML sentiment analysis
      return await analyzeSentiment(post.content);
    }
  },
  UserAnalytics: {
    totalViews: async (user) => {
      // EXPENSIVE: Database aggregation on every request
      const [result] = await Analytics.aggregate([
        { $match: { userId: user.id } },
        { $group: { _id: null, total: { $sum: '$views' } } }
      ]);
      return result ? result.total : 0;
    },
    readerDemographics: async (user) => {
      // EXTREMELY EXPENSIVE: Complex multi-table aggregation
      return await calculateReaderDemographics(user.id);
    }
  }
};

// PROBLEM: No server configuration for complexity
const server = new ApolloServer({
  typeDefs,
  resolvers
  // Missing: Query complexity analysis, timeouts, result size limits
});

// Attack query requesting massive computation:
// query ExpensiveQuery {
//   users(limit: 1000) {
//     id
//     name
//     analytics {
//       totalViews
//       engagementRate
//       readerDemographics {
//         ageGroups
//         countries
//         interests
//       }
//     }
//     posts(limit: 100) {
//       wordCount
//       sentimentScore
//       similarPosts(limit: 50) {
//         title
//         sentimentScore
//       }
//     }
//     recommendations {
//       title
//       sentimentScore
//     }
//   }
// }
// This query could:
// - Process 1000 users
// - Calculate analytics for each (expensive)
// - Process 100,000 posts (1000 users × 100 posts)
// - Calculate sentiment for 100,000+ posts
// - Find 5,000,000 similar posts (100,000 × 50)
// - Generate 1000 ML recommendations

Inefficient Resolver Implementation with N+1 Problems

GraphQL resolvers that don't implement proper batching or data loading create N+1 query problems that are amplified in deep or complex queries. Each field resolution triggers separate database queries, leading to exponential database load in nested queries.

Preview example – JAVASCRIPT
// VULNERABLE: GraphQL resolvers with N+1 problems
const { ApolloServer, gql } = require('apollo-server-express');

const typeDefs = gql`
  type Organization {
    id: ID!
    name: String!
    departments: [Department!]!
  }
  
  type Department {
    id: ID!
    name: String!
    employees: [Employee!]!
    projects: [Project!]!
  }
  
  type Employee {
    id: ID!
    name: String!
    email: String!
    manager: Employee
    directReports: [Employee!]!
    projects: [Project!]!
    timeEntries: [TimeEntry!]!
  }
  
  type Project {
    id: ID!
    name: String!
    assignees: [Employee!]!
    tasks: [Task!]!
  }
  
  type Task {
    id: ID!
    title: String!
    assignee: Employee!
    subtasks: [Task!]!
  }
  
  type Query {
    organizations: [Organization!]!
  }
`;

const resolvers = {
  Query: {
    // PROBLEM: Returns all organizations without limits
    organizations: () => Organization.find()
  },
  Organization: {
    // N+1 PROBLEM: Separate query for each organization
    departments: (org) => Department.find({ organizationId: org.id })
  },
  Department: {
    // N+1 PROBLEM: Separate query for each department
    employees: (dept) => Employee.find({ departmentId: dept.id }),
    projects: (dept) => Project.find({ departmentId: dept.id })
  },
  Employee: {
    // N+1 PROBLEM: Separate query for each employee
    manager: (employee) => {
      if (employee.managerId) {
        return Employee.findById(employee.managerId);
      }
      return null;
    },
    directReports: (employee) => Employee.find({ managerId: employee.id }),
    projects: (employee) => Project.find({ assigneeIds: employee.id }),
    timeEntries: (employee) => TimeEntry.find({ employeeId: employee.id })
  },
  Project: {
    // N+1 PROBLEM: Separate query for each project
    assignees: async (project) => {
      const assigneeIds = project.assigneeIds || [];
      // INEFFICIENT: Separate query for each assignee
      return Promise.all(
        assigneeIds.map(id => Employee.findById(id))
      );
    },
    tasks: (project) => Task.find({ projectId: project.id })
  },
  Task: {
    // N+1 PROBLEM: Separate query for each task
    assignee: (task) => Employee.findById(task.assigneeId),
    subtasks: (task) => Task.find({ parentTaskId: task.id })
  }
};

// Example attack query that creates massive N+1 problems:
// query ExponentialQuery {
//   organizations {                    # 1 query
//     name
//     departments {                  # N queries (N = number of orgs)
//       name
//       employees {                  # N×M queries (M = depts per org)
//         name
//         manager {                  # N×M×P queries (P = employees per dept)
//           name
//           directReports {          # N×M×P×Q queries
//             name
//             projects {             # N×M×P×Q×R queries
//               name
//               assignees {          # N×M×P×Q×R×S queries
//                 name
//               }
//               tasks {              # N×M×P×Q×R×T queries
//                 title
//                 assignee {         # N×M×P×Q×R×T×U queries
//                   name
//                 }
//                 subtasks {         # N×M×P×Q×R×T×V queries
//                   title
//                 }
//               }
//             }
//           }
//         }
//         timeEntries {              # Additional N×M×P queries
//           date
//           hours
//         }
//       }
//     }
//   }
// }

// With modest data (10 orgs, 5 depts each, 20 employees per dept),
// this query could generate 50,000+ database queries!
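// Rough breakdown with illustrative (assumed) branching factors:
//   1 organizations query -> 10 departments queries -> 50 employees queries
//   -> ~1,000 manager + ~1,000 directReports + ~1,000 timeEntries queries,
//   and every direct report then fans out again into projects, assignees,
//   tasks, and subtasks queries, which is where the total climbs past 50,000.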

// PROBLEM: No query timeout or resource limits
const server = new ApolloServer({
  typeDefs,
  resolvers,
  // Missing: DataLoader, query timeout, complexity analysis
  context: () => ({
    // Should include DataLoader instances for batching
  })
});

Missing Query Timeout and Resource Limits

GraphQL servers without proper timeout controls and resource limits allow long-running queries to consume server resources indefinitely. Attackers can craft queries that take minutes or hours to complete, exhausting server capacity and blocking other requests.

Preview example – JAVASCRIPT
// VULNERABLE: GraphQL server without timeouts or limits
const { ApolloServer, gql } = require('apollo-server-express');

const typeDefs = gql`
  type Query {
    # DANGEROUS: Expensive operations without limits
    generateReport(filters: ReportFilters!): Report!
    searchEverything(query: String!): SearchResults!
    calculateStatistics(dateRange: DateRange!): Statistics!
  }
  
  type Report {
    id: ID!
    data: String!              # Could be massive JSON
    processedAt: String!
    metrics: [Metric!]!        # Could be thousands of items
  }
  
  type SearchResults {
    users: [User!]!           # Unbounded result set
    posts: [Post!]!           # Unbounded result set
    comments: [Comment!]!     # Unbounded result set
    totalCount: Int!
  }
  
  type Statistics {
    userStats: [UserStat!]!   # Expensive aggregations
    postStats: [PostStat!]!   # Expensive aggregations
    systemMetrics: SystemMetrics!
  }
`;

const resolvers = {
  Query: {
    generateReport: async (_, { filters }) => {
      // PROBLEM: No timeout - could run for hours
      console.log('Starting report generation...');
      
      // Expensive operations without limits
      const users = await User.find(filters.userFilters);  // Could be millions
      const posts = await Post.find(filters.postFilters);  // Could be millions
      
      // EXTREMELY EXPENSIVE: Process all data
      const processedData = await Promise.all([
        analyzeUserBehavior(users),      // Could take hours
        generateInsights(posts),         // CPU intensive
        calculateTrends(users, posts),   // Memory intensive
        generateCharts(users, posts)     // CPU/Memory intensive
      ]);
      
      // PROBLEM: No memory limits - could create massive objects
      const reportData = {
        users: users.map(user => ({
          ...user.toObject(),
          analytics: calculateUserAnalytics(user),
          predictions: predictUserBehavior(user)
        })),
        posts: posts.map(post => ({
          ...post.toObject(),
          sentimentAnalysis: analyzeSentiment(post.content),
          similarPosts: findSimilarPosts(post)
        })),
        insights: processedData
      };
      
      return {
        id: generateId(),
        data: JSON.stringify(reportData),  // Could be gigabytes
        processedAt: new Date().toISOString(),
        metrics: generateMetrics(reportData)
      };
    },
    
    searchEverything: async (_, { query }) => {
      // PROBLEM: No pagination or limits
      console.log('Searching everything for:', query);
      
      // EXPENSIVE: Full-text search across all collections
      const [users, posts, comments] = await Promise.all([
        User.find({ $text: { $search: query } }),        // No limit
        Post.find({ $text: { $search: query } }),        // No limit
        Comment.find({ $text: { $search: query } })      // No limit
      ]);
      
      // PROBLEM: Could return millions of results
      return {
        users,
        posts,
        comments,
        totalCount: users.length + posts.length + comments.length
      };
    },
    
    calculateStatistics: async (_, { dateRange }) => {
      // PROBLEM: No validation of date range size
      const startDate = new Date(dateRange.start);
      const endDate = new Date(dateRange.end);
      
      // DANGEROUS: Could be processing years of data
      console.log(`Calculating stats from ${startDate} to ${endDate}`);
      
      // EXTREMELY EXPENSIVE: Complex aggregations
      const userStats = await User.aggregate([
        { $match: { createdAt: { $gte: startDate, $lte: endDate } } },
        { $group: { _id: '$country', count: { $sum: 1 } } },
        { $sort: { count: -1 } }
        // Could process millions of users
      ]);
      
      const postStats = await Post.aggregate([
        { $match: { createdAt: { $gte: startDate, $lte: endDate } } },
        {
          $group: {
            _id: { $dateToString: { format: "%Y-%m-%d", date: "$createdAt" } },
            count: { $sum: 1 },
            avgWordCount: { $avg: { $size: { $split: ["$content", " "] } } }
          }
        }
        // Could process millions of posts with expensive text operations
      ]);
      
      return {
        userStats,
        postStats,
        systemMetrics: await calculateSystemMetrics(startDate, endDate)
      };
    }
  }
};

// PROBLEM: No server-level protections
const server = new ApolloServer({
  typeDefs,
  resolvers,
  // MISSING: All protective measures
  // timeout: no timeout configured
  // introspection: enabled by default
  // playground: enabled by default
  context: () => ({
    // No request tracking or limits
  })
});

// Attack queries:

// 1. Long-running report generation:
// query SlowReport {
//   generateReport(filters: {
//     userFilters: {},           # All users
//     postFilters: {},           # All posts
//     includeAnalytics: true
//   }) {
//     data                       # Gigabytes of data
//     metrics {
//       name
//       value
//     }
//   }
// }

// 2. Unbounded search:
// query MassiveSearch {
//   searchEverything(query: "a") {  # Matches everything
//     users { id name email }
//     posts { id title content }
//     comments { id content }
//     totalCount
//   }
// }

// 3. Multi-year statistics:
// query ExpensiveStats {
//   calculateStatistics(dateRange: {
//     start: "2020-01-01"
//     end: "2024-12-31"          # 4+ years of data
//   }) {
//     userStats { _id count }
//     postStats { _id count avgWordCount }
//   }
// }

Fixes

1. Implement Query Depth Limiting and Complexity Analysis

Use libraries like graphql-depth-limit and graphql-query-complexity to analyze and restrict query depth and computational cost. Configure appropriate limits based on your server capacity and business requirements.

View implementation – JAVASCRIPT
const { ApolloServer, gql } = require('apollo-server-express');
const depthLimit = require('graphql-depth-limit');
const costAnalysis = require('graphql-query-complexity').costAnalysisValidator;

// Enhanced GraphQL server with comprehensive protection
const typeDefs = gql`
  type User {
    id: ID!
    name: String!
    posts(first: Int = 10, after: String): PostConnection!  # Paginated
    followers(first: Int = 20): [User!]!
    following(first: Int = 20): [User!]!
  }
  
  type Post {
    id: ID!
    title: String!
    content: String!
    author: User!
    comments(first: Int = 10): [Comment!]!
  }
  
  type PostConnection {
    edges: [PostEdge!]!
    pageInfo: PageInfo!
    totalCount: Int!
  }
  
  type PostEdge {
    node: Post!
    cursor: String!
  }
  
  type PageInfo {
    hasNextPage: Boolean!
    hasPreviousPage: Boolean!
    startCursor: String
    endCursor: String
  }
  
  type Query {
    users(first: Int = 20): [User!]!
    posts(first: Int = 20): [Post!]!
  }
`;

// Configure query complexity analysis
const MAX_QUERY_DEPTH = 10;
const MAX_QUERY_COMPLEXITY = 1000;

const server = new ApolloServer({
  typeDefs,
  resolvers,
  
  // Security validations
  validationRules: [
    // Limit query depth
    depthLimit(MAX_QUERY_DEPTH),
    
    // Analyze query complexity
    costAnalysis({
      maximumCost: MAX_QUERY_COMPLEXITY,
      defaultCost: 1,
      scalarCost: 1,
      objectCost: 2,
      listFactor: 10,  // Lists multiply cost
      introspectionCost: 1000,  // Make introspection expensive
      createError: (max, actual) => {
        return new Error(
          `Query complexity of ${actual} exceeds maximum allowed complexity of ${max}`
        );
      },
      // Custom field costs
      fieldExtensions: {
        cost: {
          User: {
            posts: { complexity: 10 },      // Expensive field
            followers: { complexity: 5 },
            following: { complexity: 5 }
          },
          Post: {
            comments: { complexity: 5 },
            author: { complexity: 2 }
          }
        }
      }
    }),
    
    // Custom validation for specific patterns
    (context) => ({
      Field: {
        enter(node, key, parent) {
          // Prevent certain dangerous field combinations
          if (node.name.value === 'users' && 
              context.getVariableValues().first > 100) {
            context.reportError(new Error(
              'Cannot request more than 100 users at once'
            ));
          }
        }
      }
    })
  ],
  
  // Request timeout
  plugins: [
    {
      requestDidStart() {
        return {
          willSendResponse(requestContext) {
            // Log expensive queries
            const complexity = requestContext.request.http?.complexity;
            if (complexity && complexity > 500) {
              console.warn('High complexity query executed:', {
                complexity,
                query: requestContext.request.query?.substring(0, 200),
                variables: requestContext.request.variables,
                userAgent: requestContext.request.http?.headers?.get('user-agent')
              });
            }
          },
          
          didEncounterErrors(requestContext) {
            // Log validation errors
            requestContext.errors?.forEach(error => {
              if (error.message.includes('complexity') || 
                  error.message.includes('depth')) {
                console.warn('Query security violation:', {
                  error: error.message,
                  query: requestContext.request.query,
                  ip: requestContext.request.ip
                });
              }
            });
          }
        };
      }
    }
  ],
  
  // Global timeout
  context: ({ req }) => {
    const startTime = Date.now();
    
    return {
      startTime,
      timeout: 30000, // 30 second timeout
      checkTimeout: () => {
        if (Date.now() - startTime > 30000) {
          throw new Error('Query timeout: Request took too long to process');
        }
      }
    };
  },
  
  formatError: (error) => {
    // Don't expose internal details in production
    if (process.env.NODE_ENV === 'production') {
      if (error.message.includes('timeout') || 
          error.message.includes('complexity') || 
          error.message.includes('depth')) {
        return new Error('Query rejected: Resource limits exceeded');
      }
    }
    
    return error;
  }
});
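
To see the depth rule working in isolation, here is a minimal, self-contained sketch (simplified schema and an illustrative limit of 3; it assumes graphql-depth-limit's rule can be passed directly to graphql-js validate like any other validation rule):

Example – JAVASCRIPT
const { buildSchema, parse, validate } = require('graphql');
const depthLimit = require('graphql-depth-limit');

// Tiny schema with a recursive relationship
const schema = buildSchema(`
  type User {
    id: ID!
    followers: [User!]!
  }
  type Query {
    users: [User!]!
  }
`);

// Six levels of nesting checked against a limit of three
const attack = parse(
  '{ users { followers { followers { followers { followers { followers { id } } } } } } }'
);

const errors = validate(schema, attack, [depthLimit(3)]);
console.log(errors.length > 0); // true: rejected at validation time, before any resolver runs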

2. Implement DataLoader for Efficient Batch Loading

Use DataLoader to solve N+1 query problems by batching database requests. This prevents exponential database load in nested queries and significantly improves performance.

View implementation – JAVASCRIPT
const DataLoader = require('dataloader');
const { ApolloServer, gql } = require('apollo-server-express');

// Create DataLoader instances for efficient batching
class DataLoaderFactory {
  static createUserLoader() {
    return new DataLoader(async (userIds) => {
      console.log(`Batching ${userIds.length} user lookups`);
      
      const users = await User.find({ _id: { $in: userIds } });
      
      // Return users in the same order as requested IDs
      return userIds.map(id => 
        users.find(user => user._id.toString() === id.toString()) || null
      );
    }, {
      cache: true,
      maxBatchSize: 100,  // Limit batch size
      batchScheduleFn: callback => setTimeout(callback, 10)  // Small delay for batching
    });
  }
  
  static createPostsByAuthorLoader() {
    return new DataLoader(async (authorIds) => {
      console.log(`Batching posts lookup for ${authorIds.length} authors`);
      
      const posts = await Post.find({ 
        authorId: { $in: authorIds } 
      }).sort({ createdAt: -1 }).limit(100); // Limit results
      
      // Group posts by author ID
      const postsByAuthor = authorIds.map(authorId => 
        posts.filter(post => post.authorId.toString() === authorId.toString())
      );
      
      return postsByAuthor;
    });
  }
  
  static createCommentsByPostLoader() {
    return new DataLoader(async (postIds) => {
      console.log(`Batching comments lookup for ${postIds.length} posts`);
      
      const comments = await Comment.find({ 
        postId: { $in: postIds } 
      }).sort({ createdAt: -1 }).limit(1000); // Global limit
      
      return postIds.map(postId =>
        comments.filter(comment => comment.postId.toString() === postId.toString())
      );
    });
  }
  
  static createFollowersLoader() {
    return new DataLoader(async (userIds) => {
      console.log(`Batching followers lookup for ${userIds.length} users`);
      
      // Use aggregation for efficient followers lookup
      const followersData = await Follow.aggregate([
        { $match: { followingId: { $in: userIds } } },
        { 
          $group: {
            _id: '$followingId',
            followers: { $push: '$followerId' }
          }
        }
      ]);
      
      // Get actual user objects for followers
      const allFollowerIds = [...new Set(
        followersData.flatMap(item => item.followers)
      )];
      
      const followers = await User.find({ 
        _id: { $in: allFollowerIds } 
      }).select('id name avatar');
      
      return userIds.map(userId => {
        const userData = followersData.find(
          item => item._id.toString() === userId.toString()
        );
        
        if (!userData) return [];
        
        return userData.followers
          .map(followerId => followers.find(
            follower => follower._id.toString() === followerId.toString()
          ))
          .filter(Boolean)
          .slice(0, 50); // Limit followers returned
      });
    });
  }
}

// Enhanced resolvers with DataLoader
const resolvers = {
  Query: {
    users: async (_, { first = 20 }, { userLoader }) => {
      // Limit and paginate results
      const users = await User.find()
        .sort({ createdAt: -1 })
        .limit(Math.min(first, 100)); // Cap at 100
      
      return users;
    },
    
    posts: async (_, { first = 20 }) => {
      return await Post.find()
        .sort({ createdAt: -1 })
        .limit(Math.min(first, 100))
        .populate('author');
    }
  },
  
  User: {
    posts: async (user, { first = 10 }, { postsByAuthorLoader, checkTimeout }) => {
      checkTimeout(); // Check for timeout
      
      const posts = await postsByAuthorLoader.load(user._id);
      return posts.slice(0, Math.min(first, 50)); // Limit results
    },
    
    followers: async (user, { first = 20 }, { followersLoader, checkTimeout }) => {
      checkTimeout();
      
      const followers = await followersLoader.load(user._id);
      return followers.slice(0, Math.min(first, 100));
    },
    
    following: async (user, { first = 20 }, { followingLoader, checkTimeout }) => {
      checkTimeout();
      
      const following = await followingLoader.load(user._id);
      return following.slice(0, Math.min(first, 100));
    }
  },
  
  Post: {
    author: async (post, _, { userLoader, checkTimeout }) => {
      checkTimeout();
      
      return await userLoader.load(post.authorId);
    },
    
    comments: async (post, { first = 10 }, { commentsByPostLoader, checkTimeout }) => {
      checkTimeout();
      
      const comments = await commentsByPostLoader.load(post._id);
      return comments.slice(0, Math.min(first, 50));
    }
  },
  
  Comment: {
    author: async (comment, _, { userLoader, checkTimeout }) => {
      checkTimeout();
      
      return await userLoader.load(comment.authorId);
    },
    
    post: async (comment, _, { postLoader, checkTimeout }) => {
      checkTimeout();
      
      return await postLoader.load(comment.postId);
    }
  }
};

// Context factory with DataLoaders
function createContext({ req }) {
  const startTime = Date.now();
  const TIMEOUT_MS = 30000; // 30 seconds
  
  return {
    // DataLoader instances
    userLoader: DataLoaderFactory.createUserLoader(),
    postsByAuthorLoader: DataLoaderFactory.createPostsByAuthorLoader(),
    commentsByPostLoader: DataLoaderFactory.createCommentsByPostLoader(),
    followersLoader: DataLoaderFactory.createFollowersLoader(),
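    // NOTE (assumption): the resolvers above also expect `postLoader` and
    // `followingLoader` in this context; create them the same way (batched
    // lookups) or those fields will fail at runtime.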
    
    // Timeout checking
    startTime,
    checkTimeout: () => {
      if (Date.now() - startTime > TIMEOUT_MS) {
        throw new Error(`Query timeout after ${TIMEOUT_MS}ms`);
      }
    },
    
    // Request metadata
    userAgent: req.headers['user-agent'],
    ip: req.ip
  };
}

// Server with DataLoader context
const server = new ApolloServer({
  typeDefs,
  resolvers,
  context: createContext,
  
  // Performance monitoring plugin
  plugins: [
    {
      requestDidStart() {
        return {
          willSendResponse(requestContext) {
            const duration = Date.now() - requestContext.context.startTime;
            
            if (duration > 5000) { // Log slow queries
              console.warn('Slow query detected:', {
                duration: `${duration}ms`,
                query: requestContext.request.query?.substring(0, 200),
                variables: requestContext.request.variables
              });
            }
          }
        };
      }
    }
  ]
});
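
As a quick sanity check of why this helps, the sketch below (self-contained, with a stand-in batch function instead of a real database) shows DataLoader deduplicating and batching repeated loads within a single request:

Example – JAVASCRIPT
const DataLoader = require('dataloader');

async function demo() {
  // Stand-in batch function so the sketch runs without a database
  const userLoader = new DataLoader(async (ids) => {
    console.log('batched keys:', ids);   // logged once: [ 'user-1' ]
    return ids.map((id) => ({ id }));
  });

  const [a, b] = await Promise.all([
    userLoader.load('user-1'),
    userLoader.load('user-1')            // deduplicated by the per-request cache
  ]);

  console.log(a === b); // true: one batch call, one shared result object
}

demo();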

3. Implement Query Timeouts and Resource Monitoring

Add comprehensive timeout controls, memory monitoring, and resource limits to prevent long-running queries from overwhelming the server. Implement graceful query cancellation and monitoring.

View implementation – JAVASCRIPT
const { ApolloServer, gql } = require('apollo-server-express');
const { createComplexityLimitRule } = require('graphql-query-complexity');
const depthLimit = require('graphql-depth-limit');

// Resource monitoring and limits
class QueryResourceManager {
  constructor() {
    this.activeQueries = new Map();
    this.queryStats = {
      totalQueries: 0,
      timeouts: 0,
      complexityRejections: 0,
      averageExecutionTime: 0
    };
  }
  
  startQuery(queryId, query, variables) {
    const queryInfo = {
      id: queryId,
      query: query?.substring(0, 500), // Truncate for logging
      variables,
      startTime: Date.now(),
      memoryStart: process.memoryUsage().heapUsed,
      timeout: null,
      cancelled: false
    };
    
    // Set timeout
    queryInfo.timeout = setTimeout(() => {
      this.cancelQuery(queryId, 'timeout');
    }, 30000); // 30 second timeout
    
    this.activeQueries.set(queryId, queryInfo);
    this.queryStats.totalQueries++;
    
    // Monitor memory usage
    this.checkMemoryUsage();
    
    return queryInfo;
  }
  
  finishQuery(queryId, success = true) {
    const queryInfo = this.activeQueries.get(queryId);
    if (!queryInfo) return;
    
    const duration = Date.now() - queryInfo.startTime;
    const memoryEnd = process.memoryUsage().heapUsed;
    const memoryUsed = memoryEnd - queryInfo.memoryStart;
    
    // Clear timeout
    if (queryInfo.timeout) {
      clearTimeout(queryInfo.timeout);
    }
    
    // Update statistics
    this.queryStats.averageExecutionTime = 
      (this.queryStats.averageExecutionTime + duration) / 2;
    
    // Log long-running or memory-intensive queries
    if (duration > 10000 || memoryUsed > 100 * 1024 * 1024) { // 100MB
      console.warn('Resource-intensive query completed:', {
        queryId,
        duration: `${duration}ms`,
        memoryUsed: `${Math.round(memoryUsed / 1024 / 1024)}MB`,
        success,
        query: queryInfo.query
      });
    }
    
    this.activeQueries.delete(queryId);
  }
  
  cancelQuery(queryId, reason) {
    const queryInfo = this.activeQueries.get(queryId);
    if (!queryInfo || queryInfo.cancelled) return;
    
    queryInfo.cancelled = true;
    
    if (reason === 'timeout') {
      this.queryStats.timeouts++;
    }
    
    console.warn('Query cancelled:', {
      queryId,
      reason,
      duration: `${Date.now() - queryInfo.startTime}ms`,
      query: queryInfo.query
    });
    
    this.finishQuery(queryId, false);
  }
  
  checkMemoryUsage() {
    const memoryUsage = process.memoryUsage();
    const heapUsedMB = Math.round(memoryUsage.heapUsed / 1024 / 1024);
    
    // Warn if memory usage is high
    if (heapUsedMB > 1000) { // 1GB
      console.warn('High memory usage detected:', {
        heapUsed: `${heapUsedMB}MB`,
        activeQueries: this.activeQueries.size
      });
      
      // Cancel oldest queries if memory is critically high
      if (heapUsedMB > 1500) { // 1.5GB
        this.cancelOldestQueries(3);
      }
    }
  }
  
  cancelOldestQueries(count) {
    const sortedQueries = Array.from(this.activeQueries.entries())
      .sort(([, a], [, b]) => a.startTime - b.startTime)
      .slice(0, count);
    
    sortedQueries.forEach(([queryId]) => {
      this.cancelQuery(queryId, 'memory_pressure');
    });
  }
  
  getStats() {
    return {
      ...this.queryStats,
      activeQueries: this.activeQueries.size,
      memoryUsage: process.memoryUsage()
    };
  }
}

const resourceManager = new QueryResourceManager();

// Custom timeout plugin
const timeoutPlugin = {
  requestDidStart() {
    return {
      didResolveOperation(requestContext) {
        const queryId = `${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
        requestContext.queryId = queryId;
        
        const queryInfo = resourceManager.startQuery(
          queryId,
          requestContext.request.query,
          requestContext.request.variables
        );
        
        requestContext.queryInfo = queryInfo;
        
        // Add timeout check to context
        requestContext.context.checkTimeout = () => {
          if (queryInfo.cancelled) {
            throw new Error('Query was cancelled due to resource limits');
          }
          
          const elapsed = Date.now() - queryInfo.startTime;
          if (elapsed > 30000) {
            resourceManager.cancelQuery(queryId, 'timeout');
            throw new Error('Query timeout: Request took too long to process');
          }
        };
      },
      
      willSendResponse(requestContext) {
        if (requestContext.queryId) {
          resourceManager.finishQuery(requestContext.queryId, true);
        }
      },
      
      didEncounterErrors(requestContext) {
        if (requestContext.queryId) {
          resourceManager.finishQuery(requestContext.queryId, false);
        }
      }
    };
  }
};

// Enhanced server with comprehensive protection
const server = new ApolloServer({
  typeDefs,
  resolvers,
  
  validationRules: [
    depthLimit(10),
    createComplexityLimitRule(1000, {
      onComplete: (complexity) => {
        if (complexity > 500) {
          console.warn('High complexity query approved:', { complexity });
        }
      },
      onReject: (complexity) => {
        resourceManager.queryStats.complexityRejections++;
        console.warn('Query rejected for high complexity:', { complexity });
      }
    })
  ],
  
  plugins: [
    timeoutPlugin,
    
    // Performance monitoring plugin
    {
      requestDidStart() {
        return {
          willSendResponse(requestContext) {
            const duration = Date.now() - requestContext.queryInfo?.startTime;
            
            // Log performance metrics
            if (duration > 1000) {
              console.info('Query performance:', {
                duration: `${duration}ms`,
                complexity: requestContext.queryComplexity,
                fieldCount: requestContext.fieldCount
              });
            }
          }
        };
      }
    }
  ],
  
  context: ({ req }) => {
    return {
      startTime: Date.now(),
      userAgent: req.headers['user-agent'],
      ip: req.ip,
      
      // Will be populated by timeout plugin
      checkTimeout: () => {}
    };
  },
  
  formatError: (error) => {
    // Log errors with context
    console.error('GraphQL Error:', {
      message: error.message,
      locations: error.locations,
      path: error.path,
      timestamp: new Date().toISOString()
    });
    
    // Return sanitized errors in production
    if (process.env.NODE_ENV === 'production') {
      if (error.message.includes('timeout') ||
          error.message.includes('cancelled') ||
          error.message.includes('complexity')) {
        return new Error('Query execution failed: Resource limits exceeded');
      }
    }
    
    return error;
  }
});

// Health check endpoint with resource stats
app.get('/graphql/health', (req, res) => {
  const stats = resourceManager.getStats();
  const isHealthy = stats.activeQueries < 10 && 
                   stats.memoryUsage.heapUsed < 1024 * 1024 * 1024; // 1GB
  
  res.status(isHealthy ? 200 : 503).json({
    status: isHealthy ? 'healthy' : 'unhealthy',
    stats,
    timestamp: new Date().toISOString()
  });
});

// Graceful shutdown
process.on('SIGTERM', () => {
  console.log('Shutting down GraphQL server...');
  
  // Cancel all active queries
  for (const [queryId] of resourceManager.activeQueries) {
    resourceManager.cancelQuery(queryId, 'shutdown');
  }
  
  server.stop().then(() => {
    console.log('GraphQL server stopped');
    process.exit(0);
  });
});

4. Implement Pagination and Result Set Limits

Use proper pagination patterns and enforce result set limits to prevent queries from returning massive amounts of data. Implement cursor-based pagination for consistent performance.

View implementation – JAVASCRIPT
const { ApolloServer, gql } = require('apollo-server-express');
const { connectionFromArray, cursorToOffset, offsetToCursor } = require('graphql-relay');

// Secure GraphQL schema with pagination
const typeDefs = gql`
  # Implement Relay-style pagination
  type PageInfo {
    hasNextPage: Boolean!
    hasPreviousPage: Boolean!
    startCursor: String
    endCursor: String
  }
  
  type UserEdge {
    node: User!
    cursor: String!
  }
  
  type UserConnection {
    edges: [UserEdge!]!
    pageInfo: PageInfo!
    totalCount: Int!
  }
  
  type PostEdge {
    node: Post!
    cursor: String!
  }
  
  type PostConnection {
    edges: [PostEdge!]!
    pageInfo: PageInfo!
    totalCount: Int!
  }
  
  type CommentEdge {
    node: Comment!
    cursor: String!
  }
  
  type CommentConnection {
    edges: [CommentEdge!]!
    pageInfo: PageInfo!
    totalCount: Int!
  }
  
  type User {
    id: ID!
    name: String!
    email: String!
    
    # Paginated relationships with limits
    posts(
      first: Int = 10,
      after: String,
      orderBy: PostOrderBy = CREATED_AT_DESC
    ): PostConnection!
    
    followers(
      first: Int = 20,
      after: String
    ): UserConnection!
    
    following(
      first: Int = 20, 
      after: String
    ): UserConnection!
    
    # Limited aggregations
    postCount: Int!          # Cached/indexed value
    followerCount: Int!      # Cached/indexed value
  }
  
  type Post {
    id: ID!
    title: String!
    content: String!
    createdAt: String!
    author: User!
    
    comments(
      first: Int = 10,
      after: String,
      orderBy: CommentOrderBy = CREATED_AT_DESC
    ): CommentConnection!
    
    # Pre-calculated metrics (avoid real-time computation)
    likeCount: Int!          # Cached value
    commentCount: Int!       # Cached value
    viewCount: Int!          # Cached value
  }
  
  type Comment {
    id: ID!
    content: String!
    createdAt: String!
    author: User!
    post: Post!
    
    # Limited nested comments
    replies(
      first: Int = 5,
      after: String
    ): CommentConnection!
    
    replyCount: Int!         # Cached value
  }
  
  enum PostOrderBy {
    CREATED_AT_ASC
    CREATED_AT_DESC
    TITLE_ASC
    TITLE_DESC
  }
  
  enum CommentOrderBy {
    CREATED_AT_ASC
    CREATED_AT_DESC
  }
  
  type Query {
    # Paginated root queries with strict limits
    users(
      first: Int = 20,
      after: String,
      search: String
    ): UserConnection!
    
    posts(
      first: Int = 20,
      after: String,
      authorId: ID,
      orderBy: PostOrderBy = CREATED_AT_DESC
    ): PostConnection!
    
    # Single item queries
    user(id: ID!): User
    post(id: ID!): Post
  }
`;

// Pagination utility functions
class PaginationHelper {
  static validatePaginationArgs(first, after) {
    // Enforce maximum page size
    const MAX_PAGE_SIZE = 100;
    const DEFAULT_PAGE_SIZE = 20;
    
    if (first && first > MAX_PAGE_SIZE) {
      throw new Error(`Cannot request more than ${MAX_PAGE_SIZE} items at once`);
    }
    
    if (first && first < 1) {
      throw new Error('Page size must be at least 1');
    }
    
    return {
      first: Math.min(first || DEFAULT_PAGE_SIZE, MAX_PAGE_SIZE),
      after: after || null
    };
  }
  
  static async paginateQuery(model, filter, options) {
    const { first, after, orderBy } = options;
    const { field, direction } = this.parseOrderBy(orderBy);
    
    // Calculate offset from cursor
    let offset = 0;
    if (after) {
      try {
        offset = cursorToOffset(after) + 1;
      } catch (error) {
        throw new Error('Invalid cursor provided');
      }
    }
    
    // Build sort object
    const sort = { [field]: direction === 'DESC' ? -1 : 1 };
    
    // Execute query with pagination
    const [items, totalCount] = await Promise.all([
      model.find(filter)
        .sort(sort)
        .skip(offset)
        .limit(first + 1), // Get one extra to check hasNextPage
      model.countDocuments(filter)
    ]);
    
    // Check if there are more items
    const hasNextPage = items.length > first;
    if (hasNextPage) {
      items.pop(); // Remove the extra item
    }
    
    const hasPreviousPage = offset > 0;
    
    // Create edges with cursors
    const edges = items.map((item, index) => ({
      node: item,
      cursor: offsetToCursor(offset + index)
    }));
    
    return {
      edges,
      pageInfo: {
        hasNextPage,
        hasPreviousPage,
        startCursor: edges.length > 0 ? edges[0].cursor : null,
        endCursor: edges.length > 0 ? edges[edges.length - 1].cursor : null
      },
      totalCount
    };
  }
  
  static parseOrderBy(orderBy) {
    const orderByMap = {
      CREATED_AT_ASC: { field: 'createdAt', direction: 'ASC' },
      CREATED_AT_DESC: { field: 'createdAt', direction: 'DESC' },
      TITLE_ASC: { field: 'title', direction: 'ASC' },
      TITLE_DESC: { field: 'title', direction: 'DESC' }
    };
    
    return orderByMap[orderBy] || orderByMap.CREATED_AT_DESC;
  }
}

// Secure resolvers with pagination
const resolvers = {
  Query: {
    users: async (_, args, { checkTimeout }) => {
      checkTimeout();
      
      const { first, after } = PaginationHelper.validatePaginationArgs(
        args.first, args.after
      );
      
      const filter = {};
      if (args.search) {
        // Limit search query length
        if (args.search.length > 100) {
          throw new Error('Search query too long');
        }
        filter.$text = { $search: args.search };
      }
      
      return await PaginationHelper.paginateQuery(User, filter, {
        first,
        after,
        orderBy: 'CREATED_AT_DESC'
      });
    },
    
    posts: async (_, args, { checkTimeout }) => {
      checkTimeout();
      
      const { first, after } = PaginationHelper.validatePaginationArgs(
        args.first, args.after
      );
      
      const filter = {};
      if (args.authorId) {
        filter.authorId = args.authorId;
      }
      
      return await PaginationHelper.paginateQuery(Post, filter, {
        first,
        after,
        orderBy: args.orderBy
      });
    },
    
    user: async (_, { id }, { userLoader, checkTimeout }) => {
      checkTimeout();
      return await userLoader.load(id);
    },
    
    post: async (_, { id }, { postLoader, checkTimeout }) => {
      checkTimeout();
      return await postLoader.load(id);
    }
  },
  
  User: {
    posts: async (user, args, { checkTimeout }) => {
      checkTimeout();
      
      const { first, after } = PaginationHelper.validatePaginationArgs(
        args.first, args.after
      );
      
      return await PaginationHelper.paginateQuery(
        Post, 
        { authorId: user._id },
        { first, after, orderBy: args.orderBy }
      );
    },
    
    followers: async (user, args, { checkTimeout }) => {
      checkTimeout();
      
      const { first, after } = PaginationHelper.validatePaginationArgs(
        args.first, args.after
      );
      
      // Use efficient aggregation for followers
      const followerIds = await Follow.find({ followingId: user._id })
        .select('followerId')
        .limit(first * 2); // Pre-filter to reasonable size
      
      const filter = { _id: { $in: followerIds.map(f => f.followerId) } };
      
      return await PaginationHelper.paginateQuery(User, filter, {
        first,
        after,
        orderBy: 'CREATED_AT_DESC'
      });
    },
    
    following: async (user, args, { checkTimeout }) => {
      checkTimeout();
      
      const { first, after } = PaginationHelper.validatePaginationArgs(
        args.first, args.after
      );
      
      const followingIds = await Follow.find({ followerId: user._id })
        .select('followingId')
        .limit(first * 2);
      
      const filter = { _id: { $in: followingIds.map(f => f.followingId) } };
      
      return await PaginationHelper.paginateQuery(User, filter, {
        first,
        after,
        orderBy: 'CREATED_AT_DESC'
      });
    },
    
    // Use cached/indexed counts instead of real-time aggregation
    postCount: (user) => user.postCount || 0,
    followerCount: (user) => user.followerCount || 0
  },
  
  Post: {
    author: async (post, _, { userLoader, checkTimeout }) => {
      checkTimeout();
      return await userLoader.load(post.authorId);
    },
    
    comments: async (post, args, { checkTimeout }) => {
      checkTimeout();
      
      const { first, after } = PaginationHelper.validatePaginationArgs(
        args.first, args.after
      );
      
      return await PaginationHelper.paginateQuery(
        Comment,
        { postId: post._id, parentId: null }, // Only top-level comments
        { first, after, orderBy: args.orderBy }
      );
    },
    
    // Use cached metrics
    likeCount: (post) => post.likeCount || 0,
    commentCount: (post) => post.commentCount || 0,
    viewCount: (post) => post.viewCount || 0
  },
  
  Comment: {
    author: async (comment, _, { userLoader, checkTimeout }) => {
      checkTimeout();
      return await userLoader.load(comment.authorId);
    },
    
    post: async (comment, _, { postLoader, checkTimeout }) => {
      checkTimeout();
      return await postLoader.load(comment.postId);
    },
    
    replies: async (comment, args, { checkTimeout }) => {
      checkTimeout();
      
      const { first, after } = PaginationHelper.validatePaginationArgs(
        args.first, args.after
      );
      
      // Limit reply depth to prevent infinite nesting
      const MAX_REPLY_DEPTH = 3;
      if (comment.depth && comment.depth >= MAX_REPLY_DEPTH) {
        return {
          edges: [],
          pageInfo: {
            hasNextPage: false,
            hasPreviousPage: false,
            startCursor: null,
            endCursor: null
          },
          totalCount: 0
        };
      }
      
      return await PaginationHelper.paginateQuery(
        Comment,
        { parentId: comment._id },
        { first, after, orderBy: 'CREATED_AT_DESC' }
      );
    },
    
    replyCount: (comment) => comment.replyCount || 0
  }
};

// Example usage with security limits:
// query SecurePaginatedQuery {
//   users(first: 20) {
//     edges {
//       node {
//         name
//         posts(first: 10) {
//           edges {
//             node {
//               title
//               comments(first: 5) {
//                 edges {
//                   node {
//                     content
//                     replies(first: 3) {
//                       totalCount
//                       # Cannot go deeper than 3 levels
//                     }
//                   }
//                 }
//               }
//             }
//           }
//         }
//       }
//     }
//     pageInfo {
//       hasNextPage
//       endCursor
//     }
//   }
// }

Detect This Vulnerability in Your Code

Sourcery automatically identifies GraphQL query depth and complexity attacks causing resource exhaustion, along with many other security issues in your codebase.