← Back to Cookbook

Competitive Intel Monitor

Scrapes competitor sites weekly and emails a summary of changes.

firecrawl · ai · resend

Source

/**
 * Scrapes competitor websites weekly and summarizes changes.
 */

/**
 * Weekly scan schedule.
 * Cron "0 9 * * 1" fires at minute 0, hour 9, every Monday,
 * evaluated in the America/New_York timezone.
 */
schedule weekly_intel {
  label: "Weekly Intel Scan"
  cron: "0 9 * * 1"
  timezone: "America/New_York"
}

/**
 * Scrapes two competitor pages in parallel, has an AI node produce a
 * structured change analysis, then emails a plain-text report.
 */
graph scan_competitors {
  label: "Scan Competitors"

  // Entry node: records the scan timestamp, later reused for the email subject.
  root {
    type: code
    label: "Start"
    code: @ts {
      return { scanned_at: new Date().toISOString() }
    }
    outputSchema: @json {
      {
        "type": "object",
        "properties": {
          "scanned_at": { "type": "string" }
        }
      }
    }
  }

  node scrape_homepage {
    type: firecrawl
    label: "Scrape competitor homepage"
    url: @ts { return "https://stripe.com" }
    onlyMainContent: true
    formats: ["markdown"]
  }

  node scrape_changelog {
    type: firecrawl
    label: "Scrape competitor changelog"
    url: @ts { return "https://stripe.com/blog" }
    onlyMainContent: true
    formats: ["markdown"]
  }

  // Joins both scrape outputs and requests a structured analysis.
  // NOTE(review): the full firecrawl output objects are stringified into the
  // prompt; confirm they stay within the model's context window for large pages.
  node analyze {
    type: ai
    label: "Analyze changes"
    kind: object
    model: "google/gemini-2.5-flash"
    prompt: @ts {
      return "Analyze these competitor pages for notable changes, new features, or strategic shifts.\n\nHomepage:\n" + JSON.stringify(context.nodes.scrape_homepage.output) + "\n\nChangelog:\n" + JSON.stringify(context.nodes.scrape_changelog.output)
    }
    schema: @json {
      {
        "type": "object",
        "required": ["summary", "key_changes"],
        "properties": {
          "summary": { "type": "string" },
          "key_changes": { "type": "array", "items": { "type": "string" } },
          "new_features": { "type": "array", "items": { "type": "string" } },
          "strategic_implications": { "type": "string" }
        }
      }
    }
  }

  // Formats the structured analysis into a plain-text email report.
  node send_report {
    type: resend
    label: "Send intel report"
    from: @ts { return "[email protected]" }
    to: @ts { return "[email protected]" }
    subject: @ts { return "Weekly Competitive Intel - " + context.nodes.root.output.scanned_at.split("T")[0] }
    text: @ts {
      const report = context.nodes.analyze.output
      // Render an array as a numbered list; fall back to a placeholder so the
      // email never contains an empty section.
      const numbered = function(items) {
        if (!items || items.length === 0) return "None detected"
        return items.map(function(item, i) { return (i + 1) + ". " + item }).join("\n")
      }
      // FIX: new_features is requested from the analyze node's schema but was
      // previously dropped from the email; include it alongside key_changes.
      return "Summary: " + report.summary
        + "\n\nKey Changes:\n" + numbered(report.key_changes)
        + "\n\nNew Features:\n" + numbered(report.new_features)
        + "\n\nStrategic Implications: " + (report.strategic_implications || "None noted")
    }
  }

  // Both scrapes run in parallel off root, join at analyze, then email.
  flow {
    root -> scrape_homepage
    root -> scrape_changelog
    scrape_homepage -> analyze
    scrape_changelog -> analyze
    analyze -> send_report
  }
}

/**
 * Wires the weekly_intel schedule to the scan_competitors graph,
 * so the scan runs on every schedule firing.
 */
trigger on_weekly_intel {
  schedule:weekly_intel -> scan_competitors
  enabled: true
}

Flow

Trigger → graph

Graph nodes