diff --git a/AI product imagines.json b/AI product imagines.json new file mode 100644 index 0000000..94393c3 --- /dev/null +++ b/AI product imagines.json @@ -0,0 +1,609 @@ +{ + "name": "AI product Images", + "nodes": [ + { + "parameters": {}, + "type": "n8n-nodes-base.manualTrigger", + "typeVersion": 1, + "position": [ + 0, + 0 + ], + "id": "1ddfbdfd-f3c0-4ef5-8b48-a3ae77a92955", + "name": "When clicking ‘Test workflow’" + }, + { + "parameters": { + "resource": "fileFolder", + "filter": { + "folderId": { + "__rl": true, + "value": "1NQ_9HXkMrjm_DPZENAmwRo0JJkvQm8BU", + "mode": "id" + } + }, + "options": {} + }, + "type": "n8n-nodes-base.googleDrive", + "typeVersion": 3, + "position": [ + 220, + 0 + ], + "id": "6feb317d-1d43-4174-a5a3-b9e4a2bf46e6", + "name": "Google Drive", + "credentials": { + "googleDriveOAuth2Api": { + "id": "mVYRcVX1PvkdODpc", + "name": "Google Drive account" + } + } + }, + { + "parameters": { + "operation": "download", + "fileId": { + "__rl": true, + "value": "={{$json[\"id\"]}}", + "mode": "id" + }, + "options": {} + }, + "type": "n8n-nodes-base.googleDrive", + "typeVersion": 3, + "position": [ + 460, + 0 + ], + "id": "b5232274-25a6-43b7-a424-dcfff47057ba", + "name": "Google Drive1", + "credentials": { + "googleDriveOAuth2Api": { + "id": "mVYRcVX1PvkdODpc", + "name": "Google Drive account" + } + } + }, + { + "parameters": { + "resource": "image", + "operation": "analyze", + "modelId": { + "__rl": true, + "value": "gpt-4o", + "mode": "list", + "cachedResultName": "GPT-4O" + }, + "text": "Describe the visual style of this image, what stands out. if you had to have a holistic overview, as a professional facebook ads designer. How would you explain this image / or images to be able to reproduce the elements that make it work for other ads.\n\nThe core goal of the output here should be to create a template of the style for inspirations. 
As later we will take ideas from these to generate our own high converting facebook ads.\n\nensure you do not make this product specific, rather focusing on creating outlines for static ad styles. so keep it vague in terms of what exactly is in the ad, but rather the principles of the ad", + "inputType": "base64", + "options": {} + }, + "type": "@n8n/n8n-nodes-langchain.openAi", + "typeVersion": 1.8, + "position": [ + 720, + -20 + ], + "id": "ff9ab47d-5980-4d2d-ab5e-1e76d0df87ab", + "name": "OpenAI", + "credentials": { + "openAiApi": { + "id": "hLlMCh2BqN9e4ile", + "name": "OpenAi account" + } + } + }, + { + "parameters": { + "resource": "fileFolder", + "searchMethod": "query", + "filter": { + "folderId": { + "__rl": true, + "value": "11t72SNGpHJvGk-UurfuLwGpdMz37-cwW", + "mode": "id" + } + }, + "options": {} + }, + "type": "n8n-nodes-base.googleDrive", + "typeVersion": 3, + "position": [ + 220, + 240 + ], + "id": "b5e0b994-cde8-4069-bdac-3173ecf8ccfb", + "name": "Google Drive2", + "credentials": { + "googleDriveOAuth2Api": { + "id": "mVYRcVX1PvkdODpc", + "name": "Google Drive account" + } + } + }, + { + "parameters": { + "operation": "download", + "fileId": { + "__rl": true, + "value": "={{$json[\"id\"]}}", + "mode": "id" + }, + "options": {} + }, + "type": "n8n-nodes-base.googleDrive", + "typeVersion": 3, + "position": [ + 460, + 240 + ], + "id": "41b44442-46d1-40b7-9a26-57f410d0426b", + "name": "Google Drive3", + "credentials": { + "googleDriveOAuth2Api": { + "id": "mVYRcVX1PvkdODpc", + "name": "Google Drive account" + } + } + }, + { + "parameters": { + "resource": "image", + "operation": "analyze", + "modelId": { + "__rl": true, + "value": "gpt-4o", + "mode": "list", + "cachedResultName": "GPT-4O" + }, + "text": "Analyse our product image. Identify the core emotions behind it and the main product. 
we will use this later to connect the product image with some ad styles and generate our own ads", + "inputType": "base64", + "options": {} + }, + "type": "@n8n/n8n-nodes-langchain.openAi", + "typeVersion": 1.8, + "position": [ + 700, + 240 + ], + "id": "1eea9b08-38d1-48dc-b578-88d0818de342", + "name": "OpenAI1", + "credentials": { + "openAiApi": { + "id": "hLlMCh2BqN9e4ile", + "name": "OpenAi account" + } + } + }, + { + "parameters": { + "model": { + "__rl": true, + "value": "gpt-4", + "mode": "list", + "cachedResultName": "gpt-4" + }, + "options": {} + }, + "type": "@n8n/n8n-nodes-langchain.lmChatOpenAi", + "typeVersion": 1.2, + "position": [ + 1040, + 420 + ], + "id": "6a97f2fe-fc62-4591-bb03-37d4ae24343c", + "name": "OpenAI Chat Model", + "credentials": { + "openAiApi": { + "id": "hLlMCh2BqN9e4ile", + "name": "OpenAi account" + } + } + }, + { + "parameters": { + "promptType": "define", + "text": "=You’ve been given an outline that includes: \n\n(use all the data from here when creating the prompts {{ $json.choices[0].message.content }} - it is also critical our product image is displayed in here: {{ $('OpenAI1').item.json.content }} )\n\nWhat the product is and who it’s for\n\nWhat’s visible in the product image (e.g. 
background, angle, lighting)\n\nPatterns and emotional triggers from top-performing ad examples\n\nYour task:\nUsing this outline, generate 10 image ad prompts that follow this format exactly:\n\nprompt: [Detailed visual description of the ad concept]\n\nEach prompt should include:\n\nA bold, testimonial-style headline (in quotation marks at the top)\n\nA clear description of the emotional transformation the image should convey\n\nWhat is shown in the image (body parts, facial expressions, environment, etc.)\n\nWhere and how the product is placed\n\nThe color palette and visual tone (e.g., icy blue for calm, red for pain)\n\nWhere the headline should appear on the image\n\nRendering style (e.g., cinematic, hyperrealistic, dramatic shadows)\n\nFinal format for Facebook or Instagram (1:1 format always)\n\nRepeat this 10 times. Each one must be unique, emotionally powerful, and visually clear.", + "hasOutputParser": true, + "options": {} + }, + "type": "@n8n/n8n-nodes-langchain.agent", + "typeVersion": 1.9, + "position": [ + 1120, + 240 + ], + "id": "16138c72-2b3d-4c4b-9d11-ad91d15c2e4c", + "name": "AI prompt agent" + }, + { + "parameters": { + "jsonSchemaExample": "[\n {\n \"Prompt\": \"Sun-drenched poolside shot of the product on a marble ledge at golden hour, with soft shadows and warm tones. Aspect ratio 1:1.\"\n },\n {\n \"Prompt\": \"Cool lavender-tinted sunset beach backdrop behind the product, highlighting reflective metallic accents. 
Aspect ratio 4:5.\"\n },\n {\n \"Prompt\": \"...\"\n }\n]\n" + }, + "type": "@n8n/n8n-nodes-langchain.outputParserStructured", + "typeVersion": 1.2, + "position": [ + 1380, + 420 + ], + "id": "8fbfad99-d724-45a9-9b3e-e45e486dcfc5", + "name": "Structured Output Parser" + }, + { + "parameters": { + "fieldToSplitOut": "output", + "options": {} + }, + "type": "n8n-nodes-base.splitOut", + "typeVersion": 1, + "position": [ + 1560, + 160 + ], + "id": "508946cd-0aed-4858-bf5a-c25e64371ea4", + "name": "Split Out" + }, + { + "parameters": { + "method": "POST", + "url": "https://api.openai.com/v1/images/generations", + "sendHeaders": true, + "headerParameters": { + "parameters": [ + { + "name": "Authorization", + "value": "Bearer {{OPENAI_API_KEY}}" + }, + { + "name": "Content-type", + "value": "application/json" + } + ] + }, + "sendBody": true, + "bodyParameters": { + "parameters": [ + { + "name": "model", + "value": "gpt-image-1" + }, + { + "name": "prompt", + "value": "={{ $json.Prompt }}" + }, + { + "name": "size", + "value": "1024x1024" + } + ] + }, + "options": {} + }, + "type": "n8n-nodes-base.httpRequest", + "typeVersion": 4.2, + "position": [ + 2320, + 160 + ], + "id": "ab08c200-9ff9-4dd9-86be-09b8f1e219a9", + "name": "HTTP Request1" + }, + { + "parameters": { + "options": {} + }, + "type": "n8n-nodes-base.splitInBatches", + "typeVersion": 3, + "position": [ + 1840, + 160 + ], + "id": "dc69258e-7c57-4158-92af-7257ba85102e", + "name": "Loop Over Items1" + }, + { + "parameters": { + "amount": 15 + }, + "type": "n8n-nodes-base.wait", + "typeVersion": 1.1, + "position": [ + 2100, + 160 + ], + "id": "1af8885e-00a3-49f1-b159-1a02eba84a84", + "name": "Wait", + "webhookId": "9f2950cd-2ab2-405f-83d7-4f44e15e16f2" + }, + { + "parameters": { + "operation": "toBinary", + "sourceProperty": "data[0].b64_json", + "options": { + 
"fileName": "image.png", + "mimeType": "image/png" + } + }, + "type": "n8n-nodes-base.convertToFile", + "typeVersion": 1.1, + "position": [ + 2540, + 160 + ], + "id": "535a5a20-11ab-476a-be3b-07e23073d5f5", + "name": "Convert to File" + }, + { + "parameters": { + "modelId": { + "__rl": true, + "value": "gpt-4", + "mode": "list", + "cachedResultName": "GPT-4" + }, + "messages": { + "values": [ + { + "content": "=Analyse this prompt which is a template of a high converting facebook ad we have built {{ $json.content }}\n\nNow we will take this template, and add our product in as the hero for our ads{{ $json.content}}\n\nensure the template is applied to make our product the hereo, with all copy, colours and vibe being focused on making the product stand out, in a facebook ad, deisgn to convert.\n\nMake a relevant prompt & outline as our next step in the flow is to break down this prompt (combining the facebook ad visual style, with our product image) and spit it into 10 prompts, all to create individual statics.\n\nDo what you think is best to pass this information forward.", + "role": "system" + }, + { + "content": "Put both pieces of data together and only generate 1 output prompt\n" + } + ] + }, + "simplify": false, + "options": {} + }, + "type": "@n8n/n8n-nodes-langchain.openAi", + "typeVersion": 1.8, + "position": [ + 1020, + -20 + ], + "id": "a01ff233-7e6f-456e-b691-f54a5c73aee0", + "name": "OpenAI2", + "executeOnce": false, + "alwaysOutputData": false, + "credentials": { + "openAiApi": { + "id": "CiqduZPbaJF5yveA", + "name": "OpenAi account 2" + } + } + }, + { + "parameters": { + "driveId": { + "__rl": true, + "mode": "list", + "value": "My Drive" + }, + "folderId": { + "__rl": true, + "value": "1V_USzVT-v-6LIjk3HPd0nlr2vnv4nJAr", + "mode": "list", + "cachedResultName": "n8n testing", + "cachedResultUrl": "https://drive.google.com/drive/folders/1V_USzVT-v-6LIjk3HPd0nlr2vnv4nJAr" + }, + "options": {} + }, + "type": "n8n-nodes-base.googleDrive", + "typeVersion": 
3, + "position": [ + 2780, + 160 + ], + "id": "3deb2f20-abc3-439e-b181-24c1956a4657", + "name": "Google Drive4", + "credentials": { + "googleDriveOAuth2Api": { + "id": "mVYRcVX1PvkdODpc", + "name": "Google Drive account" + } + } + } + ], + "pinData": {}, + "connections": { + "When clicking ‘Test workflow’": { + "main": [ + [ + { + "node": "Google Drive", + "type": "main", + "index": 0 + } + ] + ] + }, + "Google Drive": { + "main": [ + [ + { + "node": "Google Drive1", + "type": "main", + "index": 0 + } + ] + ] + }, + "Google Drive1": { + "main": [ + [ + { + "node": "OpenAI", + "type": "main", + "index": 0 + } + ] + ] + }, + "Google Drive2": { + "main": [ + [ + { + "node": "Google Drive3", + "type": "main", + "index": 0 + } + ] + ] + }, + "Google Drive3": { + "main": [ + [ + { + "node": "OpenAI1", + "type": "main", + "index": 0 + } + ] + ] + }, + "OpenAI": { + "main": [ + [ + { + "node": "Google Drive2", + "type": "main", + "index": 0 + } + ] + ] + }, + "OpenAI1": { + "main": [ + [ + { + "node": "OpenAI2", + "type": "main", + "index": 0 + } + ] + ] + }, + "OpenAI Chat Model": { + "ai_languageModel": [ + [ + { + "node": "AI prompt agent", + "type": "ai_languageModel", + "index": 0 + } + ] + ] + }, + "AI prompt agent": { + "main": [ + [ + { + "node": "Split Out", + "type": "main", + "index": 0 + } + ] + ] + }, + "Structured Output Parser": { + "ai_outputParser": [ + [ + { + "node": "AI prompt agent", + "type": "ai_outputParser", + "index": 0 + } + ] + ] + }, + "Split Out": { + "main": [ + [ + { + "node": "Loop Over Items1", + "type": "main", + "index": 0 + } + ] + ] + }, + "HTTP Request1": { + "main": [ + [ + { + "node": "Convert to File", + "type": "main", + "index": 0 + } + ] + ] + }, + "Loop Over Items1": { + "main": [ + [], + [ + { + "node": "Wait", + "type": "main", + "index": 0 + } + ] + ] + }, + "Wait": { + "main": [ + [ + { + "node": "HTTP Request1", + "type": "main", + "index": 0 + } + ] + ] + }, + "Convert to File": { + "main": [ + [ + { + "node": "Google 
Drive4", + "type": "main", + "index": 0 + } + ] + ] + }, + "OpenAI2": { + "main": [ + [ + { + "node": "AI prompt agent", + "type": "main", + "index": 0 + } + ] + ] + }, + "Google Drive4": { + "main": [ + [ + { + "node": "Loop Over Items1", + "type": "main", + "index": 0 + } + ] + ] + } + }, + "active": false, + "settings": { + "executionOrder": "v1" + }, + "versionId": "85462564-6f39-41df-b09c-9507c177c96a", + "meta": { + "templateCredsSetupCompleted": true, + "instanceId": "aa305389a9f146cc99db373653903c47ecf7fe4bb66df261da9bedf94add0f72" + }, + "id": "XZrlhnSYaHKcAdp4", + "tags": [] +} diff --git a/AI_Research_RAG_and_Data_Analysis/Analyze tradingview.com charts with Chrome extension, N8N and OpenAI.txt b/AI_Research_RAG_and_Data_Analysis/Analyze tradingview.com charts with Chrome extension, N8N and OpenAI.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Analyze tradingview.com charts with Chrome extension, N8N and OpenAI.txt rename to AI_Research_RAG_and_Data_Analysis/Analyze tradingview.com charts with Chrome extension, N8N and OpenAI.json diff --git a/AI_Research_RAG_and_Data_Analysis/Automated Hugging Face Paper Summary Fetching & Categorization Workflow.txt b/AI_Research_RAG_and_Data_Analysis/Automated Hugging Face Paper Summary Fetching & Categorization Workflow.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Automated Hugging Face Paper Summary Fetching & Categorization Workflow.txt rename to AI_Research_RAG_and_Data_Analysis/Automated Hugging Face Paper Summary Fetching & Categorization Workflow.json diff --git a/AI_Research_RAG_and_Data_Analysis/Autonomous AI crawler.txt b/AI_Research_RAG_and_Data_Analysis/Autonomous AI crawler.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Autonomous AI crawler.txt rename to AI_Research_RAG_and_Data_Analysis/Autonomous AI crawler.json diff --git a/AI_Research_RAG_and_Data_Analysis/Build Your Own Image Search Using AI Object Detection, CDN and 
ElasticSearchBuild Your Own Image Search Using AI Object Detection, CDN and ElasticSearch.txt b/AI_Research_RAG_and_Data_Analysis/Build Your Own Image Search Using AI Object Detection, CDN and ElasticSearchBuild Your Own Image Search Using AI Object Detection, CDN and ElasticSearch.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Build Your Own Image Search Using AI Object Detection, CDN and ElasticSearchBuild Your Own Image Search Using AI Object Detection, CDN and ElasticSearch.txt rename to AI_Research_RAG_and_Data_Analysis/Build Your Own Image Search Using AI Object Detection, CDN and ElasticSearchBuild Your Own Image Search Using AI Object Detection, CDN and ElasticSearch.json diff --git a/AI_Research_RAG_and_Data_Analysis/Build a Financial Documents Assistant using Qdrant and Mistral.ai.txt b/AI_Research_RAG_and_Data_Analysis/Build a Financial Documents Assistant using Qdrant and Mistral.ai.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Build a Financial Documents Assistant using Qdrant and Mistral.ai.txt rename to AI_Research_RAG_and_Data_Analysis/Build a Financial Documents Assistant using Qdrant and Mistral.ai.json diff --git a/AI_Research_RAG_and_Data_Analysis/Build a Tax Code Assistant with Qdrant, Mistral.ai and OpenAI.txt b/AI_Research_RAG_and_Data_Analysis/Build a Tax Code Assistant with Qdrant, Mistral.ai and OpenAI.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Build a Tax Code Assistant with Qdrant, Mistral.ai and OpenAI.txt rename to AI_Research_RAG_and_Data_Analysis/Build a Tax Code Assistant with Qdrant, Mistral.ai and OpenAI.json diff --git a/AI_Research_RAG_and_Data_Analysis/Building RAG Chatbot for Movie Recommendations with Qdrant and Open AI.txt b/AI_Research_RAG_and_Data_Analysis/Building RAG Chatbot for Movie Recommendations with Qdrant and Open AI.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Building RAG Chatbot for Movie 
Recommendations with Qdrant and Open AI.txt rename to AI_Research_RAG_and_Data_Analysis/Building RAG Chatbot for Movie Recommendations with Qdrant and Open AI.json diff --git a/AI_Research_RAG_and_Data_Analysis/Chat with GitHub API Documentation_ RAG-Powered Chatbot with Pinecone & OpenAI.txt b/AI_Research_RAG_and_Data_Analysis/Chat with GitHub API Documentation_ RAG-Powered Chatbot with Pinecone & OpenAI.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Chat with GitHub API Documentation_ RAG-Powered Chatbot with Pinecone & OpenAI.txt rename to AI_Research_RAG_and_Data_Analysis/Chat with GitHub API Documentation_ RAG-Powered Chatbot with Pinecone & OpenAI.json diff --git a/AI_Research_RAG_and_Data_Analysis/Create a Google Analytics Data Report with AI and sent it to E-Mail and Telegram.txt b/AI_Research_RAG_and_Data_Analysis/Create a Google Analytics Data Report with AI and sent it to E-Mail and Telegram.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Create a Google Analytics Data Report with AI and sent it to E-Mail and Telegram.txt rename to AI_Research_RAG_and_Data_Analysis/Create a Google Analytics Data Report with AI and sent it to E-Mail and Telegram.json diff --git a/AI_Research_RAG_and_Data_Analysis/Customer Insights with Qdrant, Python and Information Extractor.txt b/AI_Research_RAG_and_Data_Analysis/Customer Insights with Qdrant, Python and Information Extractor.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Customer Insights with Qdrant, Python and Information Extractor.txt rename to AI_Research_RAG_and_Data_Analysis/Customer Insights with Qdrant, Python and Information Extractor.json diff --git a/AI_Research_RAG_and_Data_Analysis/Deduplicate Scraping AI Grants for Eligibility using AI.txt b/AI_Research_RAG_and_Data_Analysis/Deduplicate Scraping AI Grants for Eligibility using AI.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Deduplicate Scraping AI 
Grants for Eligibility using AI.txt rename to AI_Research_RAG_and_Data_Analysis/Deduplicate Scraping AI Grants for Eligibility using AI.json diff --git a/AI_Research_RAG_and_Data_Analysis/Enrich Property Inventory Survey with Image Recognition and AI Agent.txt b/AI_Research_RAG_and_Data_Analysis/Enrich Property Inventory Survey with Image Recognition and AI Agent.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Enrich Property Inventory Survey with Image Recognition and AI Agent.txt rename to AI_Research_RAG_and_Data_Analysis/Enrich Property Inventory Survey with Image Recognition and AI Agent.json diff --git a/AI_Research_RAG_and_Data_Analysis/Extract insights & analyse YouTube comments via AI Agent chat.txt b/AI_Research_RAG_and_Data_Analysis/Extract insights & analyse YouTube comments via AI Agent chat.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Extract insights & analyse YouTube comments via AI Agent chat.txt rename to AI_Research_RAG_and_Data_Analysis/Extract insights & analyse YouTube comments via AI Agent chat.json diff --git a/AI_Research_RAG_and_Data_Analysis/Generate SEO Seed Keywords Using AI.txt b/AI_Research_RAG_and_Data_Analysis/Generate SEO Seed Keywords Using AI.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Generate SEO Seed Keywords Using AI.txt rename to AI_Research_RAG_and_Data_Analysis/Generate SEO Seed Keywords Using AI.json diff --git a/AI_Research_RAG_and_Data_Analysis/Hacker News Job Listing Scraper and Parser.txt b/AI_Research_RAG_and_Data_Analysis/Hacker News Job Listing Scraper and Parser.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Hacker News Job Listing Scraper and Parser.txt rename to AI_Research_RAG_and_Data_Analysis/Hacker News Job Listing Scraper and Parser.json diff --git a/AI_Research_RAG_and_Data_Analysis/Hacker News to Video Content.txt b/AI_Research_RAG_and_Data_Analysis/Hacker News to Video Content.json similarity index 
100% rename from AI_Research_RAG_and_Data_Analysis/Hacker News to Video Content.txt rename to AI_Research_RAG_and_Data_Analysis/Hacker News to Video Content.json diff --git a/AI_Research_RAG_and_Data_Analysis/Host Your Own AI Deep Research Agent with n8n, Apify and OpenAI o3.txt b/AI_Research_RAG_and_Data_Analysis/Host Your Own AI Deep Research Agent with n8n, Apify and OpenAI o3.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Host Your Own AI Deep Research Agent with n8n, Apify and OpenAI o3.txt rename to AI_Research_RAG_and_Data_Analysis/Host Your Own AI Deep Research Agent with n8n, Apify and OpenAI o3.json diff --git a/AI_Research_RAG_and_Data_Analysis/Intelligent Web Query and Semantic Re-Ranking Flow using Brave and Google Gemini.txt b/AI_Research_RAG_and_Data_Analysis/Intelligent Web Query and Semantic Re-Ranking Flow using Brave and Google Gemini.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Intelligent Web Query and Semantic Re-Ranking Flow using Brave and Google Gemini.txt rename to AI_Research_RAG_and_Data_Analysis/Intelligent Web Query and Semantic Re-Ranking Flow using Brave and Google Gemini.json diff --git a/AI_Research_RAG_and_Data_Analysis/Learn Anything from HN - Get Top Resource Recommendations from Hacker News.txt b/AI_Research_RAG_and_Data_Analysis/Learn Anything from HN - Get Top Resource Recommendations from Hacker News.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Learn Anything from HN - Get Top Resource Recommendations from Hacker News.txt rename to AI_Research_RAG_and_Data_Analysis/Learn Anything from HN - Get Top Resource Recommendations from Hacker News.json diff --git a/AI_Research_RAG_and_Data_Analysis/Make OpenAI Citation for File Retrieval RAG.txt b/AI_Research_RAG_and_Data_Analysis/Make OpenAI Citation for File Retrieval RAG.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Make OpenAI Citation for File Retrieval RAG.txt rename to 
AI_Research_RAG_and_Data_Analysis/Make OpenAI Citation for File Retrieval RAG.json diff --git a/AI_Research_RAG_and_Data_Analysis/Open Deep Research - AI-Powered Autonomous Research Workflow.txt b/AI_Research_RAG_and_Data_Analysis/Open Deep Research - AI-Powered Autonomous Research Workflow.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Open Deep Research - AI-Powered Autonomous Research Workflow.txt rename to AI_Research_RAG_and_Data_Analysis/Open Deep Research - AI-Powered Autonomous Research Workflow.json diff --git a/AI_Research_RAG_and_Data_Analysis/Query Perplexity AI from your n8n workflows.txt b/AI_Research_RAG_and_Data_Analysis/Query Perplexity AI from your n8n workflows.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Query Perplexity AI from your n8n workflows.txt rename to AI_Research_RAG_and_Data_Analysis/Query Perplexity AI from your n8n workflows.json diff --git a/AI_Research_RAG_and_Data_Analysis/Recipe Recommendations with Qdrant and Mistral.txt b/AI_Research_RAG_and_Data_Analysis/Recipe Recommendations with Qdrant and Mistral.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Recipe Recommendations with Qdrant and Mistral.txt rename to AI_Research_RAG_and_Data_Analysis/Recipe Recommendations with Qdrant and Mistral.json diff --git a/AI_Research_RAG_and_Data_Analysis/Reconcile Rent Payments with Local Excel Spreadsheet and OpenAI.txt b/AI_Research_RAG_and_Data_Analysis/Reconcile Rent Payments with Local Excel Spreadsheet and OpenAI.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Reconcile Rent Payments with Local Excel Spreadsheet and OpenAI.txt rename to AI_Research_RAG_and_Data_Analysis/Reconcile Rent Payments with Local Excel Spreadsheet and OpenAI.json diff --git a/AI_Research_RAG_and_Data_Analysis/Scrape Trustpilot Reviews with DeepSeek, Analyze Sentiment with OpenAI.txt b/AI_Research_RAG_and_Data_Analysis/Scrape Trustpilot Reviews with DeepSeek, 
Analyze Sentiment with OpenAI.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Scrape Trustpilot Reviews with DeepSeek, Analyze Sentiment with OpenAI.txt rename to AI_Research_RAG_and_Data_Analysis/Scrape Trustpilot Reviews with DeepSeek, Analyze Sentiment with OpenAI.json diff --git a/AI_Research_RAG_and_Data_Analysis/Scrape and summarize posts of a news site without RSS feed using AI and save them to a NocoDB.txt b/AI_Research_RAG_and_Data_Analysis/Scrape and summarize posts of a news site without RSS feed using AI and save them to a NocoDB.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Scrape and summarize posts of a news site without RSS feed using AI and save them to a NocoDB.txt rename to AI_Research_RAG_and_Data_Analysis/Scrape and summarize posts of a news site without RSS feed using AI and save them to a NocoDB.json diff --git a/AI_Research_RAG_and_Data_Analysis/Scrape and summarize webpages with AI.txt b/AI_Research_RAG_and_Data_Analysis/Scrape and summarize webpages with AI.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Scrape and summarize webpages with AI.txt rename to AI_Research_RAG_and_Data_Analysis/Scrape and summarize webpages with AI.json diff --git a/AI_Research_RAG_and_Data_Analysis/Send Google analytics data to A.I. to analyze then save results in Baserow.txt b/AI_Research_RAG_and_Data_Analysis/Send Google analytics data to A.I. to analyze then save results in Baserow.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Send Google analytics data to A.I. to analyze then save results in Baserow.txt rename to AI_Research_RAG_and_Data_Analysis/Send Google analytics data to A.I. to analyze then save results in Baserow.json diff --git a/AI_Research_RAG_and_Data_Analysis/Send Google analytics data to A.I. to analyze then save results in BaserowSend Google analytics data to A.I. 
to analyze then save results in Baserow.txt b/AI_Research_RAG_and_Data_Analysis/Send Google analytics data to A.I. to analyze then save results in BaserowSend Google analytics data to A.I. to analyze then save results in Baserow.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Send Google analytics data to A.I. to analyze then save results in BaserowSend Google analytics data to A.I. to analyze then save results in Baserow.txt rename to AI_Research_RAG_and_Data_Analysis/Send Google analytics data to A.I. to analyze then save results in BaserowSend Google analytics data to A.I. to analyze then save results in Baserow.json diff --git a/AI_Research_RAG_and_Data_Analysis/Spot Workplace Discrimination Patterns with AI.txt b/AI_Research_RAG_and_Data_Analysis/Spot Workplace Discrimination Patterns with AI.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Spot Workplace Discrimination Patterns with AI.txt rename to AI_Research_RAG_and_Data_Analysis/Spot Workplace Discrimination Patterns with AI.json diff --git a/AI_Research_RAG_and_Data_Analysis/Summarize SERPBear data with AI (via Openrouter) and save it to Baserow.txt b/AI_Research_RAG_and_Data_Analysis/Summarize SERPBear data with AI (via Openrouter) and save it to Baserow.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Summarize SERPBear data with AI (via Openrouter) and save it to Baserow.txt rename to AI_Research_RAG_and_Data_Analysis/Summarize SERPBear data with AI (via Openrouter) and save it to Baserow.json diff --git a/AI_Research_RAG_and_Data_Analysis/Summarize Umami data with AI (via Openrouter) and save it to Baserow.txt b/AI_Research_RAG_and_Data_Analysis/Summarize Umami data with AI (via Openrouter) and save it to Baserow.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Summarize Umami data with AI (via Openrouter) and save it to Baserow.txt rename to AI_Research_RAG_and_Data_Analysis/Summarize Umami data with AI 
(via Openrouter) and save it to Baserow.json diff --git a/AI_Research_RAG_and_Data_Analysis/Survey Insights with Qdrant, Python and Information Extractor.txt b/AI_Research_RAG_and_Data_Analysis/Survey Insights with Qdrant, Python and Information Extractor.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Survey Insights with Qdrant, Python and Information Extractor.txt rename to AI_Research_RAG_and_Data_Analysis/Survey Insights with Qdrant, Python and Information Extractor.json diff --git a/AI_Research_RAG_and_Data_Analysis/Ultimate Scraper Workflow for n8n.txt b/AI_Research_RAG_and_Data_Analysis/Ultimate Scraper Workflow for n8n.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Ultimate Scraper Workflow for n8n.txt rename to AI_Research_RAG_and_Data_Analysis/Ultimate Scraper Workflow for n8n.json diff --git a/AI_Research_RAG_and_Data_Analysis/Vector Database as a Big Data Analysis Tool for AI Agents [1_3 anomaly][1_2 KNN].txt b/AI_Research_RAG_and_Data_Analysis/Vector Database as a Big Data Analysis Tool for AI Agents [1_3 anomaly][1_2 KNN].json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Vector Database as a Big Data Analysis Tool for AI Agents [1_3 anomaly][1_2 KNN].txt rename to AI_Research_RAG_and_Data_Analysis/Vector Database as a Big Data Analysis Tool for AI Agents [1_3 anomaly][1_2 KNN].json diff --git a/AI_Research_RAG_and_Data_Analysis/Vector Database as a Big Data Analysis Tool for AI Agents [2_2 KNN] (1).txt b/AI_Research_RAG_and_Data_Analysis/Vector Database as a Big Data Analysis Tool for AI Agents [2_2 KNN].json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Vector Database as a Big Data Analysis Tool for AI Agents [2_2 KNN] (1).txt rename to AI_Research_RAG_and_Data_Analysis/Vector Database as a Big Data Analysis Tool for AI Agents [2_2 KNN].json diff --git a/AI_Research_RAG_and_Data_Analysis/Vector Database as a Big Data Analysis Tool for AI Agents [2_2 
KNN].txt b/AI_Research_RAG_and_Data_Analysis/Vector Database as a Big Data Analysis Tool for AI Agents [2_2 KNN].txt deleted file mode 100644 index 1606a3e..0000000 --- a/AI_Research_RAG_and_Data_Analysis/Vector Database as a Big Data Analysis Tool for AI Agents [2_2 KNN].txt +++ /dev/null @@ -1,544 +0,0 @@ -{ -"id": "itzURpN5wbUNOXOw", -"meta": { -"instanceId": "205b3bc06c96f2dc835b4f00e1cbf9a937a74eeb3b47c99d0c30b0586dbf85aa" -}, -"name": "[2/2] KNN classifier (lands dataset)", -"tags": [ -{ -"id": "QN7etptCmdcGIpkS", -"name": "classifier", -"createdAt": "2024-12-08T22:08:15.968Z", -"updatedAt": "2024-12-09T19:25:04.113Z" -} -], -"nodes": [ -{ -"id": "33373ccb-164e-431c-8a9a-d68668fc70be", -"name": "Embed image", -"type": "n8n-nodes-base.httpRequest", -"position": [ --140, --240 -], -"parameters": { -"url": "https://api.voyageai.com/v1/multimodalembeddings", -"method": "POST", -"options": {}, -"jsonBody": "={{\n{\n \"inputs\": [\n {\n \"content\": [\n {\n \"type\": \"image_url\",\n \"image_url\": $json.imageURL\n }\n ]\n }\n ],\n \"model\": \"voyage-multimodal-3\",\n \"input_type\": \"document\"\n}\n}}", -"sendBody": true, -"specifyBody": "json", -"authentication": "genericCredentialType", -"genericAuthType": "httpHeaderAuth" -}, -"credentials": { -"httpHeaderAuth": { -"id": "Vb0RNVDnIHmgnZOP", -"name": "Voyage API" -} -}, -"typeVersion": 4.2 -}, -{ -"id": "58adecfa-45c7-4928-b850-053ea6f3b1c5", -"name": "Query Qdrant", -"type": "n8n-nodes-base.httpRequest", -"position": [ -440, --240 -], -"parameters": { -"url": "={{ $json.qdrantCloudURL }}/collections/{{ $json.collectionName }}/points/query", -"method": "POST", -"options": {}, -"jsonBody": "={{\n{\n \"query\": $json.ImageEmbedding,\n \"using\": \"voyage\",\n \"limit\": $json.limitKNN,\n \"with_payload\": true\n}\n}}", -"sendBody": true, -"specifyBody": "json", -"authentication": "predefinedCredentialType", -"nodeCredentialType": "qdrantApi" -}, -"credentials": { -"qdrantApi": { -"id": "it3j3hP9FICqhgX6", 
-"name": "QdrantApi account" -} -}, -"typeVersion": 4.2 -}, -{ -"id": "258026b7-2dda-4165-bfe1-c4163b9caf78", -"name": "Majority Vote", -"type": "n8n-nodes-base.code", -"position": [ -840, --240 -], -"parameters": { -"language": "python", -"pythonCode": "from collections import Counter\n\ninput_json = _input.all()[0]\npoints = input_json['json']['result']['points']\nmajority_vote_two_most_common = Counter([point[\"payload\"][\"landscape_name\"] for point in points]).most_common(2)\n\nreturn [{\n \"json\": {\n \"result\": majority_vote_two_most_common \n }\n}]\n" -}, -"typeVersion": 2 -}, -{ -"id": "e83e7a0c-cb36-46d0-8908-86ee1bddf638", -"name": "Increase limitKNN", -"type": "n8n-nodes-base.set", -"position": [ -1240, --240 -], -"parameters": { -"options": {}, -"assignments": { -"assignments": [ -{ -"id": "0b5d257b-1b27-48bc-bec2-78649bc844cc", -"name": "limitKNN", -"type": "number", -"value": "={{ $('Propagate loop variables').item.json.limitKNN + 5}}" -}, -{ -"id": "afee4bb3-f78b-4355-945d-3776e33337a4", -"name": "ImageEmbedding", -"type": "array", -"value": "={{ $('Qdrant variables + embedding + KNN neigbours').first().json.ImageEmbedding }}" -}, -{ -"id": "701ed7ba-d112-4699-a611-c0c134757a6c", -"name": "qdrantCloudURL", -"type": "string", -"value": "={{ $('Qdrant variables + embedding + KNN neigbours').first().json.qdrantCloudURL }}" -}, -{ -"id": "f5612f78-e7d8-4124-9c3a-27bd5870c9bf", -"name": "collectionName", -"type": "string", -"value": "={{ $('Qdrant variables + embedding + KNN neigbours').first().json.collectionName }}" -} -] -} -}, -"typeVersion": 3.4 -}, -{ -"id": "8edbff53-cba6-4491-9d5e-bac7ad6db418", -"name": "Propagate loop variables", -"type": "n8n-nodes-base.set", -"position": [ -640, --240 -], -"parameters": { -"options": {}, -"assignments": { -"assignments": [ -{ -"id": "880838bf-2be2-4f5f-9417-974b3cbee163", -"name": "=limitKNN", -"type": "number", -"value": "={{ $json.result.points.length}}" -}, -{ -"id": 
"5fff2bea-f644-4fd9-ad04-afbecd19a5bc", -"name": "result", -"type": "object", -"value": "={{ $json.result }}" -} -] -} -}, -"typeVersion": 3.4 -}, -{ -"id": "6fad4cc0-f02c-429d-aa4e-0d69ebab9d65", -"name": "Image Test URL", -"type": "n8n-nodes-base.set", -"position": [ --320, --240 -], -"parameters": { -"options": {}, -"assignments": { -"assignments": [ -{ -"id": "46ceba40-fb25-450c-8550-d43d8b8aa94c", -"name": "imageURL", -"type": "string", -"value": "={{ $json.query.imageURL }}" -} -] -} -}, -"typeVersion": 3.4 -}, -{ -"id": "f02e79e2-32c8-4af0-8bf9-281119b23cc0", -"name": "Return class", -"type": "n8n-nodes-base.set", -"position": [ -1240, -0 -], -"parameters": { -"options": {}, -"assignments": { -"assignments": [ -{ -"id": "bd8ca541-8758-4551-b667-1de373231364", -"name": "class", -"type": "string", -"value": "={{ $json.result[0][0] }}" -} -] -} -}, -"typeVersion": 3.4 -}, -{ -"id": "83ca90fb-d5d5-45f4-8957-4363a4baf8ed", -"name": "Check tie", -"type": "n8n-nodes-base.if", -"position": [ -1040, --240 -], -"parameters": { -"options": {}, -"conditions": { -"options": { -"version": 2, -"leftValue": "", -"caseSensitive": true, -"typeValidation": "strict" -}, -"combinator": "and", -"conditions": [ -{ -"id": "980663f6-9d7d-4e88-87b9-02030882472c", -"operator": { -"type": "number", -"operation": "gt" -}, -"leftValue": "={{ $json.result.length }}", -"rightValue": 1 -}, -{ -"id": "9f46fdeb-0f89-4010-99af-624c1c429d6a", -"operator": { -"type": "number", -"operation": "equals" -}, -"leftValue": "={{ $json.result[0][1] }}", -"rightValue": "={{ $json.result[1][1] }}" -}, -{ -"id": "c59bc4fe-6821-4639-8595-fdaf4194c1e1", -"operator": { -"type": "number", -"operation": "lte" -}, -"leftValue": "={{ $('Propagate loop variables').item.json.limitKNN }}", -"rightValue": 100 -} -] -} -}, -"typeVersion": 2.2 -}, -{ -"id": "847ced21-4cfd-45d8-98fa-b578adc054d6", -"name": "Qdrant variables + embedding + KNN neigbours", -"type": "n8n-nodes-base.set", -"position": [ -120, --240 -], 
-"parameters": { -"options": {}, -"assignments": { -"assignments": [ -{ -"id": "de66070d-5e74-414e-8af7-d094cbc26f62", -"name": "ImageEmbedding", -"type": "array", -"value": "={{ $json.data[0].embedding }}" -}, -{ -"id": "58b7384d-fd0c-44aa-9f8e-0306a99be431", -"name": "qdrantCloudURL", -"type": "string", -"value": "=https://152bc6e2-832a-415c-a1aa-fb529f8baf8d.eu-central-1-0.aws.cloud.qdrant.io" -}, -{ -"id": "e34c4d88-b102-43cc-a09e-e0553f2da23a", -"name": "collectionName", -"type": "string", -"value": "=land-use" -}, -{ -"id": "db37e18d-340b-4624-84f6-df993af866d6", -"name": "limitKNN", -"type": "number", -"value": "=10" -} -] -} -}, -"typeVersion": 3.4 -}, -{ -"id": "d1bc4edc-37d2-43ac-8d8b-560453e68d1f", -"name": "Sticky Note", -"type": "n8n-nodes-base.stickyNote", -"position": [ --940, --120 -], -"parameters": { -"color": 6, -"width": 320, -"height": 540, -"content": "Here we're classifying existing types of satellite imagery of land types:\n- 'agricultural',\n- 'airplane',\n- 'baseballdiamond',\n- 'beach',\n- 'buildings',\n- 'chaparral',\n- 'denseresidential',\n- 'forest',\n- 'freeway',\n- 'golfcourse',\n- 'harbor',\n- 'intersection',\n- 'mediumresidential',\n- 'mobilehomepark',\n- 'overpass',\n- 'parkinglot',\n- 'river',\n- 'runway',\n- 'sparseresidential',\n- 'storagetanks',\n- 'tenniscourt'\n" -}, -"typeVersion": 1 -}, -{ -"id": "13560a31-3c72-43b8-9635-3f9ca11f23c9", -"name": "Sticky Note1", -"type": "n8n-nodes-base.stickyNote", -"position": [ --520, --460 -], -"parameters": { -"color": 6, -"content": "I tested this KNN classifier on a whole `test` set of a dataset (it's not a part of the collection, only `validation` + `train` parts). Accuracy of classification on `test` is **93.24%**, no fine-tuning, no metric learning." 
-}, -"typeVersion": 1 -}, -{ -"id": "8c9dcbcb-a1ad-430f-b7dd-e19b5645b0f6", -"name": "Execute Workflow Trigger", -"type": "n8n-nodes-base.executeWorkflowTrigger", -"position": [ --520, --240 -], -"parameters": {}, -"typeVersion": 1 -}, -{ -"id": "b36fb270-2101-45e9-bb5c-06c4e07b769c", -"name": "Sticky Note2", -"type": "n8n-nodes-base.stickyNote", -"position": [ --1080, --520 -], -"parameters": { -"width": 460, -"height": 380, -"content": "## KNN classification workflow-tool\n### This n8n template takes an image URL (as anomaly detection tool does), and as output, it returns a class of the object on the image (out of land types list)\n\n* An image URL is received via the Execute Workflow Trigger, which is then sent to the Voyage.ai Multimodal Embeddings API to fetch its embedding.\n* The image's embedding vector is then used to query Qdrant, returning a set of X similar images with pre-labeled classes.\n* Majority voting is done for classes of neighbouring images.\n* A loop is used to resolve scenarios where there is a tie in Majority Voting (for example, we have 5 \"forest\" and 5 \"beach\"), and we increase the number of neighbours to retrieve.\n* When the loop finally resolves, the identified class is returned to the calling workflow." -}, -"typeVersion": 1 -}, -{ -"id": "51ece7fc-fd85-4d20-ae26-4df2d3893251", -"name": "Sticky Note3", -"type": "n8n-nodes-base.stickyNote", -"position": [ -120, --40 -], -"parameters": { -"height": 200, -"content": "Variables define another Qdrant's collection with landscapes (uploaded similarly as the crops collection, don't forget to switch it with your data) + amount of neighbours **limitKNN** in the database we'll use for an input image classification." 
-}, -"typeVersion": 1 -}, -{ -"id": "7aad5904-eb0b-4389-9d47-cc91780737ba", -"name": "Sticky Note4", -"type": "n8n-nodes-base.stickyNote", -"position": [ --180, --60 -], -"parameters": { -"height": 80, -"content": "Similarly to anomaly detection tool, we're embedding input image with the Voyage model" -}, -"typeVersion": 1 -}, -{ -"id": "d3702707-ee4a-481f-82ca-d9386f5b7c8a", -"name": "Sticky Note5", -"type": "n8n-nodes-base.stickyNote", -"position": [ -440, --500 -], -"parameters": { -"width": 740, -"height": 200, -"content": "## Tie loop\nHere we're [querying](https://api.qdrant.tech/api-reference/search/query-points) Qdrant, getting **limitKNN** nearest neighbours to our image <*Query Qdrant node*>, parsing their classes from payloads (images were pre-labeled & uploaded with their labels to Qdrant) & calculating the most frequent class name <*Majority Vote node*>. If there is a tie <*check tie node*> in 2 most common classes, for example, we have 5 \"forest\" and 5 \"harbor\", we repeat the procedure with the number of neighbours increased by 5 <*propagate loop variables node* and *increase limitKNN node*>.\nIf there is no tie, or we have already checked 100 neighbours, we exit the loop <*check tie node*> and return the class-answer." -}, -"typeVersion": 1 -}, -{ -"id": "d26911bb-0442-4adc-8511-7cec2d232393", -"name": "Sticky Note6", -"type": "n8n-nodes-base.stickyNote", -"position": [ -1240, -160 -], -"parameters": { -"height": 80, -"content": "Here, we extract the name of the input image class decided by the Majority Vote\n" -}, -"typeVersion": 1 -}, -{ -"id": "84ffc859-1d5c-4063-9051-3587f30a0017", -"name": "Sticky Note10", -"type": "n8n-nodes-base.stickyNote", -"position": [ --520, -80 -], -"parameters": { -"color": 4, -"width": 540, -"height": 260, -"content": "### KNN (k nearest neighbours) classification\n1. The first pipeline is uploading (lands) dataset to Qdrant's collection.\n2. 
**This is the KNN classifier tool, which takes any image as input and classifies it based on queries to the Qdrant (lands) collection.**\n\n### To recreate it\nYou'll have to upload [lands](https://www.kaggle.com/datasets/apollo2506/landuse-scene-classification) dataset from Kaggle to your own Google Storage bucket, and re-create APIs/connections to [Qdrant Cloud](https://qdrant.tech/documentation/quickstart-cloud/) (you can use **Free Tier** cluster), Voyage AI API & Google Cloud Storage\n\n**In general, pipelines are adaptable to any dataset of images**\n" -}, -"typeVersion": 1 -} -], -"active": false, -"pinData": { -"Execute Workflow Trigger": [ -{ -"json": { -"query": { -"imageURL": "https://storage.googleapis.com/n8n-qdrant-demo/land-use/images_train_test_val/test/buildings/buildings_000323.png" -} -} -} -] -}, -"settings": { -"executionOrder": "v1" -}, -"versionId": "c8cfe732-fd78-4985-9540-ed8cb2de7ef3", -"connections": { -"Check tie": { -"main": [ -[ -{ -"node": "Increase limitKNN", -"type": "main", -"index": 0 -} -], -[ -{ -"node": "Return class", -"type": "main", -"index": 0 -} -] -] -}, -"Embed image": { -"main": [ -[ -{ -"node": "Qdrant variables + embedding + KNN neigbours", -"type": "main", -"index": 0 -} -] -] -}, -"Query Qdrant": { -"main": [ -[ -{ -"node": "Propagate loop variables", -"type": "main", -"index": 0 -} -] -] -}, -"Majority Vote": { -"main": [ -[ -{ -"node": "Check tie", -"type": "main", -"index": 0 -} -] -] -}, -"Image Test URL": { -"main": [ -[ -{ -"node": "Embed image", -"type": "main", -"index": 0 -} -] -] -}, -"Increase limitKNN": { -"main": [ -[ -{ -"node": "Query Qdrant", -"type": "main", -"index": 0 -} -] -] -}, -"Execute Workflow Trigger": { -"main": [ -[ -{ -"node": "Image Test URL", -"type": "main", -"index": 0 -} -] -] -}, -"Propagate loop variables": { -"main": [ -[ -{ -"node": "Majority Vote", -"type": "main", -"index": 0 -} -] -] -}, -"Qdrant variables + embedding + KNN neigbours": { -"main": [ -[ -{ -"node": "Query 
Qdrant", -"type": "main", -"index": 0 -} -] -] -} -} -} \ No newline at end of file diff --git a/AI_Research_RAG_and_Data_Analysis/Vector Database as a Big Data Analysis Tool for AI Agents [2_3 - anomaly].txt b/AI_Research_RAG_and_Data_Analysis/Vector Database as a Big Data Analysis Tool for AI Agents [2_3 - anomaly].json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Vector Database as a Big Data Analysis Tool for AI Agents [2_3 - anomaly].txt rename to AI_Research_RAG_and_Data_Analysis/Vector Database as a Big Data Analysis Tool for AI Agents [2_3 - anomaly].json diff --git a/AI_Research_RAG_and_Data_Analysis/Vector Database as a Big Data Analysis Tool for AI Agents [3_3 - anomaly].txt b/AI_Research_RAG_and_Data_Analysis/Vector Database as a Big Data Analysis Tool for AI Agents [3_3 - anomaly].json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Vector Database as a Big Data Analysis Tool for AI Agents [3_3 - anomaly].txt rename to AI_Research_RAG_and_Data_Analysis/Vector Database as a Big Data Analysis Tool for AI Agents [3_3 - anomaly].json diff --git a/AI_Research_RAG_and_Data_Analysis/Visual Regression Testing with Apify and AI Vision Model.txt b/AI_Research_RAG_and_Data_Analysis/Visual Regression Testing with Apify and AI Vision Model.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/Visual Regression Testing with Apify and AI Vision Model.txt rename to AI_Research_RAG_and_Data_Analysis/Visual Regression Testing with Apify and AI Vision Model.json diff --git a/AI_Research_RAG_and_Data_Analysis/🔍 Perplexity Research to HTML_ AI-Powered Content Creation.txt b/AI_Research_RAG_and_Data_Analysis/🔍 Perplexity Research to HTML_ AI-Powered Content Creation.json similarity index 100% rename from AI_Research_RAG_and_Data_Analysis/🔍 Perplexity Research to HTML_ AI-Powered Content Creation.txt rename to AI_Research_RAG_and_Data_Analysis/🔍 Perplexity Research to HTML_ AI-Powered Content Creation.json diff 
--git a/Airtable/AI Agent for project management and meetings with Airtable and Fireflies.txt b/Airtable/AI Agent for project management and meetings with Airtable and Fireflies.json similarity index 100% rename from Airtable/AI Agent for project management and meetings with Airtable and Fireflies.txt rename to Airtable/AI Agent for project management and meetings with Airtable and Fireflies.json diff --git a/Airtable/AI Agent to chat with Airtable and analyze data.txt b/Airtable/AI Agent to chat with Airtable and analyze data.json similarity index 95% rename from Airtable/AI Agent to chat with Airtable and analyze data.txt rename to Airtable/AI Agent to chat with Airtable and analyze data.json index 5a62105..b8ce8b5 100644 --- a/Airtable/AI Agent to chat with Airtable and analyze data.txt +++ b/Airtable/AI Agent to chat with Airtable and analyze data.json @@ -1394,4 +1394,5 @@ ] } } -}AI Agent to chat with Airtable and analyze data \ No newline at end of file + +} diff --git a/Airtable/Get Airtable data via AI and Obsidian Notes.txt b/Airtable/Get Airtable data via AI and Obsidian Notes.json similarity index 100% rename from Airtable/Get Airtable data via AI and Obsidian Notes.txt rename to Airtable/Get Airtable data via AI and Obsidian Notes.json diff --git a/Airtable/Handling Job Application Submissions with AI and n8n Forms.txt b/Airtable/Handling Job Application Submissions with AI and n8n Forms.json similarity index 100% rename from Airtable/Handling Job Application Submissions with AI and n8n Forms.txt rename to Airtable/Handling Job Application Submissions with AI and n8n Forms.json diff --git a/Airtable/vAssistant for Hubspot Chat using OpenAi and Airtable.txt b/Airtable/vAssistant for Hubspot Chat using OpenAi and Airtable.json similarity index 100% rename from Airtable/vAssistant for Hubspot Chat using OpenAi and Airtable.txt rename to Airtable/vAssistant for Hubspot Chat using OpenAi and Airtable.json diff --git a/Database_and_Storage/Chat with 
Postgresql Database.txt b/Database_and_Storage/Chat with Postgresql Database.json similarity index 100% rename from Database_and_Storage/Chat with Postgresql Database.txt rename to Database_and_Storage/Chat with Postgresql Database.json diff --git a/Database_and_Storage/Generate SQL queries from schema only - AI-powered.txt b/Database_and_Storage/Generate SQL queries from schema only - AI-powered.json similarity index 100% rename from Database_and_Storage/Generate SQL queries from schema only - AI-powered.txt rename to Database_and_Storage/Generate SQL queries from schema only - AI-powered.json diff --git a/Database_and_Storage/MongoDB AI Agent - Intelligent Movie Recommendations.txt b/Database_and_Storage/MongoDB AI Agent - Intelligent Movie Recommendations.json similarity index 100% rename from Database_and_Storage/MongoDB AI Agent - Intelligent Movie Recommendations.txt rename to Database_and_Storage/MongoDB AI Agent - Intelligent Movie Recommendations.json diff --git a/Database_and_Storage/Supabase Insertion & Upsertion & Retrieval.txt b/Database_and_Storage/Supabase Insertion & Upsertion & Retrieval.json similarity index 100% rename from Database_and_Storage/Supabase Insertion & Upsertion & Retrieval.txt rename to Database_and_Storage/Supabase Insertion & Upsertion & Retrieval.json diff --git a/Database_and_Storage/Talk to your SQLite database with a LangChain AI Agent.txt b/Database_and_Storage/Talk to your SQLite database with a LangChain AI Agent.json similarity index 100% rename from Database_and_Storage/Talk to your SQLite database with a LangChain AI Agent.txt rename to Database_and_Storage/Talk to your SQLite database with a LangChain AI Agent.json diff --git a/Discord/Discord AI-powered bot.txt b/Discord/Discord AI-powered bot.json similarity index 100% rename from Discord/Discord AI-powered bot.txt rename to Discord/Discord AI-powered bot.json diff --git a/Discord/Send daily translated Calvin and Hobbes Comics to Discord.txt b/Discord/Send daily 
translated Calvin and Hobbes Comics to Discord.json similarity index 100% rename from Discord/Send daily translated Calvin and Hobbes Comics to Discord.txt rename to Discord/Send daily translated Calvin and Hobbes Comics to Discord.json diff --git a/Discord/Share YouTube Videos with AI Summaries on Discord.txt b/Discord/Share YouTube Videos with AI Summaries on Discord.json similarity index 100% rename from Discord/Share YouTube Videos with AI Summaries on Discord.txt rename to Discord/Share YouTube Videos with AI Summaries on Discord.json diff --git a/Forms_and_Surveys/Conversational Interviews with AI Agents and n8n Forms.txt b/Forms_and_Surveys/Conversational Interviews with AI Agents and n8n Forms.json similarity index 100% rename from Forms_and_Surveys/Conversational Interviews with AI Agents and n8n Forms.txt rename to Forms_and_Surveys/Conversational Interviews with AI Agents and n8n Forms.json diff --git a/Forms_and_Surveys/Email Subscription Service with n8n Forms, Airtable and AI (1).txt b/Forms_and_Surveys/Email Subscription Service with n8n Forms, Airtable and AI.json similarity index 100% rename from Forms_and_Surveys/Email Subscription Service with n8n Forms, Airtable and AI (1).txt rename to Forms_and_Surveys/Email Subscription Service with n8n Forms, Airtable and AI.json diff --git a/Forms_and_Surveys/Email Subscription Service with n8n Forms, Airtable and AI.txt b/Forms_and_Surveys/Email Subscription Service with n8n Forms, Airtable and AI.txt deleted file mode 100644 index c268937..0000000 --- a/Forms_and_Surveys/Email Subscription Service with n8n Forms, Airtable and AI.txt +++ /dev/null @@ -1,1536 +0,0 @@ -{ -"nodes": [ -{ -"id": "4dd52c72-9a9b-4db4-8de5-5b12b1e5c4be", -"name": "Schedule Trigger", -"type": "n8n-nodes-base.scheduleTrigger", -"position": [ -180, -1480 -], -"parameters": { -"rule": { -"interval": [ -{ -"triggerAtHour": 9 -} -] -} -}, -"typeVersion": 1.2 -}, -{ -"id": "9226181c-b84c-4ea1-a5b4-eedb6c62037b", -"name": "Search daily", 
-"type": "n8n-nodes-base.airtable", -"position": [ -440, -1480 -], -"parameters": { -"base": { -"__rl": true, -"mode": "list", -"value": "appL3dptT6ZTSzY9v", -"cachedResultUrl": "https://airtable.com/appL3dptT6ZTSzY9v", -"cachedResultName": "Scheduled Emails" -}, -"table": { -"__rl": true, -"mode": "list", -"value": "tblzR9vSuFUzlQNMI", -"cachedResultUrl": "https://airtable.com/appL3dptT6ZTSzY9v/tblzR9vSuFUzlQNMI", -"cachedResultName": "Table 1" -}, -"options": {}, -"operation": "search", -"filterByFormula": "AND({Status} = 'active', {Interval} = 'daily')" -}, -"credentials": { -"airtableTokenApi": { -"id": "Und0frCQ6SNVX3VV", -"name": "Airtable Personal Access Token account" -} -}, -"typeVersion": 2.1 -}, -{ -"id": "1a3b6224-2f66-41c6-8b3d-be286cf16370", -"name": "Search weekly", -"type": "n8n-nodes-base.airtable", -"position": [ -440, -1660 -], -"parameters": { -"base": { -"__rl": true, -"mode": "list", -"value": "appL3dptT6ZTSzY9v", -"cachedResultUrl": "https://airtable.com/appL3dptT6ZTSzY9v", -"cachedResultName": "Scheduled Emails" -}, -"table": { -"__rl": true, -"mode": "list", -"value": "tblzR9vSuFUzlQNMI", -"cachedResultUrl": "https://airtable.com/appL3dptT6ZTSzY9v/tblzR9vSuFUzlQNMI", -"cachedResultName": "Table 1" -}, -"options": {}, -"operation": "search", -"filterByFormula": "=AND(\n {Status} = 'active', \n {Interval} = 'weekly', \n {Last Sent} <= DATEADD(TODAY(), -7, 'days')\n)" -}, -"credentials": { -"airtableTokenApi": { -"id": "Und0frCQ6SNVX3VV", -"name": "Airtable Personal Access Token account" -} -}, -"typeVersion": 2.1 -}, -{ -"id": "1ea47e14-0a28-4780-95c7-31e24eb724d5", -"name": "confirmation email1", -"type": "n8n-nodes-base.gmail", -"position": [ -620, -820 -], -"webhookId": "dd8bd6df-2013-4f8d-a2cc-cd9b3913e3d2", -"parameters": { -"sendTo": "={{ $('Subscribe Form').item.json.email }}", -"message": "=This is to confirm your request to subscribe to \"Learn something every day!\" - a free service to send you facts about your favourite 
topics.\n\nTopic: {{ $('Subscribe Form').item.json.topic }}\nSchedule: {{ $('Subscribe Form').item.json.frequency }}", -"options": { -"appendAttribution": false -}, -"subject": "Learn something every day confirmation" -}, -"credentials": { -"gmailOAuth2": { -"id": "Sf5Gfl9NiFTNXFWb", -"name": "Gmail account" -} -}, -"typeVersion": 2.1 -}, -{ -"id": "d95262af-1b52-4f9c-8346-183b4eee8544", -"name": "Execute Workflow", -"type": "n8n-nodes-base.executeWorkflow", -"position": [ -1140, -1480 -], -"parameters": { -"mode": "each", -"options": { -"waitForSubWorkflow": false -}, -"workflowId": { -"__rl": true, -"mode": "id", -"value": "={{ $workflow.id }}" -} -}, -"typeVersion": 1.1 -}, -{ -"id": "075292af-7a66-4275-ac2d-3c392189a10c", -"name": "Create Event", -"type": "n8n-nodes-base.set", -"position": [ -980, -1480 -], -"parameters": { -"options": {}, -"assignments": { -"assignments": [ -{ -"id": "b28a0142-a028-471a-8180-9883e930feea", -"name": "email", -"type": "string", -"value": "={{ $json.Email }}" -}, -{ -"id": "970f5495-05df-42b6-a422-b2ac27f8eb95", -"name": "topic", -"type": "string", -"value": "={{ $json.Topic }}" -}, -{ -"id": "e871c431-948f-4b80-aa17-1e4266674663", -"name": "interval", -"type": "string", -"value": "={{ $json.Interval }}" -}, -{ -"id": "9b72597d-1446-4ef3-86e5-0a071c69155b", -"name": "id", -"type": "string", -"value": "={{ $json.id }}" -}, -{ -"id": "b17039c2-14a2-4811-9528-88ae963e44f7", -"name": "created_at", -"type": "string", -"value": "={{ $json.Created }}" -} -] -} -}, -"typeVersion": 3.4 -}, -{ -"id": "28776aaf-6bd9-4f9f-bcf0-3d4401a74219", -"name": "Execute Workflow Trigger", -"type": "n8n-nodes-base.executeWorkflowTrigger", -"position": [ -1360, -1480 -], -"parameters": {}, -"typeVersion": 1 -}, -{ -"id": "0eb62e75-228b-452b-80ab-f9ef3ad33204", -"name": "Unsubscribe Form", -"type": "n8n-nodes-base.formTrigger", -"position": [ -180, -1160 -], -"webhookId": "e64db96d-5e61-40d5-88fb-761621a829ab", -"parameters": { -"options": { -"path": 
"free-factoids-unsubscribe" -}, -"formTitle": "Unsubscribe from Learn Something Every Day", -"formFields": { -"values": [ -{ -"fieldLabel": "ID", -"requiredField": true -}, -{ -"fieldType": "dropdown", -"fieldLabel": "Reason For Unsubscribe", -"multiselect": true, -"fieldOptions": { -"values": [ -{ -"option": "Emails not relevant" -}, -{ -"option": "Too many Emails" -}, -{ -"option": "I did not sign up to this service" -} -] -} -} -] -}, -"formDescription": "We're sorry to see you go! Please take a moment to help us improve the service." -}, -"typeVersion": 2.2 -}, -{ -"id": "f889efe9-dc3c-428b-ad8e-4f7d17f23e75", -"name": "Set Email Vars", -"type": "n8n-nodes-base.set", -"position": [ -2500, -1480 -], -"parameters": { -"options": {}, -"assignments": { -"assignments": [ -{ -"id": "62a684fb-16f9-4326-8eeb-777d604b305a", -"name": "to", -"type": "string", -"value": "={{ $('Execute Workflow Trigger').first().json.email }},jim@height.io" -}, -{ -"id": "4270849e-c805-4580-9088-e8d1c3ef2fb4", -"name": "subject", -"type": "string", -"value": "=Your {{ $('Execute Workflow Trigger').first().json.interval }} factoid" -}, -{ -"id": "81d0e897-2496-4a3c-b16c-9319338f899f", -"name": "message", -"type": "string", -"value": "=

\nYou asked about \"{{ $('Execution Data').first().json.topic.replace('\"','') }}\"\n

\n

\n{{ $('Content Generation Agent').first().json.output }}\n

" -}, -{ -"id": "ee05de7b-5342-4deb-8118-edaf235d92cc", -"name": "unsubscribe_link", -"type": "string", -"value": "=https:///form/inspiration-unsubscribe?ID={{ $('Execute Workflow Trigger').first().json.id }}" -} -] -}, -"includeOtherFields": true -}, -"typeVersion": 3.4 -}, -{ -"id": "84741e6d-f5be-440d-8633-4eb30ccce170", -"name": "Log Last Sent", -"type": "n8n-nodes-base.airtable", -"position": [ -2860, -1480 -], -"parameters": { -"base": { -"__rl": true, -"mode": "list", -"value": "appL3dptT6ZTSzY9v", -"cachedResultUrl": "https://airtable.com/appL3dptT6ZTSzY9v", -"cachedResultName": "Scheduled Emails" -}, -"table": { -"__rl": true, -"mode": "list", -"value": "tblzR9vSuFUzlQNMI", -"cachedResultUrl": "https://airtable.com/appL3dptT6ZTSzY9v/tblzR9vSuFUzlQNMI", -"cachedResultName": "Table 1" -}, -"columns": { -"value": { -"id": "={{ $('Execute Workflow Trigger').first().json.id }}", -"Last Sent": "2024-11-29T13:34:11" -}, -"schema": [ -{ -"id": "id", -"type": "string", -"display": true, -"removed": false, -"readOnly": true, -"required": false, -"displayName": "id", -"defaultMatch": true -}, -{ -"id": "Name", -"type": "string", -"display": true, -"removed": true, -"readOnly": false, -"required": false, -"displayName": "Name", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Email", -"type": "string", -"display": true, -"removed": true, -"readOnly": false, -"required": false, -"displayName": "Email", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Status", -"type": "options", -"display": true, -"options": [ -{ -"name": "inactive", -"value": "inactive" -}, -{ -"name": "active", -"value": "active" -} -], -"removed": true, -"readOnly": false, -"required": false, -"displayName": "Status", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Interval", -"type": "options", -"display": true, -"options": [ -{ -"name": "daily", -"value": "daily" -}, -{ -"name": "weekly", -"value": "weekly" -}, -{ -"name": "surprise", -"value": 
"surprise" -} -], -"removed": true, -"readOnly": false, -"required": false, -"displayName": "Interval", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Start Day", -"type": "options", -"display": true, -"options": [ -{ -"name": "Mon", -"value": "Mon" -}, -{ -"name": "Tue", -"value": "Tue" -}, -{ -"name": "Wed", -"value": "Wed" -}, -{ -"name": "Thu", -"value": "Thu" -}, -{ -"name": "Fri", -"value": "Fri" -}, -{ -"name": "Sat", -"value": "Sat" -}, -{ -"name": "Sun", -"value": "Sun" -} -], -"removed": true, -"readOnly": false, -"required": false, -"displayName": "Start Day", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Topic", -"type": "string", -"display": true, -"removed": true, -"readOnly": false, -"required": false, -"displayName": "Topic", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Created", -"type": "string", -"display": true, -"removed": true, -"readOnly": true, -"required": false, -"displayName": "Created", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Last Modified", -"type": "string", -"display": true, -"removed": true, -"readOnly": true, -"required": false, -"displayName": "Last Modified", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Last Sent", -"type": "dateTime", -"display": true, -"removed": false, -"readOnly": false, -"required": false, -"displayName": "Last Sent", -"defaultMatch": false, -"canBeUsedToMatch": true -} -], -"mappingMode": "defineBelow", -"matchingColumns": [ -"id" -] -}, -"options": {}, -"operation": "update" -}, -"credentials": { -"airtableTokenApi": { -"id": "Und0frCQ6SNVX3VV", -"name": "Airtable Personal Access Token account" -} -}, -"typeVersion": 2.1 -}, -{ -"id": "88f864d6-13fb-4f09-b22d-030d016678e1", -"name": "Search surprise", -"type": "n8n-nodes-base.airtable", -"position": [ -440, -1840 -], -"parameters": { -"base": { -"__rl": true, -"mode": "list", -"value": "appL3dptT6ZTSzY9v", -"cachedResultUrl": 
"https://airtable.com/appL3dptT6ZTSzY9v", -"cachedResultName": "Scheduled Emails" -}, -"table": { -"__rl": true, -"mode": "list", -"value": "tblzR9vSuFUzlQNMI", -"cachedResultUrl": "https://airtable.com/appL3dptT6ZTSzY9v/tblzR9vSuFUzlQNMI", -"cachedResultName": "Table 1" -}, -"options": {}, -"operation": "search", -"filterByFormula": "=AND(\n {Status} = 'active', \n {Interval} = 'surprise'\n)" -}, -"credentials": { -"airtableTokenApi": { -"id": "Und0frCQ6SNVX3VV", -"name": "Airtable Personal Access Token account" -} -}, -"typeVersion": 2.1 -}, -{ -"id": "28238d9a-7bc0-4a22-bb4e-a7a2827e4da3", -"name": "Should Send = True", -"type": "n8n-nodes-base.filter", -"position": [ -800, -1840 -], -"parameters": { -"options": {}, -"conditions": { -"options": { -"version": 2, -"leftValue": "", -"caseSensitive": true, -"typeValidation": "strict" -}, -"combinator": "and", -"conditions": [ -{ -"id": "9aaf9ae2-8f96-443a-8294-c04270296b22", -"operator": { -"type": "boolean", -"operation": "true", -"singleValue": true -}, -"leftValue": "={{ $json.should_send }}", -"rightValue": "" -} -] -} -}, -"typeVersion": 2.2 -}, -{ -"id": "3a46dd3d-48a6-40ca-8823-0516aa9f73a4", -"name": "Should Send?", -"type": "n8n-nodes-base.code", -"position": [ -620, -1840 -], -"parameters": { -"mode": "runOnceForEachItem", -"jsCode": "const luckyPick = Math.floor(Math.random() * 10) + 1;\n$input.item.json.should_send = luckyPick == 8;\nreturn $input.item;" -}, -"typeVersion": 2 -}, -{ -"id": "3611da19-920b-48e6-84a4-f7be0b3a78fc", -"name": "Create Subscriber", -"type": "n8n-nodes-base.airtable", -"position": [ -440, -820 -], -"parameters": { -"base": { -"__rl": true, -"mode": "list", -"value": "appL3dptT6ZTSzY9v", -"cachedResultUrl": "https://airtable.com/appL3dptT6ZTSzY9v", -"cachedResultName": "Scheduled Emails" -}, -"table": { -"__rl": true, -"mode": "list", -"value": "tblzR9vSuFUzlQNMI", -"cachedResultUrl": "https://airtable.com/appL3dptT6ZTSzY9v/tblzR9vSuFUzlQNMI", -"cachedResultName": "Table 1" -}, 
-"columns": { -"value": { -"Email": "={{ $json.email }}", -"Topic": "={{ $json.topic }}", -"Status": "active", -"Interval": "={{ $json.frequency }}", -"Start Day": "={{ $json.submittedAt.toDateTime().format('EEE') }}" -}, -"schema": [ -{ -"id": "Name", -"type": "string", -"display": true, -"removed": true, -"readOnly": false, -"required": false, -"displayName": "Name", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Email", -"type": "string", -"display": true, -"removed": false, -"readOnly": false, -"required": false, -"displayName": "Email", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Status", -"type": "options", -"display": true, -"options": [ -{ -"name": "inactive", -"value": "inactive" -}, -{ -"name": "active", -"value": "active" -} -], -"removed": false, -"readOnly": false, -"required": false, -"displayName": "Status", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Interval", -"type": "options", -"display": true, -"options": [ -{ -"name": "daily", -"value": "daily" -}, -{ -"name": "weekly", -"value": "weekly" -}, -{ -"name": "surprise", -"value": "surprise" -} -], -"removed": false, -"readOnly": false, -"required": false, -"displayName": "Interval", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Start Day", -"type": "options", -"display": true, -"options": [ -{ -"name": "Mon", -"value": "Mon" -}, -{ -"name": "Tue", -"value": "Tue" -}, -{ -"name": "Wed", -"value": "Wed" -}, -{ -"name": "Thu", -"value": "Thu" -}, -{ -"name": "Fri", -"value": "Fri" -}, -{ -"name": "Sat", -"value": "Sat" -}, -{ -"name": "Sun", -"value": "Sun" -} -], -"removed": false, -"readOnly": false, -"required": false, -"displayName": "Start Day", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Topic", -"type": "string", -"display": true, -"removed": false, -"readOnly": false, -"required": false, -"displayName": "Topic", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Created", -"type": 
"string", -"display": true, -"removed": true, -"readOnly": true, -"required": false, -"displayName": "Created", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Last Modified", -"type": "string", -"display": true, -"removed": true, -"readOnly": true, -"required": false, -"displayName": "Last Modified", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Last Sent", -"type": "dateTime", -"display": true, -"removed": true, -"readOnly": false, -"required": false, -"displayName": "Last Sent", -"defaultMatch": false, -"canBeUsedToMatch": true -} -], -"mappingMode": "defineBelow", -"matchingColumns": [ -"Email" -] -}, -"options": {}, -"operation": "upsert" -}, -"credentials": { -"airtableTokenApi": { -"id": "Und0frCQ6SNVX3VV", -"name": "Airtable Personal Access Token account" -} -}, -"typeVersion": 2.1 -}, -{ -"id": "2213a81f-53a9-4142-9586-e87b88710eec", -"name": "Update Subscriber", -"type": "n8n-nodes-base.airtable", -"position": [ -440, -1160 -], -"parameters": { -"base": { -"__rl": true, -"mode": "list", -"value": "appL3dptT6ZTSzY9v", -"cachedResultUrl": "https://airtable.com/appL3dptT6ZTSzY9v", -"cachedResultName": "Scheduled Emails" -}, -"table": { -"__rl": true, -"mode": "list", -"value": "tblzR9vSuFUzlQNMI", -"cachedResultUrl": "https://airtable.com/appL3dptT6ZTSzY9v/tblzR9vSuFUzlQNMI", -"cachedResultName": "Table 1" -}, -"columns": { -"value": { -"id": "={{ $json.ID }}", -"Status": "inactive" -}, -"schema": [ -{ -"id": "id", -"type": "string", -"display": true, -"removed": false, -"readOnly": true, -"required": false, -"displayName": "id", -"defaultMatch": true -}, -{ -"id": "Name", -"type": "string", -"display": true, -"removed": true, -"readOnly": false, -"required": false, -"displayName": "Name", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Email", -"type": "string", -"display": true, -"removed": true, -"readOnly": false, -"required": false, -"displayName": "Email", -"defaultMatch": false, -"canBeUsedToMatch": 
true -}, -{ -"id": "Status", -"type": "options", -"display": true, -"options": [ -{ -"name": "inactive", -"value": "inactive" -}, -{ -"name": "active", -"value": "active" -} -], -"removed": false, -"readOnly": false, -"required": false, -"displayName": "Status", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Interval", -"type": "options", -"display": true, -"options": [ -{ -"name": "daily", -"value": "daily" -}, -{ -"name": "weekly", -"value": "weekly" -} -], -"removed": true, -"readOnly": false, -"required": false, -"displayName": "Interval", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Start Day", -"type": "options", -"display": true, -"options": [ -{ -"name": "Mon", -"value": "Mon" -}, -{ -"name": "Tue", -"value": "Tue" -}, -{ -"name": "Wed", -"value": "Wed" -}, -{ -"name": "Thu", -"value": "Thu" -}, -{ -"name": "Fri", -"value": "Fri" -}, -{ -"name": "Sat", -"value": "Sat" -}, -{ -"name": "Sun", -"value": "Sun" -} -], -"removed": true, -"readOnly": false, -"required": false, -"displayName": "Start Day", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Topic", -"type": "string", -"display": true, -"removed": true, -"readOnly": false, -"required": false, -"displayName": "Topic", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Created", -"type": "string", -"display": true, -"removed": true, -"readOnly": true, -"required": false, -"displayName": "Created", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Last Modified", -"type": "string", -"display": true, -"removed": true, -"readOnly": true, -"required": false, -"displayName": "Last Modified", -"defaultMatch": false, -"canBeUsedToMatch": true -} -], -"mappingMode": "defineBelow", -"matchingColumns": [ -"id" -] -}, -"options": {}, -"operation": "update" -}, -"credentials": { -"airtableTokenApi": { -"id": "Und0frCQ6SNVX3VV", -"name": "Airtable Personal Access Token account" -} -}, -"typeVersion": 2.1 -}, -{ -"id": 
"c94ec18b-e0cf-4859-8b89-23abdd63739c", -"name": "Sticky Note", -"type": "n8n-nodes-base.stickyNote", -"position": [ -900, -1280 -], -"parameters": { -"color": 7, -"width": 335, -"height": 173, -"content": "### 4. Using Subworkflows to run executions concurrently\nThis configuration is desired when sequential execution is slow and unnecessary. Also if one email fails, it doesn't fail the execution for everyone else." -}, -"typeVersion": 1 -}, -{ -"id": "c14cab28-13eb-4d91-8578-8187a95a8909", -"name": "Sticky Note1", -"type": "n8n-nodes-base.stickyNote", -"position": [ -180, -700 -], -"parameters": { -"color": 7, -"width": 380, -"height": 80, -"content": "### 1. Subscribe flow\nUse a form to allow users to subscribe to the service." -}, -"typeVersion": 1 -}, -{ -"id": "0e44ada0-f8a7-440e-aded-33b446190a08", -"name": "Sticky Note2", -"type": "n8n-nodes-base.stickyNote", -"position": [ -180, -1020 -], -"parameters": { -"color": 7, -"width": 355, -"height": 115, -"content": "### 2. Unsubscribe flow\n* Uses Form's pre-fill field feature to identify user\n* Doesn't use \"email\" as identifier so you can't unsubscribe others" -}, -"typeVersion": 1 -}, -{ -"id": "e67bdffe-ccfc-4818-990d-b2a5ab613035", -"name": "Sticky Note3", -"type": "n8n-nodes-base.stickyNote", -"position": [ -180, -1340 -], -"parameters": { -"color": 7, -"width": 347, -"height": 114, -"content": "### 3. Scheduled Trigger\n* Runs every day at 9am\n* Handles all 3 frequency types\n* Send emails concurrently" -}, -"typeVersion": 1 -}, -{ -"id": "ce7d5310-7170-46d3-b8d8-3f97407f9dfd", -"name": "Subscribe Form", -"type": "n8n-nodes-base.formTrigger", -"position": [ -180, -820 -], -"webhookId": "c6abe3e3-ba87-4124-a227-84e253581b58", -"parameters": { -"options": { -"path": "free-factoids-subscribe", -"appendAttribution": false, -"respondWithOptions": { -"values": { -"formSubmittedText": "Thanks! Your factoid is on its way!" 
-} -} -}, -"formTitle": "Learn something every day!", -"formFields": { -"values": [ -{ -"fieldType": "textarea", -"fieldLabel": "topic", -"placeholder": "What topic(s) would you like to learn about?", -"requiredField": true -}, -{ -"fieldType": "email", -"fieldLabel": "email", -"placeholder": "eg. jim@example.com", -"requiredField": true -}, -{ -"fieldType": "dropdown", -"fieldLabel": "frequency", -"fieldOptions": { -"values": [ -{ -"option": "daily" -}, -{ -"option": "weekly" -}, -{ -"option": "surprise me" -} -] -}, -"requiredField": true -} -] -}, -"formDescription": "Get a fact a day (or week) about any subject sent to your inbox." -}, -"typeVersion": 2.2 -}, -{ -"id": "a5d50886-7d6b-4bf8-b376-b23c12a60608", -"name": "Execution Data", -"type": "n8n-nodes-base.executionData", -"position": [ -1560, -1480 -], -"parameters": { -"dataToSave": { -"values": [ -{ -"key": "email", -"value": "={{ $json.email }}" -} -] -} -}, -"typeVersion": 1 -}, -{ -"id": "69b40d8d-7734-47f1-89fe-9ea0378424b7", -"name": "Window Buffer Memory", -"type": "@n8n/n8n-nodes-langchain.memoryBufferWindow", -"position": [ -1860, -1680 -], -"parameters": { -"sessionKey": "=scheduled_send_{{ $json.email }}", -"sessionIdType": "customKey" -}, -"typeVersion": 1.3 -}, -{ -"id": "f83cff18-f41f-4a63-9d43-7e3947aae386", -"name": "Wikipedia", -"type": "@n8n/n8n-nodes-langchain.toolWikipedia", -"position": [ -2020, -1680 -], -"parameters": {}, -"typeVersion": 1 -}, -{ -"id": "77457037-e3ab-42f1-948b-b994d42f2f6e", -"name": "Content Generation Agent", -"type": "@n8n/n8n-nodes-langchain.agent", -"position": [ -1780, -1460 -], -"parameters": { -"text": "=Generate an new factoid on the following topic: \"{{ $json.topic.replace('\"','') }}\"\nEnsure it is unique and not one generated previously.", -"options": {}, -"promptType": "define" -}, -"typeVersion": 1.7 -}, -{ -"id": "cdfdd870-48b6-4c7d-a7d1-a22d70423e37", -"name": "Groq Chat Model", -"type": "@n8n/n8n-nodes-langchain.lmChatGroq", -"position": [ -1720, 
-1680 -], -"parameters": { -"model": "llama-3.3-70b-versatile", -"options": {} -}, -"credentials": { -"groqApi": { -"id": "02xZ4o87lUMUFmbT", -"name": "Groq account" -} -}, -"typeVersion": 1 -}, -{ -"id": "87df322d-a544-476f-b2ff-83feb619fe7f", -"name": "Generate Image", -"type": "@n8n/n8n-nodes-langchain.openAi", -"position": [ -2120, -1460 -], -"parameters": { -"prompt": "=Generate a child-friendly illustration which compliments the following paragraph:\n{{ $json.output }}", -"options": {}, -"resource": "image" -}, -"credentials": { -"openAiApi": { -"id": "8gccIjcuf3gvaoEr", -"name": "OpenAi account" -} -}, -"typeVersion": 1.7 -}, -{ -"id": "5c8d9e72-4015-44da-b5d5-829864d33672", -"name": "Resize Image", -"type": "n8n-nodes-base.editImage", -"position": [ -2280, -1460 -], -"parameters": { -"width": 480, -"height": 360, -"options": {}, -"operation": "resize" -}, -"typeVersion": 1 -}, -{ -"id": "a9939fad-98b3-4894-aae0-c11fa40d09da", -"name": "Send Message", -"type": "n8n-nodes-base.gmail", -"position": [ -2680, -1480 -], -"webhookId": "dd8bd6df-2013-4f8d-a2cc-cd9b3913e3d2", -"parameters": { -"sendTo": "={{ $json.to }}", -"message": "=\n\n\n \n \n {{ $json.subject }}\n\n\n {{ $json.message }}\n

\nUnsubscribe\n

\n\n\n", -"options": { -"attachmentsUi": { -"attachmentsBinary": [ -{} -] -}, -"appendAttribution": false -}, -"subject": "={{ $json.subject }}" -}, -"credentials": { -"gmailOAuth2": { -"id": "Sf5Gfl9NiFTNXFWb", -"name": "Gmail account" -} -}, -"typeVersion": 2.1 -}, -{ -"id": "10b6ad35-fc1c-47a2-b234-5de3557d1164", -"name": "Sticky Note4", -"type": "n8n-nodes-base.stickyNote", -"position": [ -1320, -1660 -], -"parameters": { -"color": 7, -"width": 335, -"height": 113, -"content": "### 5. Use Execution Data to Filter Logs\nIf you've registered for community+ or are on n8n cloud, best practice is to use execution node to allow filtering of execution logs." -}, -"typeVersion": 1 -}, -{ -"id": "e3563fae-ff35-457b-9fb1-784eda637518", -"name": "Sticky Note5", -"type": "n8n-nodes-base.stickyNote", -"position": [ -1780, -1280 -], -"parameters": { -"color": 7, -"width": 340, -"height": 140, -"content": "### 6. Use AI to Generate Factoid and Image\nUse an AI agent to automate the generation of factoids as requested by the user. This is a simple example but we recommend a adding a unique touch to stand out from the crowd!" -}, -"typeVersion": 1 -}, -{ -"id": "d1016c5d-c855-44c5-8ad3-a534bedaa8cf", -"name": "Sticky Note6", -"type": "n8n-nodes-base.stickyNote", -"position": [ -2500, -1040 -], -"parameters": { -"color": 7, -"width": 460, -"height": 400, -"content": "### 7. 
Send Email to User\nFinally, send a message to the user with both text and image.\nLog the event in the Airtable for later analysis if required.\n\n![Screenshot of email result](https://res.cloudinary.com/daglih2g8/image/upload/f_auto,q_auto/v1/n8n-workflows/dbpctdhohj3vlewy6oyc)" -}, -"typeVersion": 1 -}, -{ -"id": "773075fa-e5a2-4d4f-8527-eb07c7038b00", -"name": "Sticky Note7", -"type": "n8n-nodes-base.stickyNote", -"position": [ --420, -680 -], -"parameters": { -"width": 480, -"height": 900, -"content": "## Try It Out!\n\n### This n8n templates demonstrates how to build a simple subscriber service entirely in n8n using n8n forms as a frontend, n8n generally as the backend and Airtable as the storage layer.\n\nThis template in particular shows a fully automated service to send automated messages containing facts about a topic the user requested for.\n\n### How it works\n* An n8n form is setup up to allow users to subscribe with a desired topic and interval of which to recieve messages via n8n forms which is then added to the Airtable.\n* A scheduled trigger is executed every morning and searches for subscribers to send messages for based on their desired intervals.\n* Once found, Subscribers are sent to a subworkflow which performs the text content generation via an AI agent and also uses a vision model to generate an image.\n* Both are attached to an email which is sent to the subscriber. This email also includes an unsubscribe link.\n* The unsubscribe flow works similarly via n8n form interface which when submitted disables further scheduled emails to the user.\n\n## How to use\n* Make a copy of sample Airtable here: https://airtable.com/appL3dptT6ZTSzY9v/shrLukHafy5bwDRfD\n* Make sure the workflow is \"activated\" and the forms are available and reachable by your audience.\n\n\n### Need Help?\nJoin the [Discord](https://discord.com/invite/XPKeKXeB7d) or ask in the [Forum](https://community.n8n.io/)!\n\nHappy Hacking!" 
-}, -"typeVersion": 1 -} -], -"pinData": {}, -"connections": { -"Wikipedia": { -"ai_tool": [ -[ -{ -"node": "Content Generation Agent", -"type": "ai_tool", -"index": 0 -} -] -] -}, -"Create Event": { -"main": [ -[ -{ -"node": "Execute Workflow", -"type": "main", -"index": 0 -} -] -] -}, -"Resize Image": { -"main": [ -[ -{ -"node": "Set Email Vars", -"type": "main", -"index": 0 -} -] -] -}, -"Search daily": { -"main": [ -[ -{ -"node": "Create Event", -"type": "main", -"index": 0 -} -] -] -}, -"Send Message": { -"main": [ -[ -{ -"node": "Log Last Sent", -"type": "main", -"index": 0 -} -] -] -}, -"Should Send?": { -"main": [ -[ -{ -"node": "Should Send = True", -"type": "main", -"index": 0 -} -] -] -}, -"Search weekly": { -"main": [ -[ -{ -"node": "Create Event", -"type": "main", -"index": 0 -} -] -] -}, -"Execution Data": { -"main": [ -[ -{ -"node": "Content Generation Agent", -"type": "main", -"index": 0 -} -] -] -}, -"Generate Image": { -"main": [ -[ -{ -"node": "Resize Image", -"type": "main", -"index": 0 -} -] -] -}, -"Set Email Vars": { -"main": [ -[ -{ -"node": "Send Message", -"type": "main", -"index": 0 -} -] -] -}, -"Subscribe Form": { -"main": [ -[ -{ -"node": "Create Subscriber", -"type": "main", -"index": 0 -} -] -] -}, -"Groq Chat Model": { -"ai_languageModel": [ -[ -{ -"node": "Content Generation Agent", -"type": "ai_languageModel", -"index": 0 -} -] -] -}, -"Search surprise": { -"main": [ -[ -{ -"node": "Should Send?", -"type": "main", -"index": 0 -} -] -] -}, -"Schedule Trigger": { -"main": [ -[ -{ -"node": "Search surprise", -"type": "main", -"index": 0 -}, -{ -"node": "Search daily", -"type": "main", -"index": 0 -}, -{ -"node": "Search weekly", -"type": "main", -"index": 0 -} -] -] -}, -"Unsubscribe Form": { -"main": [ -[ -{ -"node": "Update Subscriber", -"type": "main", -"index": 0 -} -] -] -}, -"Create Subscriber": { -"main": [ -[ -{ -"node": "confirmation email1", -"type": "main", -"index": 0 -} -] -] -}, -"Should Send = True": { -"main": [ -[ -{ 
-"node": "Create Event", -"type": "main", -"index": 0 -} -] -] -}, -"Window Buffer Memory": { -"ai_memory": [ -[ -{ -"node": "Content Generation Agent", -"type": "ai_memory", -"index": 0 -} -] -] -}, -"Content Generation Agent": { -"main": [ -[ -{ -"node": "Generate Image", -"type": "main", -"index": 0 -} -] -] -}, -"Execute Workflow Trigger": { -"main": [ -[ -{ -"node": "Execution Data", -"type": "main", -"index": 0 -} -] -] -} -} -} \ No newline at end of file diff --git a/Forms_and_Surveys/Qualifying Appointment Requests with AI & n8n Forms.txt b/Forms_and_Surveys/Qualifying Appointment Requests with AI & n8n Forms.json similarity index 100% rename from Forms_and_Surveys/Qualifying Appointment Requests with AI & n8n Forms.txt rename to Forms_and_Surveys/Qualifying Appointment Requests with AI & n8n Forms.json diff --git a/Gmail_and_Email_Automation/A Very Simple _Human in the Loop_ Email Response System Using AI and IMAP.txt b/Gmail_and_Email_Automation/A Very Simple _Human in the Loop_ Email Response System Using AI and IMAP.json similarity index 100% rename from Gmail_and_Email_Automation/A Very Simple _Human in the Loop_ Email Response System Using AI and IMAP.txt rename to Gmail_and_Email_Automation/A Very Simple _Human in the Loop_ Email Response System Using AI and IMAP.json diff --git a/Gmail_and_Email_Automation/AI-powered email processing autoresponder and response approval (Yes_No).txt b/Gmail_and_Email_Automation/AI-powered email processing autoresponder and response approval (Yes_No).json similarity index 100% rename from Gmail_and_Email_Automation/AI-powered email processing autoresponder and response approval (Yes_No).txt rename to Gmail_and_Email_Automation/AI-powered email processing autoresponder and response approval (Yes_No).json diff --git a/Gmail_and_Email_Automation/Analyze & Sort Suspicious Email Contents with ChatGPT.txt b/Gmail_and_Email_Automation/Analyze & Sort Suspicious Email Contents with ChatGPT.json similarity index 100% rename 
from Gmail_and_Email_Automation/Analyze & Sort Suspicious Email Contents with ChatGPT.txt rename to Gmail_and_Email_Automation/Analyze & Sort Suspicious Email Contents with ChatGPT.json diff --git a/Gmail_and_Email_Automation/Analyze Suspicious Email Contents with ChatGPT Vision.txt b/Gmail_and_Email_Automation/Analyze Suspicious Email Contents with ChatGPT Vision.json similarity index 100% rename from Gmail_and_Email_Automation/Analyze Suspicious Email Contents with ChatGPT Vision.txt rename to Gmail_and_Email_Automation/Analyze Suspicious Email Contents with ChatGPT Vision.json diff --git a/Gmail_and_Email_Automation/Auto Categorise Outlook Emails with AI.txt b/Gmail_and_Email_Automation/Auto Categorise Outlook Emails with AI.json similarity index 100% rename from Gmail_and_Email_Automation/Auto Categorise Outlook Emails with AI.txt rename to Gmail_and_Email_Automation/Auto Categorise Outlook Emails with AI.json diff --git a/Gmail_and_Email_Automation/Auto-label incoming Gmail messages with AI nodes.txt b/Gmail_and_Email_Automation/Auto-label incoming Gmail messages with AI nodes.json similarity index 100% rename from Gmail_and_Email_Automation/Auto-label incoming Gmail messages with AI nodes.txt rename to Gmail_and_Email_Automation/Auto-label incoming Gmail messages with AI nodes.json diff --git a/Gmail_and_Email_Automation/Basic Automatic Gmail Email Labelling with OpenAI and Gmail API.txt b/Gmail_and_Email_Automation/Basic Automatic Gmail Email Labelling with OpenAI and Gmail API.json similarity index 100% rename from Gmail_and_Email_Automation/Basic Automatic Gmail Email Labelling with OpenAI and Gmail API.txt rename to Gmail_and_Email_Automation/Basic Automatic Gmail Email Labelling with OpenAI and Gmail API.json diff --git a/Gmail_and_Email_Automation/Classify lemlist replies using OpenAI and automate reply handling.txt b/Gmail_and_Email_Automation/Classify lemlist replies using OpenAI and automate reply handling.json similarity index 100% rename from 
Gmail_and_Email_Automation/Classify lemlist replies using OpenAI and automate reply handling.txt rename to Gmail_and_Email_Automation/Classify lemlist replies using OpenAI and automate reply handling.json diff --git a/Gmail_and_Email_Automation/Compose reply draft in Gmail with OpenAI Assistant.txt b/Gmail_and_Email_Automation/Compose reply draft in Gmail with OpenAI Assistant.json similarity index 100% rename from Gmail_and_Email_Automation/Compose reply draft in Gmail with OpenAI Assistant.txt rename to Gmail_and_Email_Automation/Compose reply draft in Gmail with OpenAI Assistant.json diff --git a/Gmail_and_Email_Automation/Effortless Email Management with AI-Powered Summarization & Review.txt b/Gmail_and_Email_Automation/Effortless Email Management with AI-Powered Summarization & Review.json similarity index 100% rename from Gmail_and_Email_Automation/Effortless Email Management with AI-Powered Summarization & Review.txt rename to Gmail_and_Email_Automation/Effortless Email Management with AI-Powered Summarization & Review.json diff --git a/Gmail_and_Email_Automation/Email Summary Agent.txt b/Gmail_and_Email_Automation/Email Summary Agent.json similarity index 100% rename from Gmail_and_Email_Automation/Email Summary Agent.txt rename to Gmail_and_Email_Automation/Email Summary Agent.json diff --git a/Gmail_and_Email_Automation/Extract spending history from gmail to google sheet.txt b/Gmail_and_Email_Automation/Extract spending history from gmail to google sheet.json similarity index 100% rename from Gmail_and_Email_Automation/Extract spending history from gmail to google sheet.txt rename to Gmail_and_Email_Automation/Extract spending history from gmail to google sheet.json diff --git a/Gmail_and_Email_Automation/Gmail AI Auto-Responder_ Create Draft Replies to incoming emails.txt b/Gmail_and_Email_Automation/Gmail AI Auto-Responder_ Create Draft Replies to incoming emails.json similarity index 100% rename from Gmail_and_Email_Automation/Gmail AI Auto-Responder_ 
Create Draft Replies to incoming emails.txt rename to Gmail_and_Email_Automation/Gmail AI Auto-Responder_ Create Draft Replies to incoming emails.json diff --git a/Gmail_and_Email_Automation/Microsoft Outlook AI Email Assistant with contact support from Monday and Airtable.txt b/Gmail_and_Email_Automation/Microsoft Outlook AI Email Assistant with contact support from Monday and Airtable.json similarity index 100% rename from Gmail_and_Email_Automation/Microsoft Outlook AI Email Assistant with contact support from Monday and Airtable.txt rename to Gmail_and_Email_Automation/Microsoft Outlook AI Email Assistant with contact support from Monday and Airtable.json diff --git a/Gmail_and_Email_Automation/Modular & Customizable AI-Powered Email Routing_ Text Classifier for eCommerce.txt b/Gmail_and_Email_Automation/Modular & Customizable AI-Powered Email Routing_ Text Classifier for eCommerce.json similarity index 100% rename from Gmail_and_Email_Automation/Modular & Customizable AI-Powered Email Routing_ Text Classifier for eCommerce.txt rename to Gmail_and_Email_Automation/Modular & Customizable AI-Powered Email Routing_ Text Classifier for eCommerce.json diff --git a/Gmail_and_Email_Automation/Send a ChatGPT email reply and save responses to Google Sheets.txt b/Gmail_and_Email_Automation/Send a ChatGPT email reply and save responses to Google Sheets.json similarity index 100% rename from Gmail_and_Email_Automation/Send a ChatGPT email reply and save responses to Google Sheets.txt rename to Gmail_and_Email_Automation/Send a ChatGPT email reply and save responses to Google Sheets.json diff --git a/Gmail_and_Email_Automation/Send specific PDF attachments from Gmail to Google Drive using OpenAI.txt b/Gmail_and_Email_Automation/Send specific PDF attachments from Gmail to Google Drive using OpenAI.json similarity index 100% rename from Gmail_and_Email_Automation/Send specific PDF attachments from Gmail to Google Drive using OpenAI.txt rename to 
Gmail_and_Email_Automation/Send specific PDF attachments from Gmail to Google Drive using OpenAI.json diff --git a/Gmail_and_Email_Automation/Summarize your emails with A.I. (via Openrouter) and send to Line messenger (1).txt b/Gmail_and_Email_Automation/Summarize your emails with A.I. (via Openrouter) and send to Line messenger.json similarity index 100% rename from Gmail_and_Email_Automation/Summarize your emails with A.I. (via Openrouter) and send to Line messenger (1).txt rename to Gmail_and_Email_Automation/Summarize your emails with A.I. (via Openrouter) and send to Line messenger.json diff --git a/Gmail_and_Email_Automation/Summarize your emails with A.I. (via Openrouter) and send to Line messenger.txt b/Gmail_and_Email_Automation/Summarize your emails with A.I. (via Openrouter) and send to Line messenger.txt deleted file mode 100644 index 1235748..0000000 --- a/Gmail_and_Email_Automation/Summarize your emails with A.I. (via Openrouter) and send to Line messenger.txt +++ /dev/null @@ -1,177 +0,0 @@ -{ -"id": "QnVdtKiTf3nbrNkh", -"meta": { -"instanceId": "558d88703fb65b2d0e44613bc35916258b0f0bf983c5d4730c00c424b77ca36a", -"templateCredsSetupCompleted": true -}, -"name": "Summarize emails with A.I. then send to messenger", -"tags": [], -"nodes": [ -{ -"id": "50e12e63-df28-45ac-9208-48cbf5116d09", -"name": "Read emails (IMAP)", -"type": "n8n-nodes-base.emailReadImap", -"position": [ -340, -260 -], -"parameters": { -"options": {}, -"postProcessAction": "nothing" -}, -"credentials": { -"imap": { -"id": "gXtdakU9M02LBQc3", -"name": "IMAP account" -} -}, -"typeVersion": 2 -}, -{ -"id": "6565350b-2269-44e3-8f36-8797f32d3e09", -"name": "Send email to A.I. 
to summarize", -"type": "n8n-nodes-base.httpRequest", -"position": [ -700, -260 -], -"parameters": { -"url": "https://openrouter.ai/api/v1/chat/completions", -"method": "POST", -"options": {}, -"jsonBody": "={\n \"model\": \"meta-llama/llama-3.1-70b-instruct:free\",\n \"messages\": [\n {\n \"role\": \"user\",\n \"content\": \"I want you to read and summarize all the emails. If it's not rimportant, just give me a short summary with less than 10 words.\\n\\nHighlight as important if it is, add an emoji to indicate it is urgent:\\nFor the relevant content, find any action items and deadlines. Sometimes I need to sign up before a certain date or pay before a certain date, please highlight that in the summary for me.\\n\\nPut the deadline in BOLD at the top. If the email is not important, keep the summary short to 1 sentence only.\\n\\nHere's the email content for you to read:\\nSender email address: {{ encodeURIComponent($json.from) }}\\nSubject: {{ encodeURIComponent($json.subject) }}\\n{{ encodeURIComponent($json.textHtml) }}\"\n }\n ]\n}", -"sendBody": true, -"specifyBody": "json", -"authentication": "genericCredentialType", -"genericAuthType": "httpHeaderAuth" -}, -"credentials": { -"httpHeaderAuth": { -"id": "WY7UkF14ksPKq3S8", -"name": "Header Auth account 2" -} -}, -"typeVersion": 4.2, -"alwaysOutputData": false -}, -{ -"id": "d04c422a-c000-4e48-82d0-0bf44bcd9fff", -"name": "Send summarized content to messenger", -"type": "n8n-nodes-base.httpRequest", -"position": [ -1100, -260 -], -"parameters": { -"url": "https://api.line.me/v2/bot/message/push", -"method": "POST", -"options": {}, -"jsonBody": "={\n \"to\": \"U3ec262c49811f30cdc2d2f2b0a0df99a\",\n \"messages\": [\n {\n \"type\": \"text\",\n \"text\": \"{{ $json.choices[0].message.content.replace(/\\n/g, \"\\\\n\") }}\"\n }\n ]\n}\n\n\n ", -"sendBody": true, -"specifyBody": "json", -"authentication": "genericCredentialType", -"genericAuthType": "httpHeaderAuth" -}, -"credentials": { -"httpHeaderAuth": { -"id": 
"SzcKjO9Nn9vZPL2H", -"name": "Header Auth account 5" -} -}, -"typeVersion": 4.2 -}, -{ -"id": "57a1219c-4f40-407c-855b-86c4c7c468bb", -"name": "Sticky Note", -"type": "n8n-nodes-base.stickyNote", -"position": [ -180, -0 -], -"parameters": { -"width": 361, -"height": 90, -"content": "## Summarize emails with A.I.\nYou can find out more about the [use case](https://rumjahn.com/how-a-i-saved-my-kids-school-life-and-my-marriage/)" -}, -"typeVersion": 1 -}, -{ -"id": "17686264-56ac-419e-a32b-dc5c75f15f1f", -"name": "Sticky Note1", -"type": "n8n-nodes-base.stickyNote", -"position": [ -283, -141 -], -"parameters": { -"color": 5, -"width": 229, -"height": 280, -"content": "Find your email server's IMAP Settings. \n- Link for [gmail](https://www.getmailspring.com/setup/access-gmail-via-imap-smtp)" -}, -"typeVersion": 1 -}, -{ -"id": "1862abd6-7dca-4c66-90d6-110d4fcf4d99", -"name": "Sticky Note2", -"type": "n8n-nodes-base.stickyNote", -"position": [ -580, -0 -], -"parameters": { -"color": 6, -"width": 365, -"height": 442, -"content": "For the A.I. you can use Openrouter.ai. \n- Set up a free account\n- The A.I. model selected is FREE to use.\n## Credentials\n- Use header auth\n- Username: Authorization\n- Password: Bearer {insert your API key}.\n- The password is \"Bearer\" space plus your API key." -}, -"typeVersion": 1 -}, -{ -"id": "c4a3a76f-539d-4bbf-8f95-d7aaebf39a55", -"name": "Sticky Note3", -"type": "n8n-nodes-base.stickyNote", -"position": [ -1000, -0 -], -"parameters": { -"color": 4, -"width": 307, -"height": 439, -"content": "Don't use the official Line node. It's outdated.\n## Credentials\n- Use header auth\n- Username: Authorization\n- Password: Bearer {channel access token}\n\nYou can find your channel access token at the [Line API console](https://developers.line.biz/console/). Go to Messaging API and scroll to the bottom." 
-}, -"typeVersion": 1 -} -], -"active": false, -"pinData": {}, -"settings": { -"executionOrder": "v1" -}, -"versionId": "81216e6a-2bd8-4215-8a96-376ee520469d", -"connections": { -"Read emails (IMAP)": { -"main": [ -[ -{ -"node": "Send email to A.I. to summarize", -"type": "main", -"index": 0 -} -] -] -}, -"Send email to A.I. to summarize": { -"main": [ -[ -{ -"node": "Send summarized content to messenger", -"type": "main", -"index": 0 -} -] -] -} -} -} \ No newline at end of file diff --git a/Gmail_and_Email_Automation/create e-mail responses with fastmail and OpenAI.txt b/Gmail_and_Email_Automation/create e-mail responses with fastmail and OpenAI.json similarity index 100% rename from Gmail_and_Email_Automation/create e-mail responses with fastmail and OpenAI.txt rename to Gmail_and_Email_Automation/create e-mail responses with fastmail and OpenAI.json diff --git a/Gmail_and_Email_Automation/📈 Receive Daily Market News from FT.com to your Microsoft outlook inbox.txt b/Gmail_and_Email_Automation/📈 Receive Daily Market News from FT.com to your Microsoft outlook inbox.json similarity index 100% rename from Gmail_and_Email_Automation/📈 Receive Daily Market News from FT.com to your Microsoft outlook inbox.txt rename to Gmail_and_Email_Automation/📈 Receive Daily Market News from FT.com to your Microsoft outlook inbox.json diff --git a/Google_Drive_and_Google_Sheets/Author and Publish Blog Posts From Google Sheets.txt b/Google_Drive_and_Google_Sheets/Author and Publish Blog Posts From Google Sheets.json similarity index 100% rename from Google_Drive_and_Google_Sheets/Author and Publish Blog Posts From Google Sheets.txt rename to Google_Drive_and_Google_Sheets/Author and Publish Blog Posts From Google Sheets.json diff --git a/Google_Drive_and_Google_Sheets/Automated End-to-End Fine-Tuning of OpenAI Models with Google Drive Integration.txt b/Google_Drive_and_Google_Sheets/Automated End-to-End Fine-Tuning of OpenAI Models with Google Drive Integration.json similarity index 
100% rename from Google_Drive_and_Google_Sheets/Automated End-to-End Fine-Tuning of OpenAI Models with Google Drive Integration.txt rename to Google_Drive_and_Google_Sheets/Automated End-to-End Fine-Tuning of OpenAI Models with Google Drive Integration.json diff --git a/Google_Drive_and_Google_Sheets/Automatic Background Removal for Images in Google Drive.txt b/Google_Drive_and_Google_Sheets/Automatic Background Removal for Images in Google Drive.json similarity index 100% rename from Google_Drive_and_Google_Sheets/Automatic Background Removal for Images in Google Drive.txt rename to Google_Drive_and_Google_Sheets/Automatic Background Removal for Images in Google Drive.json diff --git a/Google_Drive_and_Google_Sheets/Build an OpenAI Assistant with Google Drive Integration.txt b/Google_Drive_and_Google_Sheets/Build an OpenAI Assistant with Google Drive Integration.json similarity index 100% rename from Google_Drive_and_Google_Sheets/Build an OpenAI Assistant with Google Drive Integration.txt rename to Google_Drive_and_Google_Sheets/Build an OpenAI Assistant with Google Drive Integration.json diff --git a/Google_Drive_and_Google_Sheets/Chat with a Google Sheet using AI.txt b/Google_Drive_and_Google_Sheets/Chat with a Google Sheet using AI.json similarity index 100% rename from Google_Drive_and_Google_Sheets/Chat with a Google Sheet using AI.txt rename to Google_Drive_and_Google_Sheets/Chat with a Google Sheet using AI.json diff --git a/Google_Drive_and_Google_Sheets/Chat with your event schedule from Google Sheets in Telegram.txt b/Google_Drive_and_Google_Sheets/Chat with your event schedule from Google Sheets in Telegram.json similarity index 100% rename from Google_Drive_and_Google_Sheets/Chat with your event schedule from Google Sheets in Telegram.txt rename to Google_Drive_and_Google_Sheets/Chat with your event schedule from Google Sheets in Telegram.json diff --git a/Google_Drive_and_Google_Sheets/Extract Information from a Logo Sheet using forms, AI, Google 
Sheet and Airtable.txt b/Google_Drive_and_Google_Sheets/Extract Information from a Logo Sheet using forms, AI, Google Sheet and Airtable.json similarity index 100% rename from Google_Drive_and_Google_Sheets/Extract Information from a Logo Sheet using forms, AI, Google Sheet and Airtable.txt rename to Google_Drive_and_Google_Sheets/Extract Information from a Logo Sheet using forms, AI, Google Sheet and Airtable.json diff --git a/Google_Drive_and_Google_Sheets/Flux Dev Image Generation (Fal.ai) to Google Drive.txt b/Google_Drive_and_Google_Sheets/Flux Dev Image Generation (Fal.ai) to Google Drive.json similarity index 100% rename from Google_Drive_and_Google_Sheets/Flux Dev Image Generation (Fal.ai) to Google Drive.txt rename to Google_Drive_and_Google_Sheets/Flux Dev Image Generation (Fal.ai) to Google Drive.json diff --git a/Google_Drive_and_Google_Sheets/Qualify new leads in Google Sheets via OpenAI_s GPT-4.txt b/Google_Drive_and_Google_Sheets/Qualify new leads in Google Sheets via OpenAI_s GPT-4.json similarity index 100% rename from Google_Drive_and_Google_Sheets/Qualify new leads in Google Sheets via OpenAI_s GPT-4.txt rename to Google_Drive_and_Google_Sheets/Qualify new leads in Google Sheets via OpenAI_s GPT-4.json diff --git a/Google_Drive_and_Google_Sheets/RAG Chatbot for Company Documents using Google Drive and Gemini.txt b/Google_Drive_and_Google_Sheets/RAG Chatbot for Company Documents using Google Drive and Gemini.json similarity index 100% rename from Google_Drive_and_Google_Sheets/RAG Chatbot for Company Documents using Google Drive and Gemini.txt rename to Google_Drive_and_Google_Sheets/RAG Chatbot for Company Documents using Google Drive and Gemini.json diff --git a/Google_Drive_and_Google_Sheets/RAG_Context-Aware Chunking _ Google Drive to Pinecone via OpenRouter & Gemini.txt b/Google_Drive_and_Google_Sheets/RAG_Context-Aware Chunking _ Google Drive to Pinecone via OpenRouter & Gemini.json similarity index 100% rename from 
Google_Drive_and_Google_Sheets/RAG_Context-Aware Chunking _ Google Drive to Pinecone via OpenRouter & Gemini.txt rename to Google_Drive_and_Google_Sheets/RAG_Context-Aware Chunking _ Google Drive to Pinecone via OpenRouter & Gemini.json diff --git a/Google_Drive_and_Google_Sheets/Screen Applicants With AI, notify HR and save them in a Google Sheet.txt b/Google_Drive_and_Google_Sheets/Screen Applicants With AI, notify HR and save them in a Google Sheet.json similarity index 100% rename from Google_Drive_and_Google_Sheets/Screen Applicants With AI, notify HR and save them in a Google Sheet.txt rename to Google_Drive_and_Google_Sheets/Screen Applicants With AI, notify HR and save them in a Google Sheet.json diff --git a/Google_Drive_and_Google_Sheets/Simple Expense Tracker with n8n Chat, AI Agent and Google Sheets.txt b/Google_Drive_and_Google_Sheets/Simple Expense Tracker with n8n Chat, AI Agent and Google Sheets.json similarity index 100% rename from Google_Drive_and_Google_Sheets/Simple Expense Tracker with n8n Chat, AI Agent and Google Sheets.txt rename to Google_Drive_and_Google_Sheets/Simple Expense Tracker with n8n Chat, AI Agent and Google Sheets.json diff --git a/Google_Drive_and_Google_Sheets/Summarize Google Sheets form feedback via OpenAI_s GPT-4.txt b/Google_Drive_and_Google_Sheets/Summarize Google Sheets form feedback via OpenAI_s GPT-4.json similarity index 100% rename from Google_Drive_and_Google_Sheets/Summarize Google Sheets form feedback via OpenAI_s GPT-4.txt rename to Google_Drive_and_Google_Sheets/Summarize Google Sheets form feedback via OpenAI_s GPT-4.json diff --git a/Google_Drive_and_Google_Sheets/Summarize the New Documents from Google Drive and Save Summary in Google Sheet.txt b/Google_Drive_and_Google_Sheets/Summarize the New Documents from Google Drive and Save Summary in Google Sheet.json similarity index 100% rename from Google_Drive_and_Google_Sheets/Summarize the New Documents from Google Drive and Save Summary in Google Sheet.txt 
rename to Google_Drive_and_Google_Sheets/Summarize the New Documents from Google Drive and Save Summary in Google Sheet.json diff --git a/Google_Drive_and_Google_Sheets/Upload to Instagram and Tiktok from Google Drive.txt b/Google_Drive_and_Google_Sheets/Upload to Instagram and Tiktok from Google Drive.json similarity index 100% rename from Google_Drive_and_Google_Sheets/Upload to Instagram and Tiktok from Google Drive.txt rename to Google_Drive_and_Google_Sheets/Upload to Instagram and Tiktok from Google Drive.json diff --git a/Google_Drive_and_Google_Sheets/✨ Vision-Based AI Agent Scraper - with Google Sheets, ScrapingBee, and Gemini.txt b/Google_Drive_and_Google_Sheets/✨ Vision-Based AI Agent Scraper - with Google Sheets, ScrapingBee, and Gemini.json similarity index 100% rename from Google_Drive_and_Google_Sheets/✨ Vision-Based AI Agent Scraper - with Google Sheets, ScrapingBee, and Gemini.txt rename to Google_Drive_and_Google_Sheets/✨ Vision-Based AI Agent Scraper - with Google Sheets, ScrapingBee, and Gemini.json diff --git a/HR_and_Recruitment/BambooHR AI-Powered Company Policies and Benefits Chatbot.txt b/HR_and_Recruitment/BambooHR AI-Powered Company Policies and Benefits Chatbot.json similarity index 100% rename from HR_and_Recruitment/BambooHR AI-Powered Company Policies and Benefits Chatbot.txt rename to HR_and_Recruitment/BambooHR AI-Powered Company Policies and Benefits Chatbot.json diff --git a/HR_and_Recruitment/CV Screening with OpenAI.txt b/HR_and_Recruitment/CV Screening with OpenAI.json similarity index 100% rename from HR_and_Recruitment/CV Screening with OpenAI.txt rename to HR_and_Recruitment/CV Screening with OpenAI.json diff --git a/HR_and_Recruitment/HR & IT Helpdesk Chatbot with Audio Transcription.txt b/HR_and_Recruitment/HR & IT Helpdesk Chatbot with Audio Transcription.json similarity index 100% rename from HR_and_Recruitment/HR & IT Helpdesk Chatbot with Audio Transcription.txt rename to HR_and_Recruitment/HR & IT Helpdesk Chatbot 
with Audio Transcription.json diff --git a/HR_and_Recruitment/HR Job Posting and Evaluation with AI.txt b/HR_and_Recruitment/HR Job Posting and Evaluation with AI.json similarity index 100% rename from HR_and_Recruitment/HR Job Posting and Evaluation with AI.txt rename to HR_and_Recruitment/HR Job Posting and Evaluation with AI.json diff --git a/Instagram_Twitter_Social_Media/AI agent for Instagram DM_inbox. Manychat + Open AI integration.txt b/Instagram_Twitter_Social_Media/AI agent for Instagram DM_inbox. Manychat + Open AI integration.json similarity index 100% rename from Instagram_Twitter_Social_Media/AI agent for Instagram DM_inbox. Manychat + Open AI integration.txt rename to Instagram_Twitter_Social_Media/AI agent for Instagram DM_inbox. Manychat + Open AI integration.json diff --git a/Instagram_Twitter_Social_Media/Create dynamic Twitter profile banner.txt b/Instagram_Twitter_Social_Media/Create dynamic Twitter profile banner.json similarity index 100% rename from Instagram_Twitter_Social_Media/Create dynamic Twitter profile banner.txt rename to Instagram_Twitter_Social_Media/Create dynamic Twitter profile banner.json diff --git a/Instagram_Twitter_Social_Media/Generate Instagram Content from Top Trends with AI Image Generation.txt b/Instagram_Twitter_Social_Media/Generate Instagram Content from Top Trends with AI Image Generation.json similarity index 100% rename from Instagram_Twitter_Social_Media/Generate Instagram Content from Top Trends with AI Image Generation.txt rename to Instagram_Twitter_Social_Media/Generate Instagram Content from Top Trends with AI Image Generation.json diff --git a/Instagram_Twitter_Social_Media/OpenAI-powered tweet generator.txt b/Instagram_Twitter_Social_Media/OpenAI-powered tweet generator.json similarity index 100% rename from Instagram_Twitter_Social_Media/OpenAI-powered tweet generator.txt rename to Instagram_Twitter_Social_Media/OpenAI-powered tweet generator.json diff --git a/Instagram_Twitter_Social_Media/Post New 
YouTube Videos to X.txt b/Instagram_Twitter_Social_Media/Post New YouTube Videos to X.json similarity index 100% rename from Instagram_Twitter_Social_Media/Post New YouTube Videos to X.txt rename to Instagram_Twitter_Social_Media/Post New YouTube Videos to X.json diff --git a/Instagram_Twitter_Social_Media/Reddit AI digest.txt b/Instagram_Twitter_Social_Media/Reddit AI digest.json similarity index 100% rename from Instagram_Twitter_Social_Media/Reddit AI digest.txt rename to Instagram_Twitter_Social_Media/Reddit AI digest.json diff --git a/Instagram_Twitter_Social_Media/Social Media Analysis and Automated Email Generation.txt b/Instagram_Twitter_Social_Media/Social Media Analysis and Automated Email Generation.json similarity index 100% rename from Instagram_Twitter_Social_Media/Social Media Analysis and Automated Email Generation.txt rename to Instagram_Twitter_Social_Media/Social Media Analysis and Automated Email Generation.json diff --git a/Instagram_Twitter_Social_Media/Speed Up Social Media Banners With BannerBear.com.txt b/Instagram_Twitter_Social_Media/Speed Up Social Media Banners With BannerBear.com.json similarity index 100% rename from Instagram_Twitter_Social_Media/Speed Up Social Media Banners With BannerBear.com.txt rename to Instagram_Twitter_Social_Media/Speed Up Social Media Banners With BannerBear.com.json diff --git a/Instagram_Twitter_Social_Media/Twitter Virtual AI Influencer.txt b/Instagram_Twitter_Social_Media/Twitter Virtual AI Influencer.json similarity index 100% rename from Instagram_Twitter_Social_Media/Twitter Virtual AI Influencer.txt rename to Instagram_Twitter_Social_Media/Twitter Virtual AI Influencer.json diff --git a/Instagram_Twitter_Social_Media/Update Twitter banner using HTTP request.txt b/Instagram_Twitter_Social_Media/Update Twitter banner using HTTP request.json similarity index 100% rename from Instagram_Twitter_Social_Media/Update Twitter banner using HTTP request.txt rename to Instagram_Twitter_Social_Media/Update 
Twitter banner using HTTP request.json diff --git a/Notion/Add positive feedback messages to a table in Notion.txt b/Notion/Add positive feedback messages to a table in Notion.json similarity index 100% rename from Notion/Add positive feedback messages to a table in Notion.txt rename to Notion/Add positive feedback messages to a table in Notion.json diff --git a/Notion/Analyse papers from Hugging Face with AI and store them in Notion.txt b/Notion/Analyse papers from Hugging Face with AI and store them in Notion.json similarity index 100% rename from Notion/Analyse papers from Hugging Face with AI and store them in Notion.txt rename to Notion/Analyse papers from Hugging Face with AI and store them in Notion.json diff --git a/Notion/Automate Competitor Research with Exa.ai, Notion and AI Agents.txt b/Notion/Automate Competitor Research with Exa.ai, Notion and AI Agents.json similarity index 100% rename from Notion/Automate Competitor Research with Exa.ai, Notion and AI Agents.txt rename to Notion/Automate Competitor Research with Exa.ai, Notion and AI Agents.json diff --git a/Notion/Automate LinkedIn Outreach with Notion and OpenAI.txt b/Notion/Automate LinkedIn Outreach with Notion and OpenAI.json similarity index 100% rename from Notion/Automate LinkedIn Outreach with Notion and OpenAI.txt rename to Notion/Automate LinkedIn Outreach with Notion and OpenAI.json diff --git a/Notion/Notion AI Assistant Generator.txt b/Notion/Notion AI Assistant Generator.json similarity index 100% rename from Notion/Notion AI Assistant Generator.txt rename to Notion/Notion AI Assistant Generator.json diff --git a/Notion/Notion knowledge base AI assistant.txt b/Notion/Notion knowledge base AI assistant.json similarity index 100% rename from Notion/Notion knowledge base AI assistant.txt rename to Notion/Notion knowledge base AI assistant.json diff --git a/Notion/Notion to Pinecone Vector Store Integration.txt b/Notion/Notion to Pinecone Vector Store Integration.json similarity index 
100% rename from Notion/Notion to Pinecone Vector Store Integration.txt rename to Notion/Notion to Pinecone Vector Store Integration.json diff --git a/Notion/Store Notion_s Pages as Vector Documents into Supabase with OpenAI.txt b/Notion/Store Notion_s Pages as Vector Documents into Supabase with OpenAI.json similarity index 100% rename from Notion/Store Notion_s Pages as Vector Documents into Supabase with OpenAI.txt rename to Notion/Store Notion_s Pages as Vector Documents into Supabase with OpenAI.json diff --git a/Notion/Turn Emails into AI-Enhanced Tasks in Notion (Multi-User Support) with Gmail, Airtable and Softr.txt b/Notion/Turn Emails into AI-Enhanced Tasks in Notion (Multi-User Support) with Gmail, Airtable and Softr.json similarity index 100% rename from Notion/Turn Emails into AI-Enhanced Tasks in Notion (Multi-User Support) with Gmail, Airtable and Softr.txt rename to Notion/Turn Emails into AI-Enhanced Tasks in Notion (Multi-User Support) with Gmail, Airtable and Softr.json diff --git a/Notion/Upsert huge documents in a vector store with Supabase and Notion.txt b/Notion/Upsert huge documents in a vector store with Supabase and Notion.json similarity index 100% rename from Notion/Upsert huge documents in a vector store with Supabase and Notion.txt rename to Notion/Upsert huge documents in a vector store with Supabase and Notion.json diff --git a/OpenAI_and_LLMs/AI Agent To Chat With Files In Supabase Storage.txt b/OpenAI_and_LLMs/AI Agent To Chat With Files In Supabase Storage.json similarity index 100% rename from OpenAI_and_LLMs/AI Agent To Chat With Files In Supabase Storage.txt rename to OpenAI_and_LLMs/AI Agent To Chat With Files In Supabase Storage.json diff --git a/OpenAI_and_LLMs/AI Agent _ Google calendar assistant using OpenAI.txt b/OpenAI_and_LLMs/AI Agent _ Google calendar assistant using OpenAI.json similarity index 100% rename from OpenAI_and_LLMs/AI Agent _ Google calendar assistant using OpenAI.txt rename to OpenAI_and_LLMs/AI Agent _ 
Google calendar assistant using OpenAI.json diff --git a/OpenAI_and_LLMs/AI Agent for realtime insights on meetings.txt b/OpenAI_and_LLMs/AI Agent for realtime insights on meetings.json similarity index 100% rename from OpenAI_and_LLMs/AI Agent for realtime insights on meetings.txt rename to OpenAI_and_LLMs/AI Agent for realtime insights on meetings.json diff --git a/OpenAI_and_LLMs/AI Agent to chat with Supabase_PostgreSQL DB.txt b/OpenAI_and_LLMs/AI Agent to chat with Supabase_PostgreSQL DB.json similarity index 100% rename from OpenAI_and_LLMs/AI Agent to chat with Supabase_PostgreSQL DB.txt rename to OpenAI_and_LLMs/AI Agent to chat with Supabase_PostgreSQL DB.json diff --git a/OpenAI_and_LLMs/AI Agent to chat with you Search Console Data, using OpenAI and Postgres.txt b/OpenAI_and_LLMs/AI Agent to chat with you Search Console Data, using OpenAI and Postgres.json similarity index 100% rename from OpenAI_and_LLMs/AI Agent to chat with you Search Console Data, using OpenAI and Postgres.txt rename to OpenAI_and_LLMs/AI Agent to chat with you Search Console Data, using OpenAI and Postgres.json diff --git a/OpenAI_and_LLMs/AI Agent with Ollama for current weather and wiki.txt b/OpenAI_and_LLMs/AI Agent with Ollama for current weather and wiki.json similarity index 100% rename from OpenAI_and_LLMs/AI Agent with Ollama for current weather and wiki.txt rename to OpenAI_and_LLMs/AI Agent with Ollama for current weather and wiki.json diff --git a/OpenAI_and_LLMs/AI Automated HR Workflow for CV Analysis and Candidate Evaluation.txt b/OpenAI_and_LLMs/AI Automated HR Workflow for CV Analysis and Candidate Evaluation.json similarity index 100% rename from OpenAI_and_LLMs/AI Automated HR Workflow for CV Analysis and Candidate Evaluation.txt rename to OpenAI_and_LLMs/AI Automated HR Workflow for CV Analysis and Candidate Evaluation.json diff --git a/OpenAI_and_LLMs/AI Crew to Automate Fundamental Stock Analysis - Q&A Workflow.txt b/OpenAI_and_LLMs/AI Crew to Automate 
Fundamental Stock Analysis - Q&A Workflow.json similarity index 100% rename from OpenAI_and_LLMs/AI Crew to Automate Fundamental Stock Analysis - Q&A Workflow.txt rename to OpenAI_and_LLMs/AI Crew to Automate Fundamental Stock Analysis - Q&A Workflow.json diff --git a/OpenAI_and_LLMs/AI Customer feedback sentiment analysis.txt b/OpenAI_and_LLMs/AI Customer feedback sentiment analysis.json similarity index 100% rename from OpenAI_and_LLMs/AI Customer feedback sentiment analysis.txt rename to OpenAI_and_LLMs/AI Customer feedback sentiment analysis.json diff --git a/OpenAI_and_LLMs/AI Data Extraction with Dynamic Prompts and Airtable.txt b/OpenAI_and_LLMs/AI Data Extraction with Dynamic Prompts and Airtable.json similarity index 100% rename from OpenAI_and_LLMs/AI Data Extraction with Dynamic Prompts and Airtable.txt rename to OpenAI_and_LLMs/AI Data Extraction with Dynamic Prompts and Airtable.json diff --git a/OpenAI_and_LLMs/AI Data Extraction with Dynamic Prompts and Baserow.txt b/OpenAI_and_LLMs/AI Data Extraction with Dynamic Prompts and Baserow.json similarity index 100% rename from OpenAI_and_LLMs/AI Data Extraction with Dynamic Prompts and Baserow.txt rename to OpenAI_and_LLMs/AI Data Extraction with Dynamic Prompts and Baserow.json diff --git a/OpenAI_and_LLMs/AI Fitness Coach Strava Data Analysis and Personalized Training Insights.txt b/OpenAI_and_LLMs/AI Fitness Coach Strava Data Analysis and Personalized Training Insights.json similarity index 100% rename from OpenAI_and_LLMs/AI Fitness Coach Strava Data Analysis and Personalized Training Insights.txt rename to OpenAI_and_LLMs/AI Fitness Coach Strava Data Analysis and Personalized Training Insights.json diff --git a/OpenAI_and_LLMs/AI Powered Web Scraping with Jina, Google Sheets and OpenAI _ the EASY way.txt b/OpenAI_and_LLMs/AI Powered Web Scraping with Jina, Google Sheets and OpenAI _ the EASY way.json similarity index 100% rename from OpenAI_and_LLMs/AI Powered Web Scraping with Jina, Google Sheets 
and OpenAI _ the EASY way.txt rename to OpenAI_and_LLMs/AI Powered Web Scraping with Jina, Google Sheets and OpenAI _ the EASY way.json diff --git a/OpenAI_and_LLMs/AI Social Media Caption Creator creates social media post captions in Airtable.txt b/OpenAI_and_LLMs/AI Social Media Caption Creator creates social media post captions in Airtable.json similarity index 100% rename from OpenAI_and_LLMs/AI Social Media Caption Creator creates social media post captions in Airtable.txt rename to OpenAI_and_LLMs/AI Social Media Caption Creator creates social media post captions in Airtable.json diff --git a/OpenAI_and_LLMs/AI Voice Chat using Webhook, Memory Manager, OpenAI, Google Gemini & ElevenLabs.txt b/OpenAI_and_LLMs/AI Voice Chat using Webhook, Memory Manager, OpenAI, Google Gemini & ElevenLabs.json similarity index 100% rename from OpenAI_and_LLMs/AI Voice Chat using Webhook, Memory Manager, OpenAI, Google Gemini & ElevenLabs.txt rename to OpenAI_and_LLMs/AI Voice Chat using Webhook, Memory Manager, OpenAI, Google Gemini & ElevenLabs.json diff --git a/OpenAI_and_LLMs/AI Voice Chatbot with ElevenLabs & OpenAI for Customer Service and Restaurants.txt b/OpenAI_and_LLMs/AI Voice Chatbot with ElevenLabs & OpenAI for Customer Service and Restaurants.json similarity index 100% rename from OpenAI_and_LLMs/AI Voice Chatbot with ElevenLabs & OpenAI for Customer Service and Restaurants.txt rename to OpenAI_and_LLMs/AI Voice Chatbot with ElevenLabs & OpenAI for Customer Service and Restaurants.json diff --git a/OpenAI_and_LLMs/AI Youtube Trend Finder Based On Niche.txt b/OpenAI_and_LLMs/AI Youtube Trend Finder Based On Niche.json similarity index 100% rename from OpenAI_and_LLMs/AI Youtube Trend Finder Based On Niche.txt rename to OpenAI_and_LLMs/AI Youtube Trend Finder Based On Niche.json diff --git a/OpenAI_and_LLMs/AI agent chat.txt b/OpenAI_and_LLMs/AI agent chat.json similarity index 100% rename from OpenAI_and_LLMs/AI agent chat.txt rename to OpenAI_and_LLMs/AI agent 
chat.json diff --git a/OpenAI_and_LLMs/AI agent that can scrape webpages.txt b/OpenAI_and_LLMs/AI agent that can scrape webpages.json similarity index 100% rename from OpenAI_and_LLMs/AI agent that can scrape webpages.txt rename to OpenAI_and_LLMs/AI agent that can scrape webpages.json diff --git a/OpenAI_and_LLMs/AI chat with any data source (using the n8n workflow tool).txt b/OpenAI_and_LLMs/AI chat with any data source (using the n8n workflow tool).json similarity index 100% rename from OpenAI_and_LLMs/AI chat with any data source (using the n8n workflow tool).txt rename to OpenAI_and_LLMs/AI chat with any data source (using the n8n workflow tool).json diff --git a/OpenAI_and_LLMs/AI chatbot that can search the web.txt b/OpenAI_and_LLMs/AI chatbot that can search the web.json similarity index 100% rename from OpenAI_and_LLMs/AI chatbot that can search the web.txt rename to OpenAI_and_LLMs/AI chatbot that can search the web.json diff --git a/OpenAI_and_LLMs/AI web researcher for sales.txt b/OpenAI_and_LLMs/AI web researcher for sales.json similarity index 100% rename from OpenAI_and_LLMs/AI web researcher for sales.txt rename to OpenAI_and_LLMs/AI web researcher for sales.json diff --git a/OpenAI_and_LLMs/AI-Driven Lead Management and Inquiry Automation with ERPNext & n8n.txt b/OpenAI_and_LLMs/AI-Driven Lead Management and Inquiry Automation with ERPNext & n8n.json similarity index 100% rename from OpenAI_and_LLMs/AI-Driven Lead Management and Inquiry Automation with ERPNext & n8n.txt rename to OpenAI_and_LLMs/AI-Driven Lead Management and Inquiry Automation with ERPNext & n8n.json diff --git a/OpenAI_and_LLMs/AI-Generated Summary Block for WordPress Posts.txt b/OpenAI_and_LLMs/AI-Generated Summary Block for WordPress Posts.json similarity index 100% rename from OpenAI_and_LLMs/AI-Generated Summary Block for WordPress Posts.txt rename to OpenAI_and_LLMs/AI-Generated Summary Block for WordPress Posts.json diff --git a/OpenAI_and_LLMs/AI-Powered Candidate 
Shortlisting Automation for ERPNext.txt b/OpenAI_and_LLMs/AI-Powered Candidate Shortlisting Automation for ERPNext.json similarity index 100% rename from OpenAI_and_LLMs/AI-Powered Candidate Shortlisting Automation for ERPNext.txt rename to OpenAI_and_LLMs/AI-Powered Candidate Shortlisting Automation for ERPNext.json diff --git a/OpenAI_and_LLMs/AI-Powered Email Automation for Business_ Summarize & Respond with RAG.txt b/OpenAI_and_LLMs/AI-Powered Email Automation for Business_ Summarize & Respond with RAG.json similarity index 100% rename from OpenAI_and_LLMs/AI-Powered Email Automation for Business_ Summarize & Respond with RAG.txt rename to OpenAI_and_LLMs/AI-Powered Email Automation for Business_ Summarize & Respond with RAG.json diff --git a/OpenAI_and_LLMs/AI-Powered RAG Workflow For Stock Earnings Report Analysis.txt b/OpenAI_and_LLMs/AI-Powered RAG Workflow For Stock Earnings Report Analysis.json similarity index 100% rename from OpenAI_and_LLMs/AI-Powered RAG Workflow For Stock Earnings Report Analysis.txt rename to OpenAI_and_LLMs/AI-Powered RAG Workflow For Stock Earnings Report Analysis.json diff --git a/OpenAI_and_LLMs/AI-Powered Social Media Amplifier.txt b/OpenAI_and_LLMs/AI-Powered Social Media Amplifier.json similarity index 100% rename from OpenAI_and_LLMs/AI-Powered Social Media Amplifier.txt rename to OpenAI_and_LLMs/AI-Powered Social Media Amplifier.json diff --git a/OpenAI_and_LLMs/AI-powered WooCommerce Support-Agent.txt b/OpenAI_and_LLMs/AI-powered WooCommerce Support-Agent.json similarity index 100% rename from OpenAI_and_LLMs/AI-powered WooCommerce Support-Agent.txt rename to OpenAI_and_LLMs/AI-powered WooCommerce Support-Agent.json diff --git a/OpenAI_and_LLMs/AI_ Ask questions about any data source (using the n8n workflow retriever).txt b/OpenAI_and_LLMs/AI_ Ask questions about any data source (using the n8n workflow retriever).json similarity index 100% rename from OpenAI_and_LLMs/AI_ Ask questions about any data source (using the n8n 
workflow retriever).txt rename to OpenAI_and_LLMs/AI_ Ask questions about any data source (using the n8n workflow retriever).json diff --git a/OpenAI_and_LLMs/AI_ Summarize podcast episode and enhance using Wikipedia.txt b/OpenAI_and_LLMs/AI_ Summarize podcast episode and enhance using Wikipedia.json similarity index 100% rename from OpenAI_and_LLMs/AI_ Summarize podcast episode and enhance using Wikipedia.txt rename to OpenAI_and_LLMs/AI_ Summarize podcast episode and enhance using Wikipedia.json diff --git a/OpenAI_and_LLMs/Actioning Your Meeting Next Steps using Transcripts and AI.txt b/OpenAI_and_LLMs/Actioning Your Meeting Next Steps using Transcripts and AI.json similarity index 100% rename from OpenAI_and_LLMs/Actioning Your Meeting Next Steps using Transcripts and AI.txt rename to OpenAI_and_LLMs/Actioning Your Meeting Next Steps using Transcripts and AI.json diff --git a/OpenAI_and_LLMs/Advanced AI Demo (Presented at AI Developers #14 meetup).txt b/OpenAI_and_LLMs/Advanced AI Demo (Presented at AI Developers #14 meetup).json similarity index 100% rename from OpenAI_and_LLMs/Advanced AI Demo (Presented at AI Developers #14 meetup).txt rename to OpenAI_and_LLMs/Advanced AI Demo (Presented at AI Developers #14 meetup).json diff --git a/OpenAI_and_LLMs/Ask a human for help when the AI doesn_t know the answer.txt b/OpenAI_and_LLMs/Ask a human for help when the AI doesn_t know the answer.json similarity index 100% rename from OpenAI_and_LLMs/Ask a human for help when the AI doesn_t know the answer.txt rename to OpenAI_and_LLMs/Ask a human for help when the AI doesn_t know the answer.json diff --git a/OpenAI_and_LLMs/Automate Customer Support Issue Resolution using AI Text Classifier.txt b/OpenAI_and_LLMs/Automate Customer Support Issue Resolution using AI Text Classifier.json similarity index 100% rename from OpenAI_and_LLMs/Automate Customer Support Issue Resolution using AI Text Classifier.txt rename to OpenAI_and_LLMs/Automate Customer Support Issue 
Resolution using AI Text Classifier.json diff --git a/OpenAI_and_LLMs/Automate Image Validation Tasks using AI Vision.txt b/OpenAI_and_LLMs/Automate Image Validation Tasks using AI Vision.json similarity index 100% rename from OpenAI_and_LLMs/Automate Image Validation Tasks using AI Vision.txt rename to OpenAI_and_LLMs/Automate Image Validation Tasks using AI Vision.json diff --git a/OpenAI_and_LLMs/Automate Your RFP Process with OpenAI Assistants.txt b/OpenAI_and_LLMs/Automate Your RFP Process with OpenAI Assistants.json similarity index 100% rename from OpenAI_and_LLMs/Automate Your RFP Process with OpenAI Assistants.txt rename to OpenAI_and_LLMs/Automate Your RFP Process with OpenAI Assistants.json diff --git a/OpenAI_and_LLMs/Chat Assistant (OpenAI assistant) with Postgres Memory And API Calling Capabalities.txt b/OpenAI_and_LLMs/Chat Assistant (OpenAI assistant) with Postgres Memory And API Calling Capabalities.json similarity index 100% rename from OpenAI_and_LLMs/Chat Assistant (OpenAI assistant) with Postgres Memory And API Calling Capabalities.txt rename to OpenAI_and_LLMs/Chat Assistant (OpenAI assistant) with Postgres Memory And API Calling Capabalities.json diff --git a/OpenAI_and_LLMs/Chat with OpenAI Assistant (by adding a memory).txt b/OpenAI_and_LLMs/Chat with OpenAI Assistant (by adding a memory).json similarity index 100% rename from OpenAI_and_LLMs/Chat with OpenAI Assistant (by adding a memory).txt rename to OpenAI_and_LLMs/Chat with OpenAI Assistant (by adding a memory).json diff --git a/OpenAI_and_LLMs/Chat with local LLMs using n8n and Ollama.txt b/OpenAI_and_LLMs/Chat with local LLMs using n8n and Ollama.json similarity index 100% rename from OpenAI_and_LLMs/Chat with local LLMs using n8n and Ollama.txt rename to OpenAI_and_LLMs/Chat with local LLMs using n8n and Ollama.json diff --git a/OpenAI_and_LLMs/Configure your own Image Creation API Using OpenAI DALLE-3.txt b/OpenAI_and_LLMs/Configure your own Image Creation API Using OpenAI 
DALLE-3.json similarity index 100% rename from OpenAI_and_LLMs/Configure your own Image Creation API Using OpenAI DALLE-3.txt rename to OpenAI_and_LLMs/Configure your own Image Creation API Using OpenAI DALLE-3.json diff --git a/OpenAI_and_LLMs/Convert text to speech with OpenAI.txt b/OpenAI_and_LLMs/Convert text to speech with OpenAI.json similarity index 100% rename from OpenAI_and_LLMs/Convert text to speech with OpenAI.txt rename to OpenAI_and_LLMs/Convert text to speech with OpenAI.json diff --git a/OpenAI_and_LLMs/Create a Branded AI-Powered Website Chatbot.txt b/OpenAI_and_LLMs/Create a Branded AI-Powered Website Chatbot.json similarity index 100% rename from OpenAI_and_LLMs/Create a Branded AI-Powered Website Chatbot.txt rename to OpenAI_and_LLMs/Create a Branded AI-Powered Website Chatbot.json diff --git a/OpenAI_and_LLMs/Custom LangChain agent written in JavaScript.txt b/OpenAI_and_LLMs/Custom LangChain agent written in JavaScript.json similarity index 100% rename from OpenAI_and_LLMs/Custom LangChain agent written in JavaScript.txt rename to OpenAI_and_LLMs/Custom LangChain agent written in JavaScript.json diff --git a/OpenAI_and_LLMs/Daily Podcast Summary.txt b/OpenAI_and_LLMs/Daily Podcast Summary.json similarity index 100% rename from OpenAI_and_LLMs/Daily Podcast Summary.txt rename to OpenAI_and_LLMs/Daily Podcast Summary.json diff --git a/OpenAI_and_LLMs/Daily meetings summarization with Gemini AI.txt b/OpenAI_and_LLMs/Daily meetings summarization with Gemini AI.json similarity index 100% rename from OpenAI_and_LLMs/Daily meetings summarization with Gemini AI.txt rename to OpenAI_and_LLMs/Daily meetings summarization with Gemini AI.json diff --git a/OpenAI_and_LLMs/Detect hallucinations using specialised Ollama model bespoke-minicheck.txt b/OpenAI_and_LLMs/Detect hallucinations using specialised Ollama model bespoke-minicheck.json similarity index 100% rename from OpenAI_and_LLMs/Detect hallucinations using specialised Ollama model 
bespoke-minicheck.txt rename to OpenAI_and_LLMs/Detect hallucinations using specialised Ollama model bespoke-minicheck.json diff --git a/OpenAI_and_LLMs/Dynamically generate a webpage from user request using OpenAI Structured Output (1).txt b/OpenAI_and_LLMs/Dynamically generate a webpage from user request using OpenAI Structured Output.json similarity index 100% rename from OpenAI_and_LLMs/Dynamically generate a webpage from user request using OpenAI Structured Output (1).txt rename to OpenAI_and_LLMs/Dynamically generate a webpage from user request using OpenAI Structured Output.json diff --git a/OpenAI_and_LLMs/Dynamically generate a webpage from user request using OpenAI Structured Output.txt b/OpenAI_and_LLMs/Dynamically generate a webpage from user request using OpenAI Structured Output.txt deleted file mode 100644 index 0d516e7..0000000 --- a/OpenAI_and_LLMs/Dynamically generate a webpage from user request using OpenAI Structured Output.txt +++ /dev/null @@ -1,224 +0,0 @@ -{ -"id": "eXiaTDyKfXpMeyLh", -"meta": { -"instanceId": "f4f5d195bb2162a0972f737368404b18be694648d365d6c6771d7b4909d28167", -"templateCredsSetupCompleted": true -}, -"name": "Dynamically generate HTML page from user request using OpenAI Structured Output", -"tags": [], -"nodes": [ -{ -"id": "b1d9659f-4cd0-4f87-844d-32b2af1dcf13", -"name": "Respond to Webhook", -"type": "n8n-nodes-base.respondToWebhook", -"position": [ -2160, -380 -], -"parameters": { -"options": { -"responseHeaders": { -"entries": [ -{ -"name": "Content-Type", -"value": "text/html; charset=UTF-8" -} -] -} -}, -"respondWith": "text", -"responseBody": "={{ $json.html }}" -}, -"typeVersion": 1.1 -}, -{ -"id": "5ca8ad3e-7702-4f07-af24-d38e94fdc4ec", -"name": "Open AI - Using Structured Output", -"type": "n8n-nodes-base.httpRequest", -"position": [ -1240, -380 -], -"parameters": { -"url": "https://api.openai.com/v1/chat/completions", -"method": "POST", -"options": {}, -"jsonBody": "={\n \"model\": \"gpt-4o-2024-08-06\",\n 
\"messages\": [\n {\n \"role\": \"system\",\n \"content\": \"You are a user interface designer and copy writter. Your job is to help users visualize their website ideas. You design elegant and simple webs, with professional text. You use Tailwind framework\"\n },\n {\n \"role\": \"user\",\n \"content\": \"{{ $json.query.query }}\"\n }\n ],\n \"response_format\":\n{\n \"type\": \"json_schema\",\n \"json_schema\": {\n \"name\": \"ui\",\n \"description\": \"Dynamically generated UI\",\n \"strict\": true,\n \"schema\": {\n \"type\": \"object\",\n \"properties\": {\n \"type\": {\n \"type\": \"string\",\n \"description\": \"The type of the UI component\",\n \"enum\": [\n \"div\",\n \"span\",\n \"a\",\n \"p\",\n \"h1\",\n \"h2\",\n \"h3\",\n \"h4\",\n \"h5\",\n \"h6\",\n \"ul\",\n \"ol\",\n \"li\",\n \"img\",\n \"button\",\n \"input\",\n \"textarea\",\n \"select\",\n \"option\",\n \"label\",\n \"form\",\n \"table\",\n \"thead\",\n \"tbody\",\n \"tr\",\n \"th\",\n \"td\",\n \"nav\",\n \"header\",\n \"footer\",\n \"section\",\n \"article\",\n \"aside\",\n \"main\",\n \"figure\",\n \"figcaption\",\n \"blockquote\",\n \"q\",\n \"hr\",\n \"code\",\n \"pre\",\n \"iframe\",\n \"video\",\n \"audio\",\n \"canvas\",\n \"svg\",\n \"path\",\n \"circle\",\n \"rect\",\n \"line\",\n \"polyline\",\n \"polygon\",\n \"g\",\n \"use\",\n \"symbol\"\n]\n },\n \"label\": {\n \"type\": \"string\",\n \"description\": \"The label of the UI component, used for buttons or form fields\"\n },\n \"children\": {\n \"type\": \"array\",\n \"description\": \"Nested UI components\",\n \"items\": {\n \"$ref\": \"#\"\n }\n },\n \"attributes\": {\n \"type\": \"array\",\n \"description\": \"Arbitrary attributes for the UI component, suitable for any element using Tailwind framework\",\n \"items\": {\n \"type\": \"object\",\n \"properties\": {\n \"name\": {\n \"type\": \"string\",\n \"description\": \"The name of the attribute, for example onClick or className\"\n },\n \"value\": {\n \"type\": \"string\",\n 
\"description\": \"The value of the attribute using the Tailwind framework classes\"\n }\n },\n \"additionalProperties\": false,\n \"required\": [\"name\", \"value\"]\n }\n }\n },\n \"required\": [\"type\", \"label\", \"children\", \"attributes\"],\n \"additionalProperties\": false\n }\n }\n}\n}", -"sendBody": true, -"sendHeaders": true, -"specifyBody": "json", -"authentication": "predefinedCredentialType", -"headerParameters": { -"parameters": [ -{ -"name": "Content-Type", -"value": "application/json" -} -] -}, -"nodeCredentialType": "openAiApi" -}, -"credentials": { -"openAiApi": { -"id": "WqzqjezKh8VtxdqA", -"name": "OpenAi account - Baptiste" -} -}, -"typeVersion": 4.2 -}, -{ -"id": "24e5ca73-a3b3-4096-8c66-d84838d89b0c", -"name": "OpenAI - JSON to HTML", -"type": "@n8n/n8n-nodes-langchain.openAi", -"position": [ -1420, -380 -], -"parameters": { -"modelId": { -"__rl": true, -"mode": "list", -"value": "gpt-4o-mini", -"cachedResultName": "GPT-4O-MINI" -}, -"options": { -"temperature": 0.2 -}, -"messages": { -"values": [ -{ -"role": "system", -"content": "You convert a JSON to HTML. 
\nThe JSON output has the following fields:\n- html: the page HTML\n- title: the page title" -}, -{ -"content": "={{ $json.choices[0].message.content }}" -} -] -}, -"jsonOutput": true -}, -"credentials": { -"openAiApi": { -"id": "WqzqjezKh8VtxdqA", -"name": "OpenAi account - Baptiste" -} -}, -"typeVersion": 1.3 -}, -{ -"id": "c50bdc84-ba59-4f30-acf7-496cee25068d", -"name": "Format the HTML result", -"type": "n8n-nodes-base.html", -"position": [ -1940, -380 -], -"parameters": { -"html": "\n\n\n\n \n \n {{ $json.message.content.title }}\n\n\n{{ $json.message.content.html }}\n\n" -}, -"typeVersion": 1.2 -}, -{ -"id": "193093f4-b1ce-4964-ab10-c3208e343c69", -"name": "Sticky Note", -"type": "n8n-nodes-base.stickyNote", -"position": [ -1134, -62 -], -"parameters": { -"color": 7, -"width": 638, -"height": 503, -"content": "## Generate HTML from user query\n\n**HTTP Request node**\n- Send the user query to OpenAI, with a defined JSON response format - *using HTTP Request node as it has not yet been implemented in the OpenAI nodes*\n- The response format is inspired by the [Structured Output defined in OpenAI Introduction post](https://openai.com/index/introducing-structured-outputs-in-the-api)\n- The output is a JSON containing HTML components and attributed\n\n\n**OpenAI node**\n- Format the response from the previous node from JSON format to HTML format" -}, -"typeVersion": 1 -}, -{ -"id": "0371156a-211f-4d92-82b1-f14fe60d4b6b", -"name": "Sticky Note1", -"type": "n8n-nodes-base.stickyNote", -"position": [ -0, -60 -], -"parameters": { -"color": 7, -"width": 768, -"height": 503, -"content": "## Workflow: Dynamically generate an HTML page from a user request using OpenAI Structured Output\n\n**Overview**\n- This workflow is a experiment to build HTML pages from a user input using the new Structured Output from OpenAI.\n- The Structured Output could be used in a variety of cases. 
Essentially, it guarantees the output from the GPT will follow a defined structure (JSON object).\n- It uses Tailwind CSS to make it slightly nicer, but any\n\n**How it works**\n- Once active, go to the production URL and add what you'd like to build as the parameter \"query\"\n- Example: https://production_url.com?query=a%20signup%20form\n- OpenAI nodes will first output the UI as a JSON then convert it to HTML\n- Finally, the response is integrated in a HTML container and rendered to the user\n\n**Further thoughts**\n- Results are not yet amazing, it is hard to see the direct value of such an experiment\n- But it showcase the potential of the Structured Output. Being able to guarantee the output format is key to build robust AI applications." -}, -"typeVersion": 1 -}, -{ -"id": "06380781-5189-4d99-9ecd-d8913ce40fd5", -"name": "Webhook", -"type": "n8n-nodes-base.webhook", -"position": [ -820, -380 -], -"webhookId": "d962c916-6369-431a-9d80-af6e6a50fdf5", -"parameters": { -"path": "d962c916-6369-431a-9d80-af6e6a50fdf5", -"options": { -"allowedOrigins": "*" -}, -"responseMode": "responseNode" -}, -"typeVersion": 2 -} -], -"active": true, -"pinData": {}, -"settings": { -"executionOrder": "v1" -}, -"versionId": "d2307a2a-5427-4769-94a6-10eab703a788", -"connections": { -"Webhook": { -"main": [ -[ -{ -"node": "Open AI - Using Structured Output", -"type": "main", -"index": 0 -} -] -] -}, -"OpenAI - JSON to HTML": { -"main": [ -[ -{ -"node": "Format the HTML result", -"type": "main", -"index": 0 -} -] -] -}, -"Format the HTML result": { -"main": [ -[ -{ -"node": "Respond to Webhook", -"type": "main", -"index": 0 -} -] -] -}, -"Open AI - Using Structured Output": { -"main": [ -[ -{ -"node": "OpenAI - JSON to HTML", -"type": "main", -"index": 0 -} -] -] -} -} -} \ No newline at end of file diff --git a/OpenAI_and_LLMs/Easy Image Captioning with Gemini 1.5 Pro.txt b/OpenAI_and_LLMs/Easy Image Captioning with Gemini 1.5 Pro.json similarity index 100% rename from 
OpenAI_and_LLMs/Easy Image Captioning with Gemini 1.5 Pro.txt rename to OpenAI_and_LLMs/Easy Image Captioning with Gemini 1.5 Pro.json diff --git a/OpenAI_and_LLMs/Enrich FAQ sections on your website pages at scale with AI.txt b/OpenAI_and_LLMs/Enrich FAQ sections on your website pages at scale with AI.json similarity index 100% rename from OpenAI_and_LLMs/Enrich FAQ sections on your website pages at scale with AI.txt rename to OpenAI_and_LLMs/Enrich FAQ sections on your website pages at scale with AI.json diff --git a/OpenAI_and_LLMs/Extract personal data with self-hosted LLM Mistral NeMo.txt b/OpenAI_and_LLMs/Extract personal data with self-hosted LLM Mistral NeMo.json similarity index 100% rename from OpenAI_and_LLMs/Extract personal data with self-hosted LLM Mistral NeMo.txt rename to OpenAI_and_LLMs/Extract personal data with self-hosted LLM Mistral NeMo.json diff --git a/OpenAI_and_LLMs/Fetch Dynamic Prompts from GitHub and Auto-Populate n8n Expressions in Prompt.txt b/OpenAI_and_LLMs/Fetch Dynamic Prompts from GitHub and Auto-Populate n8n Expressions in Prompt.json similarity index 100% rename from OpenAI_and_LLMs/Fetch Dynamic Prompts from GitHub and Auto-Populate n8n Expressions in Prompt.txt rename to OpenAI_and_LLMs/Fetch Dynamic Prompts from GitHub and Auto-Populate n8n Expressions in Prompt.json diff --git a/OpenAI_and_LLMs/Flux AI Image Generator.txt b/OpenAI_and_LLMs/Flux AI Image Generator.json similarity index 100% rename from OpenAI_and_LLMs/Flux AI Image Generator.txt rename to OpenAI_and_LLMs/Flux AI Image Generator.json diff --git a/OpenAI_and_LLMs/Force AI to use a specific output format.txt b/OpenAI_and_LLMs/Force AI to use a specific output format.json similarity index 100% rename from OpenAI_and_LLMs/Force AI to use a specific output format.txt rename to OpenAI_and_LLMs/Force AI to use a specific output format.json diff --git a/OpenAI_and_LLMs/Generate 9_16 Images from Content and Brand Guidelines.txt b/OpenAI_and_LLMs/Generate 9_16 Images 
from Content and Brand Guidelines.json similarity index 100% rename from OpenAI_and_LLMs/Generate 9_16 Images from Content and Brand Guidelines.txt rename to OpenAI_and_LLMs/Generate 9_16 Images from Content and Brand Guidelines.json diff --git a/OpenAI_and_LLMs/Generate Text-to-Speech Using Elevenlabs via API.txt b/OpenAI_and_LLMs/Generate Text-to-Speech Using Elevenlabs via API.json similarity index 100% rename from OpenAI_and_LLMs/Generate Text-to-Speech Using Elevenlabs via API.txt rename to OpenAI_and_LLMs/Generate Text-to-Speech Using Elevenlabs via API.json diff --git a/OpenAI_and_LLMs/Generate audio from text using OpenAI and Webhook _ Text to Speech Workflow.txt b/OpenAI_and_LLMs/Generate audio from text using OpenAI and Webhook _ Text to Speech Workflow.json similarity index 100% rename from OpenAI_and_LLMs/Generate audio from text using OpenAI and Webhook _ Text to Speech Workflow.txt rename to OpenAI_and_LLMs/Generate audio from text using OpenAI and Webhook _ Text to Speech Workflow.json diff --git a/OpenAI_and_LLMs/Generating Image Embeddings via Textual Summarisation.txt b/OpenAI_and_LLMs/Generating Image Embeddings via Textual Summarisation.json similarity index 100% rename from OpenAI_and_LLMs/Generating Image Embeddings via Textual Summarisation.txt rename to OpenAI_and_LLMs/Generating Image Embeddings via Textual Summarisation.json diff --git a/OpenAI_and_LLMs/Narrating over a Video using Multimodal AI.txt b/OpenAI_and_LLMs/Narrating over a Video using Multimodal AI.json similarity index 100% rename from OpenAI_and_LLMs/Narrating over a Video using Multimodal AI.txt rename to OpenAI_and_LLMs/Narrating over a Video using Multimodal AI.json diff --git a/OpenAI_and_LLMs/OpenAI Assistant workflow_ upload file, create an Assistant, chat with it!.txt b/OpenAI_and_LLMs/OpenAI Assistant workflow_ upload file, create an Assistant, chat with it!.json similarity index 100% rename from OpenAI_and_LLMs/OpenAI Assistant workflow_ upload file, create an 
Assistant, chat with it!.txt rename to OpenAI_and_LLMs/OpenAI Assistant workflow_ upload file, create an Assistant, chat with it!.json diff --git a/OpenAI_and_LLMs/OpenAI assistant with custom tools.txt b/OpenAI_and_LLMs/OpenAI assistant with custom tools.json similarity index 100% rename from OpenAI_and_LLMs/OpenAI assistant with custom tools.txt rename to OpenAI_and_LLMs/OpenAI assistant with custom tools.json diff --git a/OpenAI_and_LLMs/OpenAI examples_ ChatGPT, DALLE-2, Whisper-1 – 5-in-1.txt b/OpenAI_and_LLMs/OpenAI examples_ ChatGPT, DALLE-2, Whisper-1 – 5-in-1.json similarity index 100% rename from OpenAI_and_LLMs/OpenAI examples_ ChatGPT, DALLE-2, Whisper-1 – 5-in-1.txt rename to OpenAI_and_LLMs/OpenAI examples_ ChatGPT, DALLE-2, Whisper-1 – 5-in-1.json diff --git a/OpenAI_and_LLMs/Organise Your Local File Directories With AI.txt b/OpenAI_and_LLMs/Organise Your Local File Directories With AI.json similarity index 100% rename from OpenAI_and_LLMs/Organise Your Local File Directories With AI.txt rename to OpenAI_and_LLMs/Organise Your Local File Directories With AI.json diff --git a/OpenAI_and_LLMs/Personal Shopper Chatbot for WooCommerce with RAG using Google Drive and openAI.txt b/OpenAI_and_LLMs/Personal Shopper Chatbot for WooCommerce with RAG using Google Drive and openAI.json similarity index 100% rename from OpenAI_and_LLMs/Personal Shopper Chatbot for WooCommerce with RAG using Google Drive and openAI.txt rename to OpenAI_and_LLMs/Personal Shopper Chatbot for WooCommerce with RAG using Google Drive and openAI.json diff --git a/OpenAI_and_LLMs/Prompt-based Object Detection with Gemini 2.0.txt b/OpenAI_and_LLMs/Prompt-based Object Detection with Gemini 2.0.json similarity index 100% rename from OpenAI_and_LLMs/Prompt-based Object Detection with Gemini 2.0.txt rename to OpenAI_and_LLMs/Prompt-based Object Detection with Gemini 2.0.json diff --git a/OpenAI_and_LLMs/Proxmox AI Agent with n8n and Generative AI Integration.txt b/OpenAI_and_LLMs/Proxmox AI 
Agent with n8n and Generative AI Integration.json similarity index 100% rename from OpenAI_and_LLMs/Proxmox AI Agent with n8n and Generative AI Integration.txt rename to OpenAI_and_LLMs/Proxmox AI Agent with n8n and Generative AI Integration.json diff --git a/OpenAI_and_LLMs/Query n8n Credentials with AI SQL Agent.txt b/OpenAI_and_LLMs/Query n8n Credentials with AI SQL Agent.json similarity index 100% rename from OpenAI_and_LLMs/Query n8n Credentials with AI SQL Agent.txt rename to OpenAI_and_LLMs/Query n8n Credentials with AI SQL Agent.json diff --git a/OpenAI_and_LLMs/Suggest meeting slots using AI.txt b/OpenAI_and_LLMs/Suggest meeting slots using AI.json similarity index 100% rename from OpenAI_and_LLMs/Suggest meeting slots using AI.txt rename to OpenAI_and_LLMs/Suggest meeting slots using AI.json diff --git a/OpenAI_and_LLMs/Summarize YouTube Videos from Transcript.txt b/OpenAI_and_LLMs/Summarize YouTube Videos from Transcript.json similarity index 100% rename from OpenAI_and_LLMs/Summarize YouTube Videos from Transcript.txt rename to OpenAI_and_LLMs/Summarize YouTube Videos from Transcript.json diff --git a/OpenAI_and_LLMs/Transform Image to Lego Style Using Line and Dall-E.txt b/OpenAI_and_LLMs/Transform Image to Lego Style Using Line and Dall-E.json similarity index 100% rename from OpenAI_and_LLMs/Transform Image to Lego Style Using Line and Dall-E.txt rename to OpenAI_and_LLMs/Transform Image to Lego Style Using Line and Dall-E.json diff --git a/OpenAI_and_LLMs/Translate audio using AI.txt b/OpenAI_and_LLMs/Translate audio using AI.json similarity index 100% rename from OpenAI_and_LLMs/Translate audio using AI.txt rename to OpenAI_and_LLMs/Translate audio using AI.json diff --git a/OpenAI_and_LLMs/Use OpenRouter in n8n versions _1.78.txt b/OpenAI_and_LLMs/Use OpenRouter in n8n versions _1.78.json similarity index 100% rename from OpenAI_and_LLMs/Use OpenRouter in n8n versions _1.78.txt rename to OpenAI_and_LLMs/Use OpenRouter in n8n versions _1.78.json 
diff --git a/OpenAI_and_LLMs/lemlist __ GPT-3_ Supercharge your sales workflows.txt b/OpenAI_and_LLMs/lemlist __ GPT-3_ Supercharge your sales workflows.json similarity index 100% rename from OpenAI_and_LLMs/lemlist __ GPT-3_ Supercharge your sales workflows.txt rename to OpenAI_and_LLMs/lemlist __ GPT-3_ Supercharge your sales workflows.json diff --git a/OpenAI_and_LLMs/⚡AI-Powered YouTube Video Summarization & Analysis.txt b/OpenAI_and_LLMs/⚡AI-Powered YouTube Video Summarization & Analysis.json similarity index 100% rename from OpenAI_and_LLMs/⚡AI-Powered YouTube Video Summarization & Analysis.txt rename to OpenAI_and_LLMs/⚡AI-Powered YouTube Video Summarization & Analysis.json diff --git a/OpenAI_and_LLMs/🎨 Interactive Image Editor with FLUX.1 Fill Tool for Inpainting.txt b/OpenAI_and_LLMs/🎨 Interactive Image Editor with FLUX.1 Fill Tool for Inpainting.json similarity index 100% rename from OpenAI_and_LLMs/🎨 Interactive Image Editor with FLUX.1 Fill Tool for Inpainting.txt rename to OpenAI_and_LLMs/🎨 Interactive Image Editor with FLUX.1 Fill Tool for Inpainting.json diff --git a/OpenAI_and_LLMs/🐋DeepSeek V3 Chat & R1 Reasoning Quick Start.txt b/OpenAI_and_LLMs/🐋DeepSeek V3 Chat & R1 Reasoning Quick Start.json similarity index 100% rename from OpenAI_and_LLMs/🐋DeepSeek V3 Chat & R1 Reasoning Quick Start.txt rename to OpenAI_and_LLMs/🐋DeepSeek V3 Chat & R1 Reasoning Quick Start.json diff --git a/OpenAI_and_LLMs/📚 Auto-generate documentation for n8n workflows with GPT and Docsify.txt b/OpenAI_and_LLMs/📚 Auto-generate documentation for n8n workflows with GPT and Docsify.json similarity index 100% rename from OpenAI_and_LLMs/📚 Auto-generate documentation for n8n workflows with GPT and Docsify.txt rename to OpenAI_and_LLMs/📚 Auto-generate documentation for n8n workflows with GPT and Docsify.json diff --git a/OpenAI_and_LLMs/🔐🦙🤖 Private & Local Ollama Self-Hosted AI Assistant.txt b/OpenAI_and_LLMs/🔐🦙🤖 Private & Local Ollama Self-Hosted AI Assistant.json similarity 
index 100% rename from OpenAI_and_LLMs/🔐🦙🤖 Private & Local Ollama Self-Hosted AI Assistant.txt rename to OpenAI_and_LLMs/🔐🦙🤖 Private & Local Ollama Self-Hosted AI Assistant.json diff --git a/OpenAI_and_LLMs/🔥📈🤖 AI Agent for n8n Creators Leaderboard - Find Popular Workflows.txt b/OpenAI_and_LLMs/🔥📈🤖 AI Agent for n8n Creators Leaderboard - Find Popular Workflows.json similarity index 100% rename from OpenAI_and_LLMs/🔥📈🤖 AI Agent for n8n Creators Leaderboard - Find Popular Workflows.txt rename to OpenAI_and_LLMs/🔥📈🤖 AI Agent for n8n Creators Leaderboard - Find Popular Workflows.json diff --git a/OpenAI_and_LLMs/🚀 Local Multi-LLM Testing & Performance Tracker.txt b/OpenAI_and_LLMs/🚀 Local Multi-LLM Testing & Performance Tracker.json similarity index 100% rename from OpenAI_and_LLMs/🚀 Local Multi-LLM Testing & Performance Tracker.txt rename to OpenAI_and_LLMs/🚀 Local Multi-LLM Testing & Performance Tracker.json diff --git a/OpenAI_and_LLMs/🤖🧑_💻 AI Agent for Top n8n Creators Leaderboard Reporting.txt b/OpenAI_and_LLMs/🤖🧑_💻 AI Agent for Top n8n Creators Leaderboard Reporting.json similarity index 100% rename from OpenAI_and_LLMs/🤖🧑_💻 AI Agent for Top n8n Creators Leaderboard Reporting.txt rename to OpenAI_and_LLMs/🤖🧑_💻 AI Agent for Top n8n Creators Leaderboard Reporting.json diff --git a/Other/ALL_unique_nodes.txt b/Other/ALL_unique_nodes.json similarity index 100% rename from Other/ALL_unique_nodes.txt rename to Other/ALL_unique_nodes.json diff --git a/Other_Integrations_and_Use_Cases/API Schema Extractor.txt b/Other_Integrations_and_Use_Cases/API Schema Extractor.json similarity index 100% rename from Other_Integrations_and_Use_Cases/API Schema Extractor.txt rename to Other_Integrations_and_Use_Cases/API Schema Extractor.json diff --git a/Other_Integrations_and_Use_Cases/Analyze feedback and send a message on Mattermost.txt b/Other_Integrations_and_Use_Cases/Analyze feedback and send a message on Mattermost.json similarity index 100% rename from 
Other_Integrations_and_Use_Cases/Analyze feedback and send a message on Mattermost.txt rename to Other_Integrations_and_Use_Cases/Analyze feedback and send a message on Mattermost.json diff --git a/Other_Integrations_and_Use_Cases/Analyze feedback using AWS Comprehend and send it to a Mattermost channel.txt b/Other_Integrations_and_Use_Cases/Analyze feedback using AWS Comprehend and send it to a Mattermost channel.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Analyze feedback using AWS Comprehend and send it to a Mattermost channel.txt rename to Other_Integrations_and_Use_Cases/Analyze feedback using AWS Comprehend and send it to a Mattermost channel.json diff --git a/Other_Integrations_and_Use_Cases/Automate Pinterest Analysis & AI-Powered Content Suggestions With Pinterest API.txt b/Other_Integrations_and_Use_Cases/Automate Pinterest Analysis & AI-Powered Content Suggestions With Pinterest API.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Automate Pinterest Analysis & AI-Powered Content Suggestions With Pinterest API.txt rename to Other_Integrations_and_Use_Cases/Automate Pinterest Analysis & AI-Powered Content Suggestions With Pinterest API.json diff --git a/Other_Integrations_and_Use_Cases/Automate SIEM Alert Enrichment with MITRE ATT&CK, Qdrant & Zendesk in n8n.txt b/Other_Integrations_and_Use_Cases/Automate SIEM Alert Enrichment with MITRE ATT&CK, Qdrant & Zendesk in n8n.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Automate SIEM Alert Enrichment with MITRE ATT&CK, Qdrant & Zendesk in n8n.txt rename to Other_Integrations_and_Use_Cases/Automate SIEM Alert Enrichment with MITRE ATT&CK, Qdrant & Zendesk in n8n.json diff --git a/Other_Integrations_and_Use_Cases/Automate Screenshots with URLbox & Analyze them with AI.txt b/Other_Integrations_and_Use_Cases/Automate Screenshots with URLbox & Analyze them with AI.json similarity index 100% rename from 
Other_Integrations_and_Use_Cases/Automate Screenshots with URLbox & Analyze them with AI.txt rename to Other_Integrations_and_Use_Cases/Automate Screenshots with URLbox & Analyze them with AI.json diff --git a/Other_Integrations_and_Use_Cases/Automate testimonials in Strapi with n8n.txt b/Other_Integrations_and_Use_Cases/Automate testimonials in Strapi with n8n.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Automate testimonials in Strapi with n8n.txt rename to Other_Integrations_and_Use_Cases/Automate testimonials in Strapi with n8n.json diff --git a/Other_Integrations_and_Use_Cases/Bitrix24 Chatbot Application Workflow example with Webhook Integration.txt b/Other_Integrations_and_Use_Cases/Bitrix24 Chatbot Application Workflow example with Webhook Integration.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Bitrix24 Chatbot Application Workflow example with Webhook Integration.txt rename to Other_Integrations_and_Use_Cases/Bitrix24 Chatbot Application Workflow example with Webhook Integration.json diff --git a/Other_Integrations_and_Use_Cases/ChatGPT Automatic Code Review in Gitlab MR.txt b/Other_Integrations_and_Use_Cases/ChatGPT Automatic Code Review in Gitlab MR.json similarity index 100% rename from Other_Integrations_and_Use_Cases/ChatGPT Automatic Code Review in Gitlab MR.txt rename to Other_Integrations_and_Use_Cases/ChatGPT Automatic Code Review in Gitlab MR.json diff --git a/Other_Integrations_and_Use_Cases/Classify new bugs in Linear with OpenAI_s GPT-4 and move them to the right team.txt b/Other_Integrations_and_Use_Cases/Classify new bugs in Linear with OpenAI_s GPT-4 and move them to the right team.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Classify new bugs in Linear with OpenAI_s GPT-4 and move them to the right team.txt rename to Other_Integrations_and_Use_Cases/Classify new bugs in Linear with OpenAI_s GPT-4 and move them to the right team.json diff --git 
a/Other_Integrations_and_Use_Cases/Create, update, and get a profile in Humantic AI.txt b/Other_Integrations_and_Use_Cases/Create, update, and get a profile in Humantic AI.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Create, update, and get a profile in Humantic AI.txt rename to Other_Integrations_and_Use_Cases/Create, update, and get a profile in Humantic AI.json diff --git a/Other_Integrations_and_Use_Cases/Enhance Customer Chat by Buffering Messages with Twilio and Redis.txt b/Other_Integrations_and_Use_Cases/Enhance Customer Chat by Buffering Messages with Twilio and Redis.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Enhance Customer Chat by Buffering Messages with Twilio and Redis.txt rename to Other_Integrations_and_Use_Cases/Enhance Customer Chat by Buffering Messages with Twilio and Redis.json diff --git a/Other_Integrations_and_Use_Cases/Hacker News Throwback Machine - See What Was Hot on This Day, Every Year!.txt b/Other_Integrations_and_Use_Cases/Hacker News Throwback Machine - See What Was Hot on This Day, Every Year!.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Hacker News Throwback Machine - See What Was Hot on This Day, Every Year!.txt rename to Other_Integrations_and_Use_Cases/Hacker News Throwback Machine - See What Was Hot on This Day, Every Year!.json diff --git a/Other_Integrations_and_Use_Cases/Handling Appointment Leads and Follow-up With Twilio, Cal.com and AI.txt b/Other_Integrations_and_Use_Cases/Handling Appointment Leads and Follow-up With Twilio, Cal.com and AI.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Handling Appointment Leads and Follow-up With Twilio, Cal.com and AI.txt rename to Other_Integrations_and_Use_Cases/Handling Appointment Leads and Follow-up With Twilio, Cal.com and AI.json diff --git a/Other_Integrations_and_Use_Cases/Integrating AI with Open-Meteo API for Enhanced Weather Forecasting.txt 
b/Other_Integrations_and_Use_Cases/Integrating AI with Open-Meteo API for Enhanced Weather Forecasting.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Integrating AI with Open-Meteo API for Enhanced Weather Forecasting.txt rename to Other_Integrations_and_Use_Cases/Integrating AI with Open-Meteo API for Enhanced Weather Forecasting.json diff --git a/Other_Integrations_and_Use_Cases/Introduction to the HTTP Tool.txt b/Other_Integrations_and_Use_Cases/Introduction to the HTTP Tool.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Introduction to the HTTP Tool.txt rename to Other_Integrations_and_Use_Cases/Introduction to the HTTP Tool.json diff --git a/Other_Integrations_and_Use_Cases/KB Tool - Confluence Knowledge Base.txt b/Other_Integrations_and_Use_Cases/KB Tool - Confluence Knowledge Base.json similarity index 100% rename from Other_Integrations_and_Use_Cases/KB Tool - Confluence Knowledge Base.txt rename to Other_Integrations_and_Use_Cases/KB Tool - Confluence Knowledge Base.json diff --git a/Other_Integrations_and_Use_Cases/LINE Assistant with Google Calendar and Gmail Integration.txt b/Other_Integrations_and_Use_Cases/LINE Assistant with Google Calendar and Gmail Integration.json similarity index 100% rename from Other_Integrations_and_Use_Cases/LINE Assistant with Google Calendar and Gmail Integration.txt rename to Other_Integrations_and_Use_Cases/LINE Assistant with Google Calendar and Gmail Integration.json diff --git a/Other_Integrations_and_Use_Cases/Monthly Spotify Track Archiving and Playlist Classification.txt b/Other_Integrations_and_Use_Cases/Monthly Spotify Track Archiving and Playlist Classification.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Monthly Spotify Track Archiving and Playlist Classification.txt rename to Other_Integrations_and_Use_Cases/Monthly Spotify Track Archiving and Playlist Classification.json diff --git a/Other_Integrations_and_Use_Cases/Obsidian Notes 
Read Aloud using AI_ Available as a Podcast Feed.txt b/Other_Integrations_and_Use_Cases/Obsidian Notes Read Aloud using AI_ Available as a Podcast Feed.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Obsidian Notes Read Aloud using AI_ Available as a Podcast Feed.txt rename to Other_Integrations_and_Use_Cases/Obsidian Notes Read Aloud using AI_ Available as a Podcast Feed.json diff --git a/Other_Integrations_and_Use_Cases/Optimize & Update Printify Title and Description Workflow.txt b/Other_Integrations_and_Use_Cases/Optimize & Update Printify Title and Description Workflow.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Optimize & Update Printify Title and Description Workflow.txt rename to Other_Integrations_and_Use_Cases/Optimize & Update Printify Title and Description Workflow.json diff --git a/Other_Integrations_and_Use_Cases/Qualify replies from Pipedrive persons with AI.txt b/Other_Integrations_and_Use_Cases/Qualify replies from Pipedrive persons with AI.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Qualify replies from Pipedrive persons with AI.txt rename to Other_Integrations_and_Use_Cases/Qualify replies from Pipedrive persons with AI.json diff --git a/Other_Integrations_and_Use_Cases/Siri AI Agent_ Apple Shortcuts powered voice template.txt b/Other_Integrations_and_Use_Cases/Siri AI Agent_ Apple Shortcuts powered voice template.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Siri AI Agent_ Apple Shortcuts powered voice template.txt rename to Other_Integrations_and_Use_Cases/Siri AI Agent_ Apple Shortcuts powered voice template.json diff --git a/Other_Integrations_and_Use_Cases/Text automations using Apple Shortcuts (1).txt b/Other_Integrations_and_Use_Cases/Text automations using Apple Shortcuts.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Text automations using Apple Shortcuts (1).txt rename to 
Other_Integrations_and_Use_Cases/Text automations using Apple Shortcuts.json diff --git a/Other_Integrations_and_Use_Cases/Text automations using Apple Shortcuts.txt b/Other_Integrations_and_Use_Cases/Text automations using Apple Shortcuts.txt deleted file mode 100644 index 45c3dda..0000000 --- a/Other_Integrations_and_Use_Cases/Text automations using Apple Shortcuts.txt +++ /dev/null @@ -1,504 +0,0 @@ -{ -"meta": { -"instanceId": "f4f5d195bb2162a0972f737368404b18be694648d365d6c6771d7b4909d28167" -}, -"nodes": [ -{ -"id": "b165115d-5505-4e03-bf41-c21320cb8b09", -"name": "Sticky Note1", -"type": "n8n-nodes-base.stickyNote", -"position": [ -80, -40 -], -"parameters": { -"color": 7, -"width": 681.8337349708484, -"height": 843.1482165886073, -"content": "## Workflow: Text automations using Apple Shortcuts\n\n**Overview**\n- This workflow answers user requests sent via Apple Shortcuts\n- Several Shortcuts call the same webhook, with a query and a type of query\n- Types of query are:\n - translate to english\n - translate to spanish\n - correct grammar (without changing the actual content)\n - make content shorter\n - make content longer\n\n\n**How it works**\n- Select a text you are writing\n- Launch the shortcut\n- The text is sent to the webhook\n- Depending on the type of request, a different prompt is used\n- Each request is sent to an OpenAI node\n- The workflow responds to the request with the response from GPT\n- Shortcut replace the selected text with the new one\n\n**How to use it**\n- Activate the workflow\n- Download [this Shortcut template](https://drive.usercontent.google.com/u/0/uc?id=16zs5iJX7KeX_4e0SoV49_KfbU7-EF0NE&export=download)\n- Install the shortcut\n- In step 2 of the shortcut, change the url of the Webhook\n- In Shortcut details, \"add Keyboard Shortcut\" with the key you want to use to launch the shortcut\n- Go to settings, advanced, check \"Allow running scripts\"\n- You are ready to use the shortcut. 
Select a text and hit the keyboard shortcut you just defined\n\n\n**Notes**\n- If you use rich formatting, you'll have to test multiple ways to replace characters in the output. For example, you might use `{{ $json.message.content.output.replaceAll('\\n', \"
\") }}` in the \"Respond to Shortcut\" node depending on the app you use most.\n- This is a basic example that you can extend and modify at your will\n- You can duplicate and modify the example shortcut based on your need, as well as making new automations in this workflow." -}, -"typeVersion": 1 -}, -{ -"id": "c45400b8-d3b8-47f7-81c6-d791bce4c266", -"name": "Switch", -"type": "n8n-nodes-base.switch", -"position": [ -1020, -380 -], -"parameters": { -"rules": { -"values": [ -{ -"outputKey": "spanish", -"conditions": { -"options": { -"version": 2, -"leftValue": "", -"caseSensitive": true, -"typeValidation": "strict" -}, -"combinator": "and", -"conditions": [ -{ -"operator": { -"type": "string", -"operation": "equals" -}, -"leftValue": "={{ $json.body.type }}", -"rightValue": "spanish" -} -] -}, -"renameOutput": true -}, -{ -"outputKey": "english", -"conditions": { -"options": { -"version": 2, -"leftValue": "", -"caseSensitive": true, -"typeValidation": "strict" -}, -"combinator": "and", -"conditions": [ -{ -"id": "bedb302f-646c-4dcd-8246-1fcfecfe3f2e", -"operator": { -"name": "filter.operator.equals", -"type": "string", -"operation": "equals" -}, -"leftValue": "={{ $json.body.type }}", -"rightValue": "english" -} -] -}, -"renameOutput": true -}, -{ -"outputKey": "grammar", -"conditions": { -"options": { -"version": 2, -"leftValue": "", -"caseSensitive": true, -"typeValidation": "strict" -}, -"combinator": "and", -"conditions": [ -{ -"id": "94e6cf7d-576d-4ad9-85b0-c6b945eb41b7", -"operator": { -"name": "filter.operator.equals", -"type": "string", -"operation": "equals" -}, -"leftValue": "={{ $json.body.type }}", -"rightValue": "grammar" -} -] -}, -"renameOutput": true -}, -{ -"outputKey": "shorter", -"conditions": { -"options": { -"version": 2, -"leftValue": "", -"caseSensitive": true, -"typeValidation": "strict" -}, -"combinator": "and", -"conditions": [ -{ -"id": "1ed0d1e1-2df0-4f8d-b102-4004a25919ed", -"operator": { -"name": "filter.operator.equals", -"type": 
"string", -"operation": "equals" -}, -"leftValue": "={{ $json.body.type }}", -"rightValue": "shorter" -} -] -}, -"renameOutput": true -}, -{ -"outputKey": "longer", -"conditions": { -"options": { -"version": 2, -"leftValue": "", -"caseSensitive": true, -"typeValidation": "strict" -}, -"combinator": "and", -"conditions": [ -{ -"id": "4756df03-7e7c-4e28-9b37-14684326b083", -"operator": { -"name": "filter.operator.equals", -"type": "string", -"operation": "equals" -}, -"leftValue": "={{ $json.body.type }}", -"rightValue": "longer" -} -] -}, -"renameOutput": true -} -] -}, -"options": {} -}, -"typeVersion": 3.2 -}, -{ -"id": "48e0e58e-6293-4e11-a488-ca9943b53484", -"name": "Respond to Shortcut", -"type": "n8n-nodes-base.respondToWebhook", -"position": [ -1840, -400 -], -"parameters": { -"options": {}, -"respondWith": "text", -"responseBody": "={{ $json.message.content.output.replaceAll('\\n', '
') }}" -}, -"typeVersion": 1.1 -}, -{ -"id": "2655b782-9538-416c-ae65-35f8c77889c7", -"name": "Webhook from Shortcut", -"type": "n8n-nodes-base.webhook", -"position": [ -840, -400 -], -"webhookId": "e4ddadd2-a127-4690-98ca-e9ee75c1bdd6", -"parameters": { -"path": "shortcut-global-as", -"options": {}, -"httpMethod": "POST", -"responseMode": "responseNode" -}, -"typeVersion": 2 -}, -{ -"id": "880ed4a2-0756-4943-a51f-368678e22273", -"name": "OpenAI - Make Shorter", -"type": "@n8n/n8n-nodes-langchain.openAi", -"position": [ -1300, -540 -], -"parameters": { -"modelId": { -"__rl": true, -"mode": "list", -"value": "gpt-4o-mini", -"cachedResultName": "GPT-4O-MINI" -}, -"options": {}, -"messages": { -"values": [ -{ -"role": "system", -"content": "Summarize this content a little bit (5% shorter)\nOutput a JSON with a single field: output" -}, -{ -"content": "={{ $json.body.content }}" -} -] -}, -"jsonOutput": true -}, -"credentials": { -"openAiApi": { -"id": "WqzqjezKh8VtxdqA", -"name": "OpenAi account - Baptiste" -} -}, -"typeVersion": 1.4 -}, -{ -"id": "c6c6d988-7aab-4677-af1f-880d05691ec3", -"name": "OpenAI - Make Longer", -"type": "@n8n/n8n-nodes-langchain.openAi", -"position": [ -1300, -680 -], -"parameters": { -"modelId": { -"__rl": true, -"mode": "list", -"value": "gpt-4o-mini", -"cachedResultName": "GPT-4O-MINI" -}, -"options": {}, -"messages": { -"values": [ -{ -"role": "system", -"content": "Make this content a little longer (5% longer)\nOutput a JSON with a single field: output" -}, -{ -"content": "={{ $json.body.content }}" -} -] -}, -"jsonOutput": true -}, -"credentials": { -"openAiApi": { -"id": "WqzqjezKh8VtxdqA", -"name": "OpenAi account - Baptiste" -} -}, -"typeVersion": 1.4 -}, -{ -"id": "8e6de4b7-22c3-45c9-a8d7-d498cf829b6f", -"name": "OpenAI - Correct Grammar", -"type": "@n8n/n8n-nodes-langchain.openAi", -"position": [ -1300, -400 -], -"parameters": { -"modelId": { -"__rl": true, -"mode": "list", -"value": "gpt-4o-mini", -"cachedResultName": "GPT-4O-MINI" 
-}, -"options": {}, -"messages": { -"values": [ -{ -"role": "system", -"content": "Correct grammar only, don't change the actual contents.\nOutput a JSON with a single field: output" -}, -{ -"content": "={{ $json.body.content }}" -} -] -}, -"jsonOutput": true -}, -"credentials": { -"openAiApi": { -"id": "WqzqjezKh8VtxdqA", -"name": "OpenAi account - Baptiste" -} -}, -"typeVersion": 1.4 -}, -{ -"id": "bc006b36-5a96-4c3a-9a28-2778a6c49f10", -"name": "OpenAI - To Spanish", -"type": "@n8n/n8n-nodes-langchain.openAi", -"position": [ -1300, -120 -], -"parameters": { -"modelId": { -"__rl": true, -"mode": "list", -"value": "gpt-4o-mini", -"cachedResultName": "GPT-4O-MINI" -}, -"options": {}, -"messages": { -"values": [ -{ -"role": "system", -"content": "Translate this message to Spanish.\nOutput a JSON with a single field: output" -}, -{ -"content": "={{ $json.body.content }}" -} -] -}, -"jsonOutput": true -}, -"credentials": { -"openAiApi": { -"id": "WqzqjezKh8VtxdqA", -"name": "OpenAi account - Baptiste" -} -}, -"typeVersion": 1.4 -}, -{ -"id": "330d2e40-1e52-4517-94e0-ce96226697fa", -"name": "OpenAI - To English", -"type": "@n8n/n8n-nodes-langchain.openAi", -"position": [ -1300, -260 -], -"parameters": { -"modelId": { -"__rl": true, -"mode": "list", -"value": "gpt-4o-mini", -"cachedResultName": "GPT-4O-MINI" -}, -"options": {}, -"messages": { -"values": [ -{ -"role": "system", -"content": "Translate this message to English.\nOutput a JSON with a single field: output" -}, -{ -"content": "={{ $json.body.content }}" -} -] -}, -"jsonOutput": true -}, -"credentials": { -"openAiApi": { -"id": "WqzqjezKh8VtxdqA", -"name": "OpenAi account - Baptiste" -} -}, -"typeVersion": 1.4 -}, -{ -"id": "925e4b55-ac26-4c16-941f-66d17b6794ab", -"name": "Sticky Note", -"type": "n8n-nodes-base.stickyNote", -"position": [ -80, -900 -], -"parameters": { -"color": 7, -"width": 469.15174499329123, -"height": 341.88919758842485, -"content": "### Check these explanations [< 3 min]\n\n[![Check the 
explanations](https://cdn.loom.com/sessions/thumbnails/c5b657568af64bb1b50fa8e8a91c45d1-1db3990a618986c9-full-play.gif)](https://www.loom.com/share/c5b657568af64bb1b50fa8e8a91c45d1?sid=a406be73-55eb-4754-9f51-9ddf49b22d69)" -}, -"typeVersion": 1 -} -], -"pinData": {}, -"connections": { -"Switch": { -"main": [ -[ -{ -"node": "OpenAI - To Spanish", -"type": "main", -"index": 0 -} -], -[ -{ -"node": "OpenAI - To English", -"type": "main", -"index": 0 -} -], -[ -{ -"node": "OpenAI - Correct Grammar", -"type": "main", -"index": 0 -} -], -[ -{ -"node": "OpenAI - Make Shorter", -"type": "main", -"index": 0 -} -], -[ -{ -"node": "OpenAI - Make Longer", -"type": "main", -"index": 0 -} -] -] -}, -"OpenAI - To English": { -"main": [ -[ -{ -"node": "Respond to Shortcut", -"type": "main", -"index": 0 -} -] -] -}, -"OpenAI - To Spanish": { -"main": [ -[ -{ -"node": "Respond to Shortcut", -"type": "main", -"index": 0 -} -] -] -}, -"OpenAI - Make Longer": { -"main": [ -[ -{ -"node": "Respond to Shortcut", -"type": "main", -"index": 0 -} -] -] -}, -"OpenAI - Make Shorter": { -"main": [ -[ -{ -"node": "Respond to Shortcut", -"type": "main", -"index": 0 -} -] -] -}, -"Webhook from Shortcut": { -"main": [ -[ -{ -"node": "Switch", -"type": "main", -"index": 0 -} -] -] -}, -"OpenAI - Correct Grammar": { -"main": [ -[ -{ -"node": "Respond to Shortcut", -"type": "main", -"index": 0 -} -] -] -} -} -} \ No newline at end of file diff --git a/Other_Integrations_and_Use_Cases/UTM Link Creator & QR Code Generator with Scheduled Google Analytics Reports.txt b/Other_Integrations_and_Use_Cases/UTM Link Creator & QR Code Generator with Scheduled Google Analytics Reports.json similarity index 100% rename from Other_Integrations_and_Use_Cases/UTM Link Creator & QR Code Generator with Scheduled Google Analytics Reports.txt rename to Other_Integrations_and_Use_Cases/UTM Link Creator & QR Code Generator with Scheduled Google Analytics Reports.json diff --git a/Other_Integrations_and_Use_Cases/Use AI to 
organize your Todoist Inbox.txt b/Other_Integrations_and_Use_Cases/Use AI to organize your Todoist Inbox.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Use AI to organize your Todoist Inbox.txt rename to Other_Integrations_and_Use_Cases/Use AI to organize your Todoist Inbox.json diff --git a/Other_Integrations_and_Use_Cases/Using External Workflows as Tools in n8n.txt b/Other_Integrations_and_Use_Cases/Using External Workflows as Tools in n8n.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Using External Workflows as Tools in n8n.txt rename to Other_Integrations_and_Use_Cases/Using External Workflows as Tools in n8n.json diff --git a/Other_Integrations_and_Use_Cases/Visualize your SQL Agent queries with OpenAI and Quickchart.io.txt b/Other_Integrations_and_Use_Cases/Visualize your SQL Agent queries with OpenAI and Quickchart.io.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Visualize your SQL Agent queries with OpenAI and Quickchart.io.txt rename to Other_Integrations_and_Use_Cases/Visualize your SQL Agent queries with OpenAI and Quickchart.io.json diff --git a/Other_Integrations_and_Use_Cases/Zoom AI Meeting Assistant creates mail summary, ClickUp tasks and follow-up call.txt b/Other_Integrations_and_Use_Cases/Zoom AI Meeting Assistant creates mail summary, ClickUp tasks and follow-up call.json similarity index 100% rename from Other_Integrations_and_Use_Cases/Zoom AI Meeting Assistant creates mail summary, ClickUp tasks and follow-up call.txt rename to Other_Integrations_and_Use_Cases/Zoom AI Meeting Assistant creates mail summary, ClickUp tasks and follow-up call.json diff --git a/PDF_and_Document_Processing/Ask questions about a PDF using AI.txt b/PDF_and_Document_Processing/Ask questions about a PDF using AI.json similarity index 100% rename from PDF_and_Document_Processing/Ask questions about a PDF using AI.txt rename to PDF_and_Document_Processing/Ask questions about a PDF using 
AI.json diff --git a/PDF_and_Document_Processing/Breakdown Documents into Study Notes using Templating MistralAI and Qdrant.txt b/PDF_and_Document_Processing/Breakdown Documents into Study Notes using Templating MistralAI and Qdrant.json similarity index 100% rename from PDF_and_Document_Processing/Breakdown Documents into Study Notes using Templating MistralAI and Qdrant.txt rename to PDF_and_Document_Processing/Breakdown Documents into Study Notes using Templating MistralAI and Qdrant.json diff --git a/PDF_and_Document_Processing/CV Resume PDF Parsing with Multimodal Vision AI.txt b/PDF_and_Document_Processing/CV Resume PDF Parsing with Multimodal Vision AI.json similarity index 100% rename from PDF_and_Document_Processing/CV Resume PDF Parsing with Multimodal Vision AI.txt rename to PDF_and_Document_Processing/CV Resume PDF Parsing with Multimodal Vision AI.json diff --git a/PDF_and_Document_Processing/Chat with PDF docs using AI (quoting sources).txt b/PDF_and_Document_Processing/Chat with PDF docs using AI (quoting sources).json similarity index 100% rename from PDF_and_Document_Processing/Chat with PDF docs using AI (quoting sources).txt rename to PDF_and_Document_Processing/Chat with PDF docs using AI (quoting sources).json diff --git a/PDF_and_Document_Processing/Convert URL HTML to Markdown Format and Get Page Links.txt b/PDF_and_Document_Processing/Convert URL HTML to Markdown Format and Get Page Links.json similarity index 100% rename from PDF_and_Document_Processing/Convert URL HTML to Markdown Format and Get Page Links.txt rename to PDF_and_Document_Processing/Convert URL HTML to Markdown Format and Get Page Links.json diff --git a/PDF_and_Document_Processing/ETL pipeline for text processing.txt b/PDF_and_Document_Processing/ETL pipeline for text processing.json similarity index 100% rename from PDF_and_Document_Processing/ETL pipeline for text processing.txt rename to PDF_and_Document_Processing/ETL pipeline for text processing.json diff --git 
a/PDF_and_Document_Processing/Extract and process information directly from PDF using Claude and Gemini.txt b/PDF_and_Document_Processing/Extract and process information directly from PDF using Claude and Gemini.json similarity index 100% rename from PDF_and_Document_Processing/Extract and process information directly from PDF using Claude and Gemini.txt rename to PDF_and_Document_Processing/Extract and process information directly from PDF using Claude and Gemini.json diff --git a/PDF_and_Document_Processing/Extract data from resume and create PDF with Gotenberg.txt b/PDF_and_Document_Processing/Extract data from resume and create PDF with Gotenberg.json similarity index 100% rename from PDF_and_Document_Processing/Extract data from resume and create PDF with Gotenberg.txt rename to PDF_and_Document_Processing/Extract data from resume and create PDF with Gotenberg.json diff --git a/PDF_and_Document_Processing/Extract license plate number from image uploaded via an n8n form.txt b/PDF_and_Document_Processing/Extract license plate number from image uploaded via an n8n form.json similarity index 100% rename from PDF_and_Document_Processing/Extract license plate number from image uploaded via an n8n form.txt rename to PDF_and_Document_Processing/Extract license plate number from image uploaded via an n8n form.json diff --git a/PDF_and_Document_Processing/Extract text from PDF and image using Vertex AI (Gemini) into CSV.txt b/PDF_and_Document_Processing/Extract text from PDF and image using Vertex AI (Gemini) into CSV.json similarity index 100% rename from PDF_and_Document_Processing/Extract text from PDF and image using Vertex AI (Gemini) into CSV.txt rename to PDF_and_Document_Processing/Extract text from PDF and image using Vertex AI (Gemini) into CSV.json diff --git a/PDF_and_Document_Processing/Invoice data extraction with LlamaParse and OpenAI (1).txt b/PDF_and_Document_Processing/Invoice data extraction with LlamaParse and OpenAI.json similarity index 100% rename 
from PDF_and_Document_Processing/Invoice data extraction with LlamaParse and OpenAI (1).txt rename to PDF_and_Document_Processing/Invoice data extraction with LlamaParse and OpenAI.json diff --git a/PDF_and_Document_Processing/Invoice data extraction with LlamaParse and OpenAI.txt b/PDF_and_Document_Processing/Invoice data extraction with LlamaParse and OpenAI.txt deleted file mode 100644 index 8566ba6..0000000 --- a/PDF_and_Document_Processing/Invoice data extraction with LlamaParse and OpenAI.txt +++ /dev/null @@ -1,991 +0,0 @@ -{ -"meta": { -"instanceId": "26ba763460b97c249b82942b23b6384876dfeb9327513332e743c5f6219c2b8e" -}, -"nodes": [ -{ -"id": "7076854e-c7e8-45b5-9e5e-16678bffa254", -"name": "OpenAI Model", -"type": "@n8n/n8n-nodes-langchain.lmOpenAi", -"position": [ -2420, -480 -], -"parameters": { -"model": { -"__rl": true, -"mode": "list", -"value": "gpt-3.5-turbo-1106", -"cachedResultName": "gpt-3.5-turbo-1106" -}, -"options": { -"temperature": 0 -} -}, -"credentials": { -"openAiApi": { -"id": "8gccIjcuf3gvaoEr", -"name": "OpenAi account" -} -}, -"typeVersion": 1 -}, -{ -"id": "00819f1c-2c60-4b7c-b395-445ec05fd898", -"name": "Structured Output Parser", -"type": "@n8n/n8n-nodes-langchain.outputParserStructured", -"position": [ -2600, -480 -], -"parameters": { -"jsonSchema": "{\n \"Invoice date\": { \"type\": \"date\" },\n \"invoice number\": { \"type\": \"string\" },\n \"Purchase order number\": { \"type\": \"string\" },\n \"Supplier name\": { \"type\": \"string\" },\n \"Supplier address\": {\n \"type\": \"object\",\n \"properties\": {\n \"address 1\": { \"type\": \"string\" },\n \"address 2\": { \"type\": \"string\" },\n \"city\": { \"type\": \"string\" },\n \"postcode\": { \"type\": \"string\" }\n }\n },\n \"Supplier VAT identification number\": { \"type\": \"string\" },\n \"Customer name\": { \"type\": \"string\" },\n \"Customer address\": {\n \"type\": \"object\",\n \"properties\": {\n \"address 1\": { \"type\": \"string\" },\n \"address 2\": { 
\"type\": \"string\" },\n \"city\": { \"type\": \"string\" },\n \"postcode\": { \"type\": \"string\" }\n }\n },\n \"Customer VAT identification number\": { \"type\": \"string\" }, \n \"Shipping addresses\": {\n \"type\": \"array\",\n \"items\": {\n \"type\": \"object\",\n \"properties\": {\n \"address 1\": { \"type\": \"string\" },\n \"address 2\": { \"type\": \"string\" },\n \"city\": { \"type\": \"string\" },\n \"postcode\": { \"type\": \"string\" }\n }\n }\n },\n \"Line items\": {\n \"type\": \"array\",\n \"items\": {\n \"name\": \"string\",\n \"description\": \"string\",\n \"price\": \"number\",\n \"discount\": \"number\"\n }\n },\n \"Subtotal without VAT\": { \"type\": \"number\" },\n \"Subtotal with VAT\": { \"type\": \"number\" },\n \"Total price\": { \"type\": \"number\" }\n}" -}, -"typeVersion": 1.1 -}, -{ -"id": "3b40d506-aabc-4105-853a-a318375cea73", -"name": "Upload to LlamaParse", -"type": "n8n-nodes-base.httpRequest", -"position": [ -1620, -420 -], -"parameters": { -"url": "https://api.cloud.llamaindex.ai/api/parsing/upload", -"method": "POST", -"options": {}, -"sendBody": true, -"contentType": "multipart-form-data", -"sendHeaders": true, -"authentication": "genericCredentialType", -"bodyParameters": { -"parameters": [ -{ -"name": "file", -"parameterType": "formBinaryData", -"inputDataFieldName": "=attachment_0" -} -] -}, -"genericAuthType": "httpHeaderAuth", -"headerParameters": { -"parameters": [ -{ -"name": "accept", -"value": "application/json" -} -] -} -}, -"credentials": { -"httpHeaderAuth": { -"id": "pZ4YmwFIkyGnbUC7", -"name": "LlamaIndex API" -} -}, -"typeVersion": 4.2 -}, -{ -"id": "57a5d331-8838-4d44-8fac-a44dba35fcc4", -"name": "Sticky Note", -"type": "n8n-nodes-base.stickyNote", -"position": [ -1540, -140 -], -"parameters": { -"color": 7, -"width": 785.9525375246163, -"height": 623.4951418211454, -"content": "## 2. 
Advanced PDF Processing with LlamaParse\n[Read more about using HTTP Requests](https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.httprequest/)\n\nLlamaIndex's LlamaCloud is a cloud-based service that allows you to upload,\nparse, and index document. LlamaParse is a tool offered by LlamaCloud\nto parse for complex PDFs with embedded objects ie PDF Tables and figures.\n\nAt time of writing, you can parse 1000 pdfs/day with LlamaCloud's free plan\nby signing up at [https://cloud.llamaindex.ai/](https://cloud.llamaindex.ai/?ref=n8n.io)." -}, -"typeVersion": 1 -}, -{ -"id": "a4504d83-da3b-41bc-891f-f8f9314a6af5", -"name": "Receiving Invoices", -"type": "n8n-nodes-base.gmailTrigger", -"position": [ -780, -400 -], -"parameters": { -"simple": false, -"filters": { -"q": "has:attachment", -"sender": "invoices@paypal.com" -}, -"options": { -"downloadAttachments": true -}, -"pollTimes": { -"item": [ -{ -"mode": "everyMinute" -} -] -} -}, -"credentials": { -"gmailOAuth2": { -"id": "Sf5Gfl9NiFTNXFWb", -"name": "Gmail account" -} -}, -"typeVersion": 1 -}, -{ -"id": "02bd4636-f35b-4a3a-8a5f-9ae7aeed2bf4", -"name": "Append to Reconciliation Sheet", -"type": "n8n-nodes-base.googleSheets", -"position": [ -2960, -320 -], -"parameters": { -"columns": { -"value": {}, -"schema": [ -{ -"id": "Invoice date", -"type": "string", -"display": true, -"removed": false, -"required": false, -"displayName": "Invoice date", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "invoice number", -"type": "string", -"display": true, -"removed": false, -"required": false, -"displayName": "invoice number", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Purchase order number", -"type": "string", -"display": true, -"removed": false, -"required": false, -"displayName": "Purchase order number", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Supplier name", -"type": "string", -"display": true, -"removed": false, -"required": false, 
-"displayName": "Supplier name", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Supplier address", -"type": "string", -"display": true, -"removed": false, -"required": false, -"displayName": "Supplier address", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Supplier VAT identification number", -"type": "string", -"display": true, -"removed": false, -"required": false, -"displayName": "Supplier VAT identification number", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Customer name", -"type": "string", -"display": true, -"removed": false, -"required": false, -"displayName": "Customer name", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Customer address", -"type": "string", -"display": true, -"removed": false, -"required": false, -"displayName": "Customer address", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Customer VAT identification number", -"type": "string", -"display": true, -"removed": false, -"required": false, -"displayName": "Customer VAT identification number", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Shipping addresses", -"type": "string", -"display": true, -"removed": false, -"required": false, -"displayName": "Shipping addresses", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Line items", -"type": "string", -"display": true, -"removed": false, -"required": false, -"displayName": "Line items", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Subtotal without VAT", -"type": "string", -"display": true, -"removed": false, -"required": false, -"displayName": "Subtotal without VAT", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Subtotal with VAT", -"type": "string", -"display": true, -"removed": false, -"required": false, -"displayName": "Subtotal with VAT", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Total price", -"type": "string", -"display": true, -"removed": false, 
-"required": false, -"displayName": "Total price", -"defaultMatch": false, -"canBeUsedToMatch": true -} -], -"mappingMode": "autoMapInputData", -"matchingColumns": [ -"output" -] -}, -"options": {}, -"operation": "append", -"sheetName": { -"__rl": true, -"mode": "id", -"value": "gid=0" -}, -"documentId": { -"__rl": true, -"mode": "list", -"value": "1omHDl1jpjHyrtga2ZHBddUkbkdatEr1ga9vHc4fQ1pI", -"cachedResultUrl": "https://docs.google.com/spreadsheets/d/1omHDl1jpjHyrtga2ZHBddUkbkdatEr1ga9vHc4fQ1pI/edit?usp=drivesdk", -"cachedResultName": "Invoice Reconciliation" -} -}, -"credentials": { -"googleSheetsOAuth2Api": { -"id": "XHvC7jIRR8A2TlUl", -"name": "Google Sheets account" -} -}, -"typeVersion": 4.3 -}, -{ -"id": "cdb0a7ee-068d-465a-b4ae-d5221d5e7400", -"name": "Get Processing Status", -"type": "n8n-nodes-base.httpRequest", -"position": [ -1800, -420 -], -"parameters": { -"url": "=https://api.cloud.llamaindex.ai/api/parsing/job/{{ $json.id }}", -"options": {}, -"sendHeaders": true, -"authentication": "genericCredentialType", -"genericAuthType": "httpHeaderAuth", -"headerParameters": { -"parameters": [ -{ -"name": "accept", -"value": "application/json" -} -] -} -}, -"credentials": { -"httpHeaderAuth": { -"id": "pZ4YmwFIkyGnbUC7", -"name": "LlamaIndex API" -} -}, -"typeVersion": 4.2 -}, -{ -"id": "b68a01ab-d8e6-42f4-ab1d-81e746695eef", -"name": "Wait to stay within service limits", -"type": "n8n-nodes-base.wait", -"position": [ -2120, -560 -], -"webhookId": "17a96ed6-b5ff-47bb-a8a2-39c1eb40185a", -"parameters": { -"amount": 1 -}, -"typeVersion": 1.1 -}, -{ -"id": "41bd28d2-665a-4f71-a456-98eeb26b6655", -"name": "Is Job Ready?", -"type": "n8n-nodes-base.switch", -"position": [ -1960, -420 -], -"parameters": { -"rules": { -"values": [ -{ -"outputKey": "SUCCESS", -"conditions": { -"options": { -"leftValue": "", -"caseSensitive": true, -"typeValidation": "strict" -}, -"combinator": "and", -"conditions": [ -{ -"id": "300fce8c-b19a-4d0c-86e8-f62853c70ce2", -"operator": { 
-"name": "filter.operator.equals", -"type": "string", -"operation": "equals" -}, -"leftValue": "={{ $json.status }}", -"rightValue": "SUCCESS" -} -] -}, -"renameOutput": true -}, -{ -"outputKey": "ERROR", -"conditions": { -"options": { -"leftValue": "", -"caseSensitive": true, -"typeValidation": "strict" -}, -"combinator": "and", -"conditions": [ -{ -"id": "e6058aa0-a3e2-4ce3-9bed-6ff41a5be052", -"operator": { -"name": "filter.operator.equals", -"type": "string", -"operation": "equals" -}, -"leftValue": "={{ $json.status }}", -"rightValue": "ERROR" -} -] -}, -"renameOutput": true -}, -{ -"outputKey": "CANCELED", -"conditions": { -"options": { -"leftValue": "", -"caseSensitive": true, -"typeValidation": "strict" -}, -"combinator": "and", -"conditions": [ -{ -"id": "ceb6338f-4261-40ac-be11-91f61c7302ba", -"operator": { -"name": "filter.operator.equals", -"type": "string", -"operation": "equals" -}, -"leftValue": "={{ $json.status }}", -"rightValue": "CANCELED" -} -] -}, -"renameOutput": true -}, -{ -"outputKey": "PENDING", -"conditions": { -"options": { -"leftValue": "", -"caseSensitive": true, -"typeValidation": "strict" -}, -"combinator": "and", -"conditions": [ -{ -"id": "0fa97d86-432a-409a-917e-5f1a002b1ab9", -"operator": { -"name": "filter.operator.equals", -"type": "string", -"operation": "equals" -}, -"leftValue": "={{ $json.status }}", -"rightValue": "PENDING" -} -] -}, -"renameOutput": true -} -] -}, -"options": { -"allMatchingOutputs": true -} -}, -"typeVersion": 3 -}, -{ -"id": "f7157abe-b1ee-46b3-adb2-1be056d9d75d", -"name": "Sticky Note1", -"type": "n8n-nodes-base.stickyNote", -"position": [ -694.0259411218055, -139.97202236910687 -], -"parameters": { -"color": 7, -"width": 808.8727491350096, -"height": 709.5781339256318, -"content": "## 1. 
Watch for Invoice Emails\n[Read more about Gmail Triggers](https://docs.n8n.io/integrations/builtin/trigger-nodes/n8n-nodes-base.gmailtrigger)\n\nThe Gmail node can watch for all incoming messages and filter based on a condition. We'll set our Gmail node to wait for:\n* a message from particular email address.\n* having an attachment which should be the invoice PDF\n* not having a label \"invoice synced\", which is what we use to avoid duplicate processing." -}, -"typeVersion": 1 -}, -{ -"id": "ff7cb6e4-5a60-4f12-b15e-74e7a4a302ce", -"name": "Sticky Note2", -"type": "n8n-nodes-base.stickyNote", -"position": [ -2360, -70.48792658995046 -], -"parameters": { -"color": 7, -"width": 805.0578351924228, -"height": 656.5014186128178, -"content": "## 3. Use LLMs to Extract Values from Data\n[Read more about Basic LLM Chain](https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.chainllm/)\n\nLarge language models are perfect for data extraction tasks as they can work across a range of document layouts without human intervention. The extracted data can then be sent to a variety of datastores such as spreadsheets, accounting systems and/or CRMs.\n\n**Tip:** The \"Structured Output Parser\" ensures the AI output can be\ninserted to our spreadsheet without additional clean up and/or formatting. " -}, -"typeVersion": 1 -}, -{ -"id": "0d510631-440b-41f5-b1aa-9b7279e9c8e3", -"name": "Sticky Note3", -"type": "n8n-nodes-base.stickyNote", -"position": [ -1934, -774 -], -"parameters": { -"color": 5, -"width": 394.15089838126653, -"height": 154.49585536070904, -"content": "### 🙋‍♂️ Why not just use the built-in PDF convertor?\nA common issue with PDF-to-text convertors are that they ignore important data structures like tables. These structures can be important for data extraction. For example, being able to distinguish between seperate line items in an invoice." 
-}, -"typeVersion": 1 -}, -{ -"id": "fe7fdb90-3c85-4f29-a7d3-16f927f48682", -"name": "Sticky Note4", -"type": "n8n-nodes-base.stickyNote", -"position": [ -3200, -157.65172434465347 -], -"parameters": { -"color": 7, -"width": 362.3535748101346, -"height": 440.3435768155051, -"content": "## 4. Add Label to Avoid Duplication\n[Read more about working with Gmail](https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-base.gmail/)\n\nTo finish off the workflow, we'll add the \"invoice synced\" label to the original invoice email to flag that the extraction was successful. This can be useful if working with a shared inbox and for quality control purposes later." -}, -"typeVersion": 1 -}, -{ -"id": "1acf2c60-c2b9-4f78-94a4-0711c8bd71ab", -"name": "Sticky Note5", -"type": "n8n-nodes-base.stickyNote", -"position": [ -300, -140 -], -"parameters": { -"width": 360.0244620907562, -"height": 573.2443601155958, -"content": "## Try Me Out!\n\n**This workflow does the following:**\n* Waits for email invoices with PDF attachments.\n* Uses the LlamaParse service to convert the invoice PDF into a markdown file.\n* Uses a LLM to extract invoice data from the Markdown file.\n* Exports the extracted data to a Google Sheet.\n\n### Follow along with the blog here\nhttps://blog.n8n.io/how-to-extract-data-from-pdf-to-excel-spreadsheet-advance-parsing-with-n8n-io-and-llamaparse/\n\n### Good to know\n* You'll need to create the label \"invoice synced\" in gmail before using this workflow.\n\n### Need Help?\nJoin the [Discord](https://discord.com/invite/XPKeKXeB7d) or ask in the [Forum](https://community.n8n.io/)!\n\nHappy Hacking!" 
-}, -"typeVersion": 1 -}, -{ -"id": "3802c538-acf9-48d8-b011-bfe2fb817350", -"name": "Add \"invoice synced\" Label", -"type": "n8n-nodes-base.gmail", -"position": [ -3320, -400 -], -"parameters": { -"labelIds": [ -"Label_5511644430826409825" -], -"messageId": "={{ $('Receiving Invoices').item.json.id }}", -"operation": "addLabels" -}, -"credentials": { -"gmailOAuth2": { -"id": "Sf5Gfl9NiFTNXFWb", -"name": "Gmail account" -} -}, -"typeVersion": 2.1 -}, -{ -"id": "ffabd8c5-c440-4473-8e44-b849426c70cf", -"name": "Get Parsed Invoice Data", -"type": "n8n-nodes-base.httpRequest", -"position": [ -2160, -280 -], -"parameters": { -"url": "=https://api.cloud.llamaindex.ai/api/parsing/job/{{ $json.id }}/result/markdown", -"options": { -"redirect": { -"redirect": {} -} -}, -"authentication": "genericCredentialType", -"genericAuthType": "httpHeaderAuth" -}, -"credentials": { -"httpHeaderAuth": { -"id": "pZ4YmwFIkyGnbUC7", -"name": "LlamaIndex API" -} -}, -"typeVersion": 4.2 -}, -{ -"id": "5f9b507f-4dc1-4853-bf71-a64f2f4b55c1", -"name": "Map Output", -"type": "n8n-nodes-base.set", -"position": [ -2760, -320 -], -"parameters": { -"mode": "raw", -"options": {}, -"jsonOutput": "={{ $json.output }}" -}, -"typeVersion": 3.3 -}, -{ -"id": "d22744cd-151d-4b92-b4f2-4a5b9ceb4ee7", -"name": "Apply Data Extraction Rules", -"type": "@n8n/n8n-nodes-langchain.chainLlm", -"position": [ -2420, -320 -], -"parameters": { -"text": "=Given the following invoice in the xml tags, extract the following information as listed below.\nIf you cannot the information for a specific item, then leave blank and skip to the next. 
\n\n* Invoice date\n* invoice number\n* Purchase order number\n* Supplier name\n* Supplier address\n* Supplier VAT identification number\n* Customer name\n* Customer address\n* Customer VAT identification number\n* Shipping addresses\n* Line items, including a description of the goods or services rendered\n* Price with and without VAT\n* Total price\n\n{{ $json.markdown }}", -"promptType": "define", -"hasOutputParser": true -}, -"typeVersion": 1.4 -}, -{ -"id": "3735a124-9fab-4400-8b94-8b5aa9f951fe", -"name": "Should Process Email?", -"type": "n8n-nodes-base.if", -"position": [ -1340, -400 -], -"parameters": { -"options": {}, -"conditions": { -"options": { -"leftValue": "", -"caseSensitive": true, -"typeValidation": "strict" -}, -"combinator": "and", -"conditions": [ -{ -"id": "e5649a2b-6e12-4cc4-8001-4639cc9cc2c2", -"operator": { -"name": "filter.operator.equals", -"type": "string", -"operation": "equals" -}, -"leftValue": "={{ $input.item.binary.attachment_0.mimeType }}", -"rightValue": "application/pdf" -}, -{ -"id": "4c57ab9b-b11c-455a-a63d-daf48418b06e", -"operator": { -"type": "array", -"operation": "notContains", -"rightType": "any" -}, -"leftValue": "={{ $json.labels }}", -"rightValue": "invoice synced" -} -] -} -}, -"typeVersion": 2 -}, -{ -"id": "12a23527-39f3-4f72-8691-3d5cf59f9909", -"name": "Split Out Labels", -"type": "n8n-nodes-base.splitOut", -"position": [ -980, -400 -], -"parameters": { -"options": {}, -"fieldToSplitOut": "labelIds" -}, -"typeVersion": 1 -}, -{ -"id": "88ff6e22-d3d3-403d-b0b2-2674487140a7", -"name": "Get Labels Names", -"type": "n8n-nodes-base.gmail", -"position": [ -980, -540 -], -"parameters": { -"labelId": "={{ $json.labelIds }}", -"resource": "label", -"operation": "get" -}, -"credentials": { -"gmailOAuth2": { -"id": "Sf5Gfl9NiFTNXFWb", -"name": "Gmail account" -} -}, -"typeVersion": 2.1 -}, -{ -"id": "88accb8e-6531-40be-8d35-1bba594149af", -"name": "Combine Label Names", -"type": "n8n-nodes-base.aggregate", -"position": [ 
-980, -680 -], -"parameters": { -"options": {}, -"fieldsToAggregate": { -"fieldToAggregate": [ -{ -"renameField": true, -"outputFieldName": "labels", -"fieldToAggregate": "name" -} -] -} -}, -"typeVersion": 1 -}, -{ -"id": "d233ff33-cabf-434e-876d-879693ecaf58", -"name": "Email with Label Names", -"type": "n8n-nodes-base.merge", -"position": [ -1160, -400 -], -"parameters": { -"mode": "combine", -"options": {}, -"combinationMode": "multiplex" -}, -"typeVersion": 2.1 -}, -{ -"id": "733fc285-e069-4e4e-b13e-dfc1c259ac12", -"name": "Sticky Note6", -"type": "n8n-nodes-base.stickyNote", -"position": [ -2540, -460 -], -"parameters": { -"width": 192.26896179623753, -"height": 213.73043662572252, -"content": "\n\n\n\n\n\n\n\n\n\n\n\n**Need more attributes?**\nChange it here!" -}, -"typeVersion": 1 -}, -{ -"id": "83aa6ed0-ce3b-48d7-aded-475c337ae86e", -"name": "Sticky Note7", -"type": "n8n-nodes-base.stickyNote", -"position": [ -2880, -300 -], -"parameters": { -"width": 258.29345180972877, -"height": 397.0641952938746, -"content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n🚨**Required**\n* Set Your Google Sheet URL here\n* Set the Name of your Sheet\n\n\n**Don't use GSheets?**\nSwap this for Excel, Airtable or a Database!" -}, -"typeVersion": 1 -}, -{ -"id": "720070f6-2d6c-45ef-80c2-e950862a002b", -"name": "Sticky Note8", -"type": "n8n-nodes-base.stickyNote", -"position": [ -740, -380 -], -"parameters": { -"width": 174.50671517518518, -"height": 274.6295678979021, -"content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n🚨**Required**\n* Change the email filters here!" 
-}, -"typeVersion": 1 -} -], -"pinData": {}, -"connections": { -"Map Output": { -"main": [ -[ -{ -"node": "Append to Reconciliation Sheet", -"type": "main", -"index": 0 -} -] -] -}, -"OpenAI Model": { -"ai_languageModel": [ -[ -{ -"node": "Apply Data Extraction Rules", -"type": "ai_languageModel", -"index": 0 -} -] -] -}, -"Is Job Ready?": { -"main": [ -[ -{ -"node": "Get Parsed Invoice Data", -"type": "main", -"index": 0 -} -], -null, -null, -[ -{ -"node": "Wait to stay within service limits", -"type": "main", -"index": 0 -} -] -] -}, -"Get Labels Names": { -"main": [ -[ -{ -"node": "Combine Label Names", -"type": "main", -"index": 0 -} -] -] -}, -"Split Out Labels": { -"main": [ -[ -{ -"node": "Get Labels Names", -"type": "main", -"index": 0 -} -] -] -}, -"Receiving Invoices": { -"main": [ -[ -{ -"node": "Split Out Labels", -"type": "main", -"index": 0 -}, -{ -"node": "Email with Label Names", -"type": "main", -"index": 0 -} -] -] -}, -"Combine Label Names": { -"main": [ -[ -{ -"node": "Email with Label Names", -"type": "main", -"index": 1 -} -] -] -}, -"Upload to LlamaParse": { -"main": [ -[ -{ -"node": "Get Processing Status", -"type": "main", -"index": 0 -} -] -] -}, -"Get Processing Status": { -"main": [ -[ -{ -"node": "Is Job Ready?", -"type": "main", -"index": 0 -} -] -] -}, -"Should Process Email?": { -"main": [ -[ -{ -"node": "Upload to LlamaParse", -"type": "main", -"index": 0 -} -] -] -}, -"Email with Label Names": { -"main": [ -[ -{ -"node": "Should Process Email?", -"type": "main", -"index": 0 -} -] -] -}, -"Get Parsed Invoice Data": { -"main": [ -[ -{ -"node": "Apply Data Extraction Rules", -"type": "main", -"index": 0 -} -] -] -}, -"Structured Output Parser": { -"ai_outputParser": [ -[ -{ -"node": "Apply Data Extraction Rules", -"type": "ai_outputParser", -"index": 0 -} -] -] -}, -"Apply Data Extraction Rules": { -"main": [ -[ -{ -"node": "Map Output", -"type": "main", -"index": 0 -} -] -] -}, -"Append to Reconciliation Sheet": { -"main": [ -[ -{ 
-"node": "Add \"invoice synced\" Label", -"type": "main", -"index": 0 -} -] -] -}, -"Wait to stay within service limits": { -"main": [ -[ -{ -"node": "Get Processing Status", -"type": "main", -"index": 0 -} -] -] -} -} -} \ No newline at end of file diff --git a/PDF_and_Document_Processing/Manipulate PDF with Adobe developer API.txt b/PDF_and_Document_Processing/Manipulate PDF with Adobe developer API.json similarity index 100% rename from PDF_and_Document_Processing/Manipulate PDF with Adobe developer API.txt rename to PDF_and_Document_Processing/Manipulate PDF with Adobe developer API.json diff --git a/PDF_and_Document_Processing/Parse PDF with LlamaParse and save to Airtable.txt b/PDF_and_Document_Processing/Parse PDF with LlamaParse and save to Airtable.json similarity index 100% rename from PDF_and_Document_Processing/Parse PDF with LlamaParse and save to Airtable.txt rename to PDF_and_Document_Processing/Parse PDF with LlamaParse and save to Airtable.json diff --git a/PDF_and_Document_Processing/Prepare CSV files with GPT-4Prepare CSV files with GPT-4.txt b/PDF_and_Document_Processing/Prepare CSV files with GPT-4Prepare CSV files with GPT-4.json similarity index 100% rename from PDF_and_Document_Processing/Prepare CSV files with GPT-4Prepare CSV files with GPT-4.txt rename to PDF_and_Document_Processing/Prepare CSV files with GPT-4Prepare CSV files with GPT-4.json diff --git a/PDF_and_Document_Processing/Remove Personally Identifiable Information (PII) from CSV Files with OpenAI.txt b/PDF_and_Document_Processing/Remove Personally Identifiable Information (PII) from CSV Files with OpenAI.json similarity index 100% rename from PDF_and_Document_Processing/Remove Personally Identifiable Information (PII) from CSV Files with OpenAI.txt rename to PDF_and_Document_Processing/Remove Personally Identifiable Information (PII) from CSV Files with OpenAI.json diff --git a/PDF_and_Document_Processing/Transcribe Audio Files, Summarize with GPT-4, and Store in Notion.txt 
b/PDF_and_Document_Processing/Transcribe Audio Files, Summarize with GPT-4, and Store in Notion.json similarity index 100% rename from PDF_and_Document_Processing/Transcribe Audio Files, Summarize with GPT-4, and Store in Notion.txt rename to PDF_and_Document_Processing/Transcribe Audio Files, Summarize with GPT-4, and Store in Notion.json diff --git a/PDF_and_Document_Processing/Transcribing Bank Statements To Markdown Using Gemini Vision AI.txt b/PDF_and_Document_Processing/Transcribing Bank Statements To Markdown Using Gemini Vision AI.json similarity index 100% rename from PDF_and_Document_Processing/Transcribing Bank Statements To Markdown Using Gemini Vision AI.txt rename to PDF_and_Document_Processing/Transcribing Bank Statements To Markdown Using Gemini Vision AI.json diff --git a/README-zh.md b/README-zh.md new file mode 100644 index 0000000..0005789 --- /dev/null +++ b/README-zh.md @@ -0,0 +1,344 @@ +# n8n_automations + +[![English](https://img.shields.io/badge/English-Click-yellow)](README.md) +[![中文文档](https://img.shields.io/badge/中文文档-点击查看-orange)](README-zh.md) + +本仓库包含从互联网收集的n8n自动化模板集合。这些模板旨在帮助使用[n8n](https://n8n.partnerlinks.io/h1pwwf5m4toe)自动化各种任务和工作流程,使用户更容易发现和使用各种平台和服务的现成自动化方案。 + +👉 [加入n8n,立即开始自动化!💎](https://n8n.partnerlinks.io/h1pwwf5m4toe) + + n8n + + + +--- + +## 免责声明 +本仓库中的所有自动化模板均来自网络,仅在此上传以便于访问和分享。所有模板均非由仓库作者创建或拥有。如果您在使用这些模板时遇到任何问题、错误或损害,仓库作者不承担任何责任或义务。所有原始模板的权利均属于其各自的创建者。 + +--- + +☕ 在深入了解长长的类别和模板列表之前,先来杯咖啡吧——如果您喜欢我的工作,别忘了给我买杯咖啡! + +Buy Me A Coffee + +--- + +## 类别和模板列表 + +--- + +🤖 想要自动翻译、重写和重新发布Twitter (X)线程吗? + +想要轻松地将整个Twitter (X)线程转换并以多种语言发布吗? +查看我的[n8n](https://n8n.partnerlinks.io/h1pwwf5m4toe) Twitter线程(洪水)翻译器和发布工作流程——它在一个无缝过程中自动化提取、翻译、重写和发布。 +非常适合创作者、营销人员和任何希望以最少的努力和超低成本接触新受众的人! + +👉 [在此尝试工作流程](https://n8n.io/workflows/4233-translate-and-repost-twitter-threads-in-multiple-languages-with-openai/) + +--- + +## 🧵 如果您想要抓取Twitter (X)线程,一定要查看这个工作流程 + +想要快速且经济地提取和合并整个Twitter (X)线程吗? 
+查看我的[n8n Twitter线程获取器工作流程](https://n8n.io/workflows/4088-extract-and-merge-twitter-x-threads-using-twitterapiio/)——它闪电般快速、成本效益高,非常适合自动化Twitter线程提取用于研究、内容策划或存档! + +--- + +### Gmail和Email自动化 + +| 标题 | 描述 | 部门 | 链接 | +|-------|-------------|------------|------| +| 使用AI节点自动标记传入的Gmail消息 | 使用AI自动标记传入的Gmail消息。工作流程检索消息内容,建议标签如合作伙伴或询问,并分配它们以更好地组织。 | 运营 | [模板链接](Gmail_and_Email_Automation/Auto-label%20incoming%20Gmail%20messages%20with%20AI%20nodes.json) | +| 使用OpenAI和Gmail API的基本自动Gmail邮件标记 | 使用OpenAI和Gmail API触发新邮件,分析内容,并自动分配或创建标签。帮助使用AI高效分类邮件。 | 运营 | [模板链接](Gmail_and_Email_Automation/Basic%20Automatic%20Gmail%20Email%20Labelling%20with%20OpenAI%20and%20Gmail%20API.json) | +| 使用OpenAI助手在Gmail中撰写回复草稿 | 使用OpenAI在Gmail中生成回复草稿。触发新邮件,提取内容,并创建建议的回复草稿以简化回复。 | 执行 | [模板链接](Gmail_and_Email_Automation/Compose%20reply%20draft%20in%20Gmail%20with%20OpenAI%20Assistant.json) | +| 使用ChatGPT分析和分类可疑邮件内容 | 使用ChatGPT分析可疑邮件,对它们进行分类,并可以生成截图以供审查。帮助识别和分类潜在的危险邮件。 | 安全 | [模板链接](Gmail_and_Email_Automation/Analyze%20&%20Sort%20Suspicious%20Email%20Contents%20with%20ChatGPT.json) | +| 使用ChatGPT Vision分析可疑邮件内容 | 使用文本和图像分析(ChatGPT Vision)来评估可疑邮件。提取截图,分析标题和内容,并标记钓鱼尝试。 | 安全 | [模板链接](Gmail_and_Email_Automation/Analyze%20Suspicious%20Email%20Contents%20with%20ChatGPT%20Vision.json) | +| 使用AI和IMAP的非常简单"人在回路"邮件回复系统 | 实现简单的人机交互邮件回复工作流程。使用IMAP获取邮件,用AI总结内容,并在发送前起草专业回复以供审查。 | 支持 | [模板链接](Gmail_and_Email_Automation/A%20Very%20Simple%20_Human%20in%20the%20Loop_%20Email%20Response%20System%20Using%20AI%20and%20IMAP.json) | +| 使用AI自动分类Outlook邮件 | 使用AI模型自动分类Outlook邮件。根据内容移动消息到文件夹并分配类别,减少手动分类。 | 运营 | [模板链接](Gmail_and_Email_Automation/Auto%20Categorise%20Outlook%20Emails%20with%20AI.json) | +| 使用Monday和Airtable联系支持的Microsoft Outlook AI邮件助手 | 一个由AI驱动的Outlook助手,处理邮件,清理内容,并使用来自Airtable的规则分配类别。与Monday.com集成以提供联系支持。 | 运营 | [模板链接](Gmail_and_Email_Automation/Microsoft%20Outlook%20AI%20Email%20Assistant%20with%20contact%20support%20from%20Monday%20and%20Airtable.json) | +| 📈 从FT.com接收每日市场新闻到您的Microsoft Outlook收件箱 | 
从FT.com提取财经新闻并将每日更新发送到您的Outlook收件箱。自动化内容提取和邮件传递以获取及时的市场洞察。 | 执行 | [模板链接](Gmail_and_Email_Automation/📈%20Receive%20Daily%20Market%20News%20from%20FT.com%20to%20your%20Microsoft%20outlook%20inbox.json) | + +### Telegram + +| 标题 | 描述 | 部门 | 链接 | +|-------|-------------|------------|------| +| 使用LangChain节点和新工具的代理Telegram AI机器人 | 一个高级Telegram机器人,利用LangChain和OpenAI进行对话AI。支持记忆、动态工具使用,并处理传入事件以进行丰富的、上下文感知的聊天交互。 | 支持 | [模板链接](Telegram/Agentic%20Telegram%20AI%20bot%20with%20with%20LangChain%20nodes%20and%20new%20tools.json) | +| Telegram上的AI驱动阿拉伯语儿童故事讲述 | 一个使用OpenAI生成和讲述阿拉伯语儿童故事的Telegram机器人,使故事讲述对年轻用户具有互动性和教育性。 | 支持 | [模板链接](Telegram/AI-Powered%20Children_s%20Arabic%20Storytelling%20on%20Telegram.json) | +| 使用OpenAI在Telegram上进行AI驱动的英语儿童故事讲述 | 使用OpenAI创建和讲述英语儿童故事,以互动方式吸引年轻受众。 | 支持 | [模板链接](Telegram/AI-Powered%20Children_s%20English%20Storytelling%20on%20Telegram%20with%20OpenAI.json) | +| 通过Telegram进行自动AI图像分析和响应 | 让用户向Telegram发送图像并自动接收基于AI的分析和反馈。 | 运营 | [模板链接](Telegram/Automated%20AI%20image%20analysis%20and%20response%20via%20Telegram.json) | +| Angie,带有Telegram语音和文本的个人AI助手 | 个人语音和文本助手机器人,回答查询,管理任务,并使用AI自然交互。 | 支持 | [模板链接](Telegram/Angie,%20Personal%20AI%20Assistant%20with%20Telegram%20Voice%20and%20Text.json) | +| 通过简单Telegram机器人与OpenAI的GPT聊天 | 一个最小的Telegram机器人,将用户消息转发给GPT并返回AI生成的回复。AI聊天的理想起点。 | 支持 | [模板链接](Telegram/Chat%20with%20OpenAIs%20GPT%20via%20a%20simple%20Telegram%20Bot.json) | +| Telegram AI机器人助手:语音和文本消息的现成模板 | 现成的助手机器人,处理语音和文本输入,利用AI在Telegram中进行智能对话回复。 | 支持 | [模板链接](Telegram/Telegram%20AI%20bot%20assistant_%20ready-made%20template%20for%20voice%20&%20text%20messages.json) | +| Telegram AI机器人:NeurochainAI文本和图像 | 集成NeurochainAI API以在Telegram内进行文本和图像生成,实现创意媒体交互。 | 营销 | [模板链接](Telegram/Telegram%20AI%20Bot_%20NeurochainAI%20Text%20&%20Image%20-%20NeurochainAI%20Basic%20API%20Integration.json) | +| 使用LangChain节点的Telegram AI机器人 | 使用LangChain节点在Telegram中进行高级AI对话和工具使用。 | 支持 | [模板链接](Telegram/Telegram%20AI%20bot%20with%20LangChain%20nodes.json) | +| Telegram AI聊天机器人 | 
一个通用的AI聊天机器人模板,可用于Telegram的各种用例。 | 支持 | [模板链接](Telegram/Telegram%20AI%20Chatbot.json) | +| 带有Supabase记忆和OpenAI助手集成的Telegram机器人 | 为Telegram机器人添加Supabase长期记忆,结合OpenAI进行丰富的、上下文感知的对话。 | 支持 | [模板链接](Telegram/Telegram%20Bot%20with%20Supabase%20memory%20and%20OpenAI%20assistant%20integration.json) | +| 与PDF的Telegram聊天 | 允许用户向Telegram上传PDF并使用AI驱动的总结和问答与其内容聊天。 | 运营 | [模板链接](Telegram/Telegram%20chat%20with%20PDF.json) | +| 🤖 用于文本_音频_图像的Telegram消息代理 | 多模态代理,在Telegram聊天中使用AI处理文本、音频和图像以进行回复。 | 支持 | [模板链接](Telegram/%F0%9F%A4%96%20Telegram%20Messaging%20Agent%20for%20Text_Audio_Images.json) | +| 使用OpenAI从Telegram到Spotify | 让用户在Telegram中请求歌曲或播放列表,并通过OpenAI自动在Spotify中创建它们。 | 营销 | [模板链接](Telegram/Telegram%20to%20Spotify%20with%20OpenAI.json) | +| 每天向Telegram发送一个随机食谱 | 定时工作流程,每天获取一个随机食谱并将其发布到Telegram聊天。 | 营销 | [模板链接](Telegram/Send%20a%20random%20recipe%20once%20a%20day%20to%20Telegram.json) | +| 检测Telegram消息中的有毒语言 | 监控Telegram聊天并使用AI审核标记包含有毒语言的消息。 | 安全 | [模板链接](Telegram/Detect%20toxic%20language%20in%20Telegram%20messages.json) | +| 使用AI翻译Telegram音频消息(支持55种语言) | 接收语音消息,转录它们,并以超过50种语言发送回翻译。 | 支持 | [模板链接](Telegram/Translate%20Telegram%20audio%20messages%20with%20AI%20(55%20supported%20languages).json) | +| 使用长期记忆和动态工具路由增强您的AI聊天机器人 | 外部工作流程,通过长期记忆和动态工具路由功能增强AI聊天机器人。 | 支持 | [模板链接](https://n8n.io/workflows/3025-empower-your-ai-chatbot-with-long-term-memory-and-dynamic-tool-routing/) | + +### Google Drive和Google Sheets + +| 标题 | 描述 | 部门 | 链接 | +|-------|-------------|------------|------| +| 使用Google Drive集成的OpenAI模型端到端微调自动化 | 通过集成Google Drive进行数据输入和输出来自动化OpenAI模型的微调,简化自定义AI模型训练。 | 工程 | [模板链接](Google_Drive_and_Google_Sheets/Automated%20End-to-End%20Fine-Tuning%20of%20OpenAI%20Models%20with%20Google%20Drive%20Integration.json) | +| Google Drive中图像的自动背景移除 | 自动移除存储在Google Drive中的图像背景,为产品目录或营销材料等各种用途做准备。 | 营销 | [模板链接](Google_Drive_and_Google_Sheets/Automatic%20Background%20Removal%20for%20Images%20in%20Google%20Drive.json) | +| 使用Google Drive集成构建OpenAI助手 | 演示构建一个访问和利用Google 
Drive文件的OpenAI助手,使其能够基于文档内容回答问题或执行任务。 | 支持 | [模板链接](Google_Drive_and_Google_Sheets/Build%20an%20OpenAI%20Assistant%20with%20Google%20Drive%20Integration.json) | +| 使用Google Drive和Gemini的公司文档RAG聊天机器人 | 创建一个检索增强生成(RAG)聊天机器人,基于存储在Google Drive中的公司文档回答问题,利用Google Gemini。 | 支持 | [模板链接](Google_Drive_and_Google_Sheets/RAG%20Chatbot%20for%20Company%20Documents%20using%20Google%20Drive%20and%20Gemini.json) | +| RAG_上下文感知分块:通过OpenRouter和Gemini从Google Drive到Pinecone | 为Google Drive文档实现上下文感知分块,将它们发送到Pinecone进行向量存储,并使用OpenRouter和Gemini进行高级RAG。 | 工程 | [模板链接](Google_Drive_and_Google_Sheets/RAG_Context-Aware%20Chunking%20_%20Google%20Drive%20to%20Pinecone%20via%20OpenRouter%20&%20Gemini.json) | +| 总结Google Drive中的新文档并将摘要保存到Google Sheet | 监控Google Drive中的新文档,使用AI总结其内容,并将这些摘要保存到Google Sheet中以快速概览和分析。 | 运营 | [模板链接](Google_Drive_and_Google_Sheets/Summarize%20the%20New%20Documents%20from%20Google%20Drive%20and%20Save%20Summary%20in%20Google%20Sheet.json) | +| 从Google Drive上传到Instagram和Tiktok | 自动化从Google Drive直接上传媒体到Instagram和TikTok,简化社交媒体内容发布。 | 营销 | [模板链接](Google_Drive_and_Google_Sheets/Upload%20to%20Instagram%20and%20Tiktok%20from%20Google%20Drive.json) | +| 从Google Sheets撰写和发布博客文章 | 在Google Sheets中撰写博客文章并自动发布到内容管理系统,简化内容创建和发布。 | 营销 | [模板链接](Google_Drive_and_Google_Sheets/Author%20and%20Publish%20Blog%20Posts%20From%20Google%20Sheets.json) | +| 使用AI与Google Sheet聊天 | 允许用户通过AI模型使用自然语言与Google Sheet中的数据交互和查询,使数据分析更加易于访问。 | 运营 | [模板链接](Google_Drive_and_Google_Sheets/Chat%20with%20a%20Google%20Sheet%20using%20AI.json) | +| 在Telegram中与来自Google Sheets的活动日程聊天 | 将包含活动日程的Google Sheet连接到Telegram,允许用户通过Telegram机器人查询他们的日程。 | 运营 | [模板链接](Google_Drive_and_Google_Sheets/Chat%20with%20your%20event%20schedule%20from%20Google%20Sheets%20in%20Telegram.json) | +| 通过OpenAI的GPT-4在Google Sheets中筛选新潜在客户 | 使用OpenAI的GPT-4分析并筛选输入到Google Sheet中的新潜在客户,帮助销售团队优先考虑他们的外展活动。 | 销售 | [模板链接](Google_Drive_and_Google_Sheets/Qualify%20new%20leads%20in%20Google%20Sheets%20via%20OpenAI_s%20GPT-4.json) | +| 
使用AI筛选申请人,通知HR并将他们保存在Google Sheet中 | 使用AI自动化筛选工作申请人,通知HR合格的候选人,并将申请人数据保存到Google Sheet中。 | HR | [模板链接](Google_Drive_and_Google_Sheets/Screen%20Applicants%20With%20AI,%20notify%20HR%20and%20save%20them%20in%20a%20Google%20Sheet.json) | +| 通过OpenAI的GPT-4总结Google Sheets表单反馈 | 使用OpenAI的GPT-4总结通过Google Forms收集并存储在Google Sheets中的反馈,从调查回复中提供快速洞察。 | 营销 | [模板链接](Google_Drive_and_Google_Sheets/Summarize%20Google%20Sheets%20form%20feedback%20via%20OpenAI_s%20GPT-4.json) | + +--- + +*随着项目扩展,可以在下面添加更多部分和表格。 + +### WordPress + +| 标题 | 描述 | 部门 | 链接 | +|-------|-------------|------------|------| +| 使用AI自动分类WordPress博客文章 | 此工作流程使用AI自动化WordPress博客文章的分类,简化内容组织和管理。 | 营销/内容 | [模板链接](WordPress/Auto-Categorize%20blog%20posts%20in%20wordpress%20using%20A.I..json) | +| 使用AI自动标记WordPress博客文章 | 此工作流程使用AI自动标记WordPress博客文章,改善SEO和内容可发现性。 | 营销/内容 | [模板链接](WordPress/Auto-Tag%20Blog%20Posts%20in%20WordPress%20with%20AI.json) | +| 使用AI自动化品牌语音博客创建 | 此工作流程自动化博客文章的创建,确保它们使用AI遵循特定的品牌语音。 | 营销/内容 | [模板链接](WordPress/Automate%20Blog%20Creation%20in%20Brand%20Voice%20with%20AI.json) | +| 使用DeepSeek R1为WordPress自动化内容生成器 | 此工作流程使用DeepSeek R1 AI模型为WordPress自动化内容生成,实现快速内容创建。 | 营销/内容 | [模板链接](WordPress/Automate%20Content%20Generator%20for%20WordPress%20with%20DeepSeek%20R1.json) | +| WordPress - AI聊天机器人增强用户体验 - 使用Supabase和OpenAI | 此工作流程使用Supabase和OpenAI将AI聊天机器人集成到WordPress中,通过提供智能交互来增强用户体验。 | 客户支持/营销 | [模板链接](WordPress/WordPress%20-%20AI%20Chatbot%20to%20enhance%20user%20experience%20-%20with%20Supabase%20and%20OpenAI.json) | + +### PDF和文档处理 + +| 标题 | 描述 | 部门 | 链接 | +|-------|-------------|------------|------| +| 使用AI询问PDF问题 | 此工作流程从Google Drive获取PDF,将其分割成块,使用OpenAI嵌入嵌入块,并启用与文档内容的聊天交互。 | 客户支持/知识管理 | [模板链接](PDF_and_Document_Processing/Ask%20questions%20about%20a%20PDF%20using%20AI.json) | +| 使用模板化MistralAI和Qdrant将文档分解为学习笔记 | 此工作流程在新文件上触发,使用MistralAI嵌入处理文档,并将数据存储在Qdrant向量存储中以生成学习笔记。 | 教育/知识管理 | 
[模板链接](PDF_and_Document_Processing/Breakdown%20Documents%20into%20Study%20Notes%20using%20Templating%20MistralAI%20and%20Qdrant.json) | +| 使用多模态视觉AI进行CV简历PDF解析 | 此工作流程将候选人简历PDF转换为图像,使用视觉语言模型评估候选人适合性,并包括绕过简历中隐藏AI提示的逻辑。 | HR | [模板链接](PDF_and_Document_Processing/CV%20Resume%20PDF%20Parsing%20with%20Multimodal%20Vision%20AI.json) | +| 使用AI与PDF文档聊天(引用来源) | 此工作流程启用与PDF文档的聊天交互,允许用户提问并接收带有文档引用来源的答案。 | 客户支持/知识管理 | [模板链接](PDF_and_Document_Processing/Chat%20with%20PDF%20docs%20using%20AI%20(quoting%20sources).json) | +| 将URL HTML转换为Markdown格式并获取页面链接 | 此工作流程将给定URL的HTML内容转换为Markdown格式并提取所有页面链接,对内容抓取和分析有用。 | 营销/内容 | [模板链接](PDF_and_Document_Processing/Convert%20URL%20HTML%20to%20Markdown%20Format%20and%20Get%20Page%20Links.json) | +| 文本处理的ETL管道 | 此工作流程实现文本处理的ETL管道,从Twitter提取数据,将其存储在MongoDB和PostgreSQL中,并根据情感分析向Slack发送警报。 | 数据分析/IT | [模板链接](PDF_and_Document_Processing/ETL%20pipeline%20for%20text%20processing.json) | +| 使用Claude和Gemini直接从PDF提取和处理信息 | 此工作流程使用Claude和Gemini等高级AI模型直接从PDF提取和处理信息,实现智能文档分析。 | 数据提取/IT | [模板链接](PDF_and_Document_Processing/Extract%20and%20process%20information%20directly%20from%20PDF%20using%20Claude%20and%20Gemini.json) | +| 从简历提取数据并使用Gotenberg创建PDF | 此工作流程使用AI从简历中提取结构化数据,将其转换为HTML,然后使用Gotenberg生成格式良好的PDF。 | HR | [模板链接](PDF_and_Document_Processing/Extract%20data%20from%20resume%20and%20create%20PDF%20with%20Gotenberg.json) | +| 从通过n8n表单上传的图像中提取车牌号 | 此工作流程使用视觉语言模型从通过n8n表单上传的图像中提取车牌号,然后显示提取的信息。 | 运营/物流 | [模板链接](PDF_and_Document_Processing/Extract%20license%20plate%20number%20from%20image%20uploaded%20via%20an%20n8n%20form.json) | +| 使用Vertex AI(Gemini)从PDF和图像提取文本到CSV | 此工作流程使用Vertex AI(Gemini)从PDF和图像中提取文本,基于文件类型路由,并将提取的数据转换为CSV格式。 | 数据提取/IT | [模板链接](PDF_and_Document_Processing/Extract%20text%20from%20PDF%20and%20image%20using%20Vertex%20AI%20(Gemini)%20into%20CSV.json) | +| 使用LlamaParse和OpenAI进行发票数据提取 | 此工作流程使用LlamaParse和OpenAI从发票中提取结构化数据,然后使用结构化输出解析器进行详细的发票数据提取。 | 财务/管理 | 
[模板链接](PDF_and_Document_Processing/Invoice%20data%20extraction%20with%20LlamaParse%20and%20OpenAI.json) | +| 使用AI撰写WordPress文章(从几个关键词开始) | 此工作流程使用AI基于几个关键词撰写WordPress文章,简化内容创建过程。 | 营销/内容 | [模板链接](WordPress/Write%20a%20WordPress%20post%20with%20AI%20(starting%20from%20a%20few%20keywords).json) | + +### Discord + +| 标题 | 描述 | 部门 | 链接 | +|-------|-------------|------------|------| +| Discord AI驱动机器人 | 此工作流程创建一个AI驱动的Discord机器人,对用户消息进行分类(成功故事、紧急问题、工单)并将它们路由到相应的部门(客户成功、IT、客户支持)。 | 客户支持 | [模板链接](Discord/Discord%20AI-powered%20bot.json) | +| 向Discord发送每日翻译的Calvin和Hobbes漫画 | 此工作流程自动化每日检索Calvin和Hobbes漫画,将对话翻译成英语和韩语(或其他语言),并将它们发布到Discord。 | 营销/内容 | [模板链接](Discord/Send%20daily%20translated%20Calvin%20and%20Hobbes%20Comics%20to%20Discord.json) | +| 在Discord上分享带有AI摘要的YouTube视频 | 此工作流程自动在Discord上分享新的YouTube视频以及AI生成的内容摘要,利用字幕数据。 | 营销 | [模板链接](Discord/Share%20YouTube%20Videos%20with%20AI%20Summaries%20on%20Discord.json) | + +### 数据库和存储 + +| 标题 | 描述 | 部门 | 链接 | +|-------|-------------|------------|------| +| 与PostgreSQL数据库聊天 | 此工作流程使AI助手能够与PostgreSQL数据库聊天,允许用户使用自然语言查询和检索数据。它支持自定义SQL查询和模式内省。 | 数据分析 | [模板链接](Database_and_Storage/Chat%20with%20Postgresql%20Database.json) | +| 仅从模式生成SQL查询 - AI驱动 | 此工作流程使用AI基于给定的数据库模式生成SQL查询,使与数据库交互更容易,无需手动编写查询。 | 工程 | [模板链接](Database_and_Storage/Generate%20SQL%20queries%20from%20schema%20only%20-%20AI-powered.json) | +| MongoDB AI代理 - 智能电影推荐 | 此工作流程创建一个AI代理,通过与MongoDB数据库交互提供智能电影推荐,使用聚合管道获取相关电影数据。 | 数据分析 | [模板链接](Database_and_Storage/MongoDB%20AI%20Agent%20-%20Intelligent%20Movie%20Recommendations.json) | +| Supabase插入和更新及检索 | 此工作流程演示如何对Supabase执行插入、更新和检索操作,特别是处理向量嵌入和关联元数据。 | 工程 | [模板链接](Database_and_Storage/Supabase%20Insertion%20&%20Upsertion%20&%20Retrieval.json) | +| 使用LangChain AI代理与您的SQLite数据库对话 | 此工作流程允许用户使用LangChain AI代理与SQLite数据库交互,实现自然语言查询和从数据库检索数据。 | 数据分析 | [模板链接](Database_and_Storage/Talk%20to%20your%20SQLite%20database%20with%20a%20LangChain%20AI%20Agent.json) | + +### Airtable + +| 标题 | 描述 | 部门 | 链接 | 
+|-------|-------------|------------|------| +| 使用Airtable和Fireflies的项目管理和会议AI代理 | 此工作流程使用AI代理通过分析来自Fireflies的通话记录来自动化项目管理任务和会议跟进。它在Airtable中创建任务并通知客户他们的任务。 | 运营 | [模板链接](Airtable/AI%20Agent%20for%20project%20management%20and%20meetings%20with%20Airtable%20and%20Fireflies.json) | +| 与Airtable聊天并分析数据的AI代理 | 此工作流程创建一个可以与Airtable聊天、分析数据并根据用户请求执行查询的AI代理。它可以处理聚合函数并生成图表/图像。 | 数据分析 | [模板链接](Airtable/AI%20Agent%20to%20chat%20with%20Airtable%20and%20analyze%20data.json) | +| 通过AI和Obsidian Notes获取Airtable数据 | 此工作流程使用AI代理从Airtable检索数据并将其与Obsidian Notes集成,允许在Obsidian内无缝访问和组织数据。 | 生产力 | [模板链接](Airtable/Get%20Airtable%20data%20via%20AI%20and%20Obsidian%20Notes.json) | +| 使用AI和n8n Forms处理工作申请提交 | 此工作流程通过使用AI从简历(PDF)中提取信息来自动化工作申请提交的处理,将其解析为结构化格式,并可能存储在Airtable中。 | HR | [模板链接](Airtable/Handling%20Job%20Application%20Submissions%20with%20AI%20and%20n8n%20Forms.json) | +| 使用OpenAi和Airtable的Hubspot聊天vAssistant | 此工作流程将OpenAI助手与HubSpot聊天和Airtable集成,以提供自动回复并管理客户交互。它获取聊天消息,用AI处理它们,并可以在Airtable中存储相关信息。 | 销售 | [模板链接](Airtable/vAssistant%20for%20Hubspot%20Chat%20using%20OpenAi%20and%20Airtable.json) | + +### Notion + +| 标题 | 描述 | 部门 | 链接 | +|-------|-------------|------------|------| +| 向Notion表格添加积极反馈消息 | 从Typeform捕获积极反馈,使用Google Cloud Natural Language分析情感,并将其添加到Notion表格中,对高分反馈发送Slack通知。 | 支持 | [模板链接](Notion/Add%20positive%20feedback%20messages%20to%20a%20table%20in%20Notion.json) | +| 使用AI分析Hugging Face论文并将其存储在Notion中 | 自动获取和分析来自Hugging Face的论文,使用AI提取关键信息,并将结构化数据存储在Notion数据库中。 | 工程 | [模板链接](Notion/Analyse%20papers%20from%20Hugging%20Face%20with%20AI%20and%20store%20them%20in%20Notion.json) | +| 使用Exa.ai、Notion和AI代理自动化竞争对手研究 | 使用Exa.ai构建竞争对手研究代理以找到类似公司。AI代理然后搜索互联网获取公司概览、产品提供和客户评论,将报告编译到Notion表格中。 | 营销 | [模板链接](Notion/Automate%20Competitor%20Research%20with%20Exa.ai,%20Notion%20and%20AI%20Agents.json) | +| 使用Notion和OpenAI自动化LinkedIn外展 | 通过从Notion数据库获取每日帖子,使用OpenAI为LinkedIn参与格式化它们,然后将它们发布到LinkedIn来自动化LinkedIn外展。 | 营销 | 
[模板链接](Notion/Automate%20LinkedIn%20Outreach%20with%20Notion%20and%20OpenAI.json) | +| Notion AI助手生成器 | 为特定的Notion数据库模式生成自定义AI助手聊天机器人工作流程,允许用户与他们的Notion数据聊天。 | 工程 | [模板链接](Notion/Notion%20AI%20Assistant%20Generator.json) | +| Notion知识库AI助手 | 创建一个可以搜索和从Notion知识库检索信息的AI助手,为用户查询提供答案。 | 支持 | [模板链接](Notion/Notion%20knowledge%20base%20AI%20assistant.json) | +| Notion到Pinecone向量存储集成 | 将Notion与Pinecone集成,允许Notion页面转换为向量嵌入并存储在Pinecone中以进行高级搜索和检索。 | 工程 | [模板链接](Notion/Notion%20to%20Pinecone%20Vector%20Store%20Integration.json) | +| 使用OpenAI将Notion页面作为向量文档存储到Supabase | 自动化将Notion页面作为向量文档存储在Supabase数据库中,使用OpenAI为内容生成嵌入。 | 工程 | [模板链接](Notion/Store%20Notion_s%20Pages%20as%20Vector%20Documents%20into%20Supabase%20with%20OpenAI.json) | +| 将邮件转换为Notion中的AI增强任务(多用户支持)与Gmail、Airtable和Softr | 将邮件转换为Notion中的AI增强任务,支持多用户。它与Gmail集成以触发邮件,与Airtable集成以路由,与Softr集成以提供用户界面。 | 运营 | [模板链接](Notion/Turn%20Emails%20into%20AI-Enhanced%20Tasks%20in%20Notion%20(Multi-User%20Support)%20with%20Gmail,%20Airtable%20and%20Softr.json) | +| 使用Supabase和Notion在向量存储中更新大型文档 | 通过将大型文档分割成块,生成嵌入,并将它们更新到Supabase向量存储中来管理大型文档,Notion作为文档源。 | 工程 | [模板链接](Notion/Upsert%20huge%20documents%20in%20a%20vector%20store%20with%20Supabase%20and%20Notion.json) | + +### Slack + +| 标题 | 描述 | 部门 | 链接 | +|-------|-------------|------------|------| +| 使用OpenAI、Google Sheets、Jina AI和Slack的AI驱动信息监控 | 监控RSS源,使用OpenAI和Jina AI总结文章,对它们进行分类,并向Slack发送格式化通知,实现AI驱动的信息监控。 | 营销 | [模板链接](Slack/AI-Powered%20Information%20Monitoring%20with%20OpenAI,%20Google%20Sheets,%20Jina%20AI%20and%20Slack.json) | +| 使用Google Gemini创建AI Slack机器人 | 使用Google Gemini构建AI Slack机器人,处理webhook,集成AI代理,管理记忆,并响应Slack消息。 | 工程 | [模板链接](Slack/Creating%20a%20AI%20Slack%20Bot%20with%20Google%20Gemini.json) | +| 使用Slack和Linear的客户支持渠道和工单系统 | 通过查询Slack中带有工单表情符号的消息来自动化客户支持,决定是否需要新的Linear工单,创建或更新工单,并通知Slack。 | 支持 | [模板链接](Slack/Customer%20Support%20Channel%20and%20Ticketing%20System%20with%20Slack%20and%20Linear.json) | +| 使用Qualys Slack快捷机器人增强安全运营! 
| 为Qualys创建Slack快捷机器人以增强安全运营,允许用户直接从Slack触发创建报告或开始漏洞扫描等操作。 | 安全 | [模板链接](Slack/Enhance%20Security%20Operations%20with%20the%20Qualys%20Slack%20Shortcut%20Bot!.json) | +| 使用OpenAI GPT-4o丰富Pipedrive的组织数据并在Slack中通知 | 通过抓取网站内容,使用OpenAI GPT-4o生成摘要,并将其作为笔记添加到Pipedrive中来丰富Pipedrive组织数据,然后通知Slack频道。 | 销售 | [模板链接](Slack/Enrich%20Pipedrive_s%20Organization%20Data%20with%20OpenAI%20GPT-4o%20&%20Notify%20it%20in%20Slack.json) | +| IT运营AI SlackBot工作流程 - 与您的知识库聊天 | 为IT运营创建AI Slackbot,使用户能够与知识库聊天以检索信息并在Slack内直接获得答案。 | IT | [模板链接](Slack/IT%20Ops%20AI%20SlackBot%20Workflow%20-%20Chat%20with%20your%20knowledge%20base.json) | +| 使用Linear和Slack对支持问题进行情感分析跟踪 | 通过与Linear和Slack集成,对Linear评论使用OpenAI进行情感分析,并通知相关Slack频道来跟踪支持问题的情感。 | 支持 | [模板链接](Slack/Sentiment%20Analysis%20Tracking%20on%20Support%20Issues%20with%20Linear%20and%20Slack.json) | +| Slack斜杠命令AI聊天机器人 | 实现可通过Slack斜杠命令访问的AI聊天机器人,处理用户命令,与AI模型交互,并在Slack内响应。 | IT | [模板链接](Slack/Slack%20slash%20commands%20AI%20Chat%20Bot.json) | +| Venafi Cloud Slack证书机器人 | 提供与Venafi Cloud交互以进行证书管理的Slack机器人,允许用户检查证书状态、接收警报或通过Slack请求证书操作。 | 安全 | [模板链接](Slack/Venafi%20Cloud%20Slack%20Cert%20Bot.json) | + +### OpenAI和LLMs + +| 标题 | 描述 | 部门 | 链接 | +|---|---|---|---| +| 高级AI演示(在AI开发者#14聚会上展示) | 高级AI功能演示。 | AI/开发 | [模板链接](OpenAI_and_LLMs/Advanced%20AI%20Demo%20(Presented%20at%20AI%20Developers%20%2314%20meetup).json) | +| AI代理聊天 | 基本AI聊天代理。 | AI/客户服务 | [模板链接](OpenAI_and_LLMs/AI%20agent%20chat.json) | +| 可以抓取网页的AI代理 | 用于网页抓取的AI代理。 | AI/数据提取 | [模板链接](OpenAI_and_LLMs/AI%20agent%20that%20can%20scrape%20webpages.json) | +| AI Crew自动化基本面股票分析 - 问答工作流程 | 股票分析自动化。 | 财务/AI/数据分析 | [模板链接](OpenAI_and_LLMs/AI%20Crew%20to%20Automate%20Fundamental%20Stock%20Analysis%20-%20Q&A%20Workflow.json) | +| AI客户反馈情感分析 | 客户反馈的情感分析。 | 客户服务/营销/数据分析 | [模板链接](OpenAI_and_LLMs/AI%20Customer%20feedback%20sentiment%20analysis.json) | +| 使用动态提示和Airtable的AI数据提取 | 带有Airtable集成的AI驱动数据提取。 | AI/数据提取/数据库 | 
[模板链接](OpenAI_and_LLMs/AI%20Data%20Extraction%20with%20Dynamic%20Prompts%20and%20Airtable.json) | +| 使用动态提示和Baserow的AI数据提取 | 带有Baserow集成的AI驱动数据提取。 | AI/数据提取/数据库 | [模板链接](OpenAI_and_LLMs/AI%20Data%20Extraction%20with%20Dynamic%20Prompts%20and%20Baserow.json) | +| 使用ERPNext和n8n的AI驱动潜在客户管理和查询自动化 | 潜在客户管理自动化。 | 销售/CRM/AI | [模板链接](OpenAI_and_LLMs/AI-Driven%20Lead%20Management%20and%20Inquiry%20Automation%20with%20ERPNext%20&%20n8n.json) | +| AI健身教练Strava数据分析和个人化训练洞察 | 通过Strava数据分析进行健身指导。 | 健身/AI/数据分析 | [模板链接](OpenAI_and_LLMs/AI%20Fitness%20Coach%20Strava%20Data%20Analysis%20and%20Personalized%20Training%20Insights.json) | +| ERPNext的AI驱动候选人筛选自动化 | 候选人筛选自动化。 | HR/AI/招聘 | [模板链接](OpenAI_and_LLMs/AI-Powered%20Candidate%20Shortlisting%20Automation%20for%20ERPNext.json) | +| 商业AI驱动邮件自动化:使用RAG总结和回复 | 带有总结和回复的邮件自动化。 | 商业自动化/AI/沟通 | [模板链接](OpenAI_and_LLMs/AI-Powered%20Email%20Automation%20for%20Business_%20Summarize%20&%20Respond%20with%20RAG.json) | +| 股票收益报告分析的AI驱动RAG工作流程 | 使用RAG进行股票收益报告分析。 | 财务/AI/数据分析 | [模板链接](OpenAI_and_LLMs/AI-Powered%20RAG%20Workflow%20For%20Stock%20Earnings%20Report%20Analysis.json) | +| AI驱动社交媒体放大器 | 使用AI放大社交媒体存在。 | 营销/AI/社交媒体 | [模板链接](OpenAI_and_LLMs/AI-Powered%20Social%20Media%20Amplifier.json) | +| AI驱动的WooCommerce支持代理 | 为WooCommerce商店创建AI驱动的支持代理。 | 电子商务/AI/客户服务 | [模板链接](OpenAI_and_LLMs/AI-powered%20WooCommerce%20Support-Agent.json) | +| AI驱动的YouTube视频总结和分析 | 使用AI总结和分析YouTube视频。 | 内容创建/AI/数据分析 | [模板链接](OpenAI_and_LLMs/%E2%9A%A1AI-Powered%20YouTube%20Video%20Summarization%20&%20Analysis.json) | +| AI:询问任何数据源的问题(使用n8n工作流程检索器) | 允许用户使用n8n工作流程检索器询问各种数据源的问题。 | AI/数据分析/工作流程自动化 | [模板链接](OpenAI_and_LLMs/AI_%20Ask%20questions%20about%20any%20data%20source%20(using%20the%20n8n%20workflow%20retriever).json) | +| AI:总结播客集并使用维基百科增强 | 使用AI总结播客集并使用维基百科的信息增强摘要。 | 内容创建/AI/数据分析 | [模板链接](OpenAI_and_LLMs/AI_%20Summarize%20podcast%20episode%20and%20enhance%20using%20Wikipedia.json) | + +### WhatsApp + +| 标题 | 描述 | 部门 | 链接 | +|---|---|---|---| +| 
使用AI和APIFY自动化销售会议准备并发送到WhatsApp | 此工作流程使用AI和Apify自动化销售会议准备,将相关信息发送到WhatsApp。 | 销售/AI/自动化 | [模板链接](./WhatsApp/Automate%20Sales%20Meeting%20Prep%20with%20AI%20&%20APIFY%20Sent%20To%20WhatsApp.json) | +| 构建您的第一个WhatsApp聊天机器人 | 此工作流程指导您构建第一个WhatsApp聊天机器人。 | 客户服务/开发 | [模板链接](./WhatsApp/Building%20Your%20First%20WhatsApp%20Chatbot.json) | +| 使用OpenAI构建完整的商业WhatsApp AI驱动RAG聊天机器人 | 此工作流程使用OpenAI构建完整的商业WhatsApp AI驱动RAG聊天机器人。 | 客户服务/AI/开发 | [模板链接](./WhatsApp/Complete%20business%20WhatsApp%20AI-Powered%20RAG%20Chatbot%20using%20OpenAI.json) | +| 像专业人士一样使用AI回复WhatsApp消息! | 此工作流程实现专业的AI驱动WhatsApp消息回复。 | 客户服务/AI/沟通 | [模板链接](./WhatsApp/Respond%20to%20WhatsApp%20Messages%20with%20AI%20Like%20a%20Pro!.json) | + +### Instagram、Twitter、社交媒体 + +| 标题 | 描述 | 部门 | 链接 | +|-------|-------------|------------|------| +| Instagram DM收件箱的AI代理。Manychat + Open AI集成 | 集成Manychat与OpenAI以创建管理Instagram直接消息的AI代理。 | 营销/客户服务/AI | [模板链接](Instagram_Twitter_Social_Media/AI%20agent%20for%20Instagram%20DM_inbox.%20Manychat%20%2B%20Open%20AI%20integration.json) | +| 创建动态Twitter个人资料横幅 | 自动化动态Twitter个人资料横幅的创建。 | 营销/社交媒体 | [模板链接](Instagram_Twitter_Social_Media/Create%20dynamic%20Twitter%20profile%20banner.json) | +| 使用AI图像生成从热门趋势生成Instagram内容 | 通过分析热门趋势并使用AI生成相关图像来创建Instagram内容。 | 营销/AI/内容 | [模板链接](Instagram_Twitter_Social_Media/Generate%20Instagram%20Content%20from%20Top%20Trends%20with%20AI%20Image%20Generation.json) | +| OpenAI驱动的推文生成器 | 使用OpenAI的语言模型生成推文。 | 营销/社交媒体/AI | [模板链接](Instagram_Twitter_Social_Media/OpenAI-powered%20tweet%20generator.json) | +| 将新YouTube视频发布到X | 自动将新YouTube视频发布到X(前Twitter)。 | 营销/社交媒体 | [模板链接](Instagram_Twitter_Social_Media/Post%20New%20YouTube%20Videos%20to%20X.json) | +| Reddit AI摘要 | 创建Reddit内容的AI生成摘要。 | 营销/内容/AI | [模板链接](Instagram_Twitter_Social_Media/Reddit%20AI%20digest.json) | +| 社交媒体分析和自动邮件生成 | 分析社交媒体数据并生成自动邮件报告。 | 营销/分析 | [模板链接](Instagram_Twitter_Social_Media/Social%20Media%20Analysis%20and%20Automated%20Email%20Generation.json) | +| 使用BannerBear.com加速社交媒体横幅 | 
使用BannerBear.com自动化社交媒体横幅的创建。 | 营销/设计 | [模板链接](Instagram_Twitter_Social_Media/Speed%20Up%20Social%20Media%20Banners%20With%20BannerBear.com.json) | +| Twitter虚拟AI影响者 | 管理虚拟AI影响者的Twitter账户。 | 营销/AI | [模板链接](Instagram_Twitter_Social_Media/Twitter%20Virtual%20AI%20Influencer.json) | +| 使用HTTP请求更新Twitter横幅 | 使用HTTP请求更新Twitter横幅。 | 营销/开发 | [模板链接](Instagram_Twitter_Social_Media/Update%20Twitter%20banner%20using%20HTTP%20request.json) | + +### 其他集成和用例 + +| 标题 | 描述 | 部门 | 链接 | +|-------|-------------|------------|------| +| API模式提取器 | 从Web服务提取API模式以用于文档或集成目的。 | 开发/集成 | [模板链接](Other_Integrations_and_Use_Cases/API%20Schema%20Extractor.json) | +| 分析反馈并在Mattermost上发送消息 | 分析用户反馈并向Mattermost频道发送通知。 | 支持/沟通 | [模板链接](Other_Integrations_and_Use_Cases/Analyze%20feedback%20and%20send%20a%20message%20on%20Mattermost.json) | +| 使用AWS Comprehend分析反馈 | 使用AWS Comprehend对反馈进行情感分析并将结果发送到Mattermost。 | 支持/AI | [模板链接](Other_Integrations_and_Use_Cases/Analyze%20feedback%20using%20AWS%20Comprehend%20and%20send%20it%20to%20a%20Mattermost%20channel.json) | +| 自动化Pinterest分析和AI驱动内容建议 | 分析Pinterest数据并提供AI驱动的内容建议。 | 营销/AI | [模板链接](Other_Integrations_and_Use_Cases/Automate%20Pinterest%20Analysis%20%26%20AI-Powered%20Content%20Suggestions%20With%20Pinterest%20API.json) | +| 自动化SIEM警报丰富 | 使用MITRE ATT&CK数据丰富SIEM警报并与Zendesk集成。 | 安全/IT | [模板链接](Other_Integrations_and_Use_Cases/Automate%20SIEM%20Alert%20Enrichment%20with%20MITRE%20ATT%26CK,%20Qdrant%20%26%20Zendesk%20in%20n8n.json) | +| 使用URLbox自动化截图并用AI分析 | 截取网页截图并使用AI分析它们。 | 开发/营销 | [模板链接](Other_Integrations_and_Use_Cases/Automate%20Screenshots%20with%20URLbox%20%26%20Analyze%20them%20with%20AI.json) | +| 在Strapi中自动化推荐 | 自动化在Strapi中收集和管理推荐的过程。 | 营销/内容 | [模板链接](Other_Integrations_and_Use_Cases/Automate%20testimonials%20in%20Strapi%20with%20n8n.json) | +| Bitrix24聊天机器人应用程序 | 使用webhook集成创建Bitrix24聊天机器人的示例工作流程。 | 商业/沟通 | [模板链接](Other_Integrations_and_Use_Cases/Bitrix24%20Chatbot%20Application%20Workflow%20example%20with%20Webhook%20Integration.json) | +| 
Gitlab MR中的ChatGPT自动代码审查 | 使用ChatGPT自动化GitLab合并请求中的代码审查。 | 开发/DevOps | [模板链接](Other_Integrations_and_Use_Cases/ChatGPT%20Automatic%20Code%20Review%20in%20Gitlab%20MR.json) | +| 使用OpenAI的GPT-4在Linear中分类新错误 | 使用AI自动分类和路由Linear中的新错误报告。 | 开发/QA | [模板链接](Other_Integrations_and_Use_Cases/Classify%20new%20bugs%20in%20Linear%20with%20OpenAI_s%20GPT-4%20and%20move%20them%20to%20the%20right%20team.json) | +| 在Humantic AI中创建、更新和获取个人资料 | 在Humantic AI平台中管理用户个人资料。 | 营销/AI | [模板链接](Other_Integrations_and_Use_Cases/Create,%20update,%20and%20get%20a%20profile%20in%20Humantic%20AI.json) | +| 使用Twilio和Redis增强客户聊天 | 使用Twilio和Redis为客户聊天实现消息缓冲。 | 支持/开发 | [模板链接](Other_Integrations_and_Use_Cases/Enhance%20Customer%20Chat%20by%20Buffering%20Messages%20with%20Twilio%20and%20Redis.json) | +| Hacker News回顾机器 | 显示往年这一天在Hacker News上流行的内容。 | 开发/社区 | [模板链接](Other_Integrations_and_Use_Cases/Hacker%20News%20Throwback%20Machine%20-%20See%20What%20Was%20Hot%20on%20This%20Day,%20Every%20Year!.json) | +| 使用Twilio、Cal.com和AI处理预约潜在客户 | 使用Twilio和Cal.com管理预约安排和跟进。 | 销售/支持 | [模板链接](Other_Integrations_and_Use_Cases/Handling%20Appointment%20Leads%20and%20Follow-up%20With%20Twilio,%20Cal.com%20and%20AI.json) | +| 将AI与Open-Meteo API集成 | 通过AI分析增强天气预报。 | 数据科学/天气 | [模板链接](Other_Integrations_and_Use_Cases/Integrating%20AI%20with%20Open-Meteo%20API%20for%20Enhanced%20Weather%20Forecasting.json) | +| HTTP工具介绍 | n8n中使用HTTP工具的基本教程。 | 开发 | [模板链接](Other_Integrations_and_Use_Cases/Introduction%20to%20the%20HTTP%20Tool.json) | +| KB工具 - Confluence知识库 | 与Confluence集成以进行知识库管理。 | 文档/IT | [模板链接](Other_Integrations_and_Use_Cases/KB%20Tool%20-%20Confluence%20Knowledge%20Base.json) | +| 带有Google日历和Gmail的LINE助手 | 创建一个与Google日历和Gmail集成的LINE助手。 | 生产力/沟通 | [模板链接](Other_Integrations_and_Use_Cases/LINE%20Assistant%20with%20Google%20Calendar%20and%20Gmail%20Integration.json) | +| 每月Spotify曲目归档 | 将每月Spotify曲目归档并分类到播放列表中。 | 个人/音乐 | 
[模板链接](Other_Integrations_and_Use_Cases/Monthly%20Spotify%20Track%20Archiving%20and%20Playlist%20Classification.json) | +| Obsidian笔记朗读 | 将Obsidian笔记转换为音频格式作为播客源。 | 生产力/内容 | [模板链接](Other_Integrations_and_Use_Cases/Obsidian%20Notes%20Read%20Aloud%20using%20AI_%20Available%20as%20a%20Podcast%20Feed.json) | +| 优化和更新Printify标题和描述 | 自动化Printify产品标题和描述的优化。 | 电子商务 | [模板链接](Other_Integrations_and_Use_Cases/Optimize%20%26%20Update%20Printify%20Title%20and%20Description%20Workflow.json) | +| 使用AI筛选来自Pipedrive人员的回复 | 使用AI筛选和分类来自Pipedrive联系人的回复。 | 销售/AI | [模板链接](Other_Integrations_and_Use_Cases/Qualify%20replies%20from%20Pipedrive%20persons%20with%20AI.json) | +| 使用Apple快捷方式的Siri AI代理 | 使用Apple快捷方式创建Siri驱动的AI代理。 | 个人/生产力 | [模板链接](Other_Integrations_and_Use_Cases/Siri%20AI%20Agent_%20Apple%20Shortcuts%20powered%20voice%20template.json) | +| 使用Apple快捷方式的文本自动化 | 使用Apple快捷方式实现基于文本的自动化。 | 个人/生产力 | [模板链接](Other_Integrations_and_Use_Cases/Text%20automations%20using%20Apple%20Shortcuts.json) | +| UTM链接创建器和二维码生成器 | 创建UTM链接,生成二维码,并安排Google Analytics报告。 | 营销/分析 | [模板链接](Other_Integrations_and_Use_Cases/UTM%20Link%20Creator%20%26%20QR%20Code%20Generator%20with%20Scheduled%20Google%20Analytics%20Reports.json) | +| 使用AI组织您的Todoist收件箱 | 使用AI自动组织Todoist中的任务。 | 生产力 | [模板链接](Other_Integrations_and_Use_Cases/Use%20AI%20to%20organize%20your%20Todoist%20Inbox.json) | +| 在n8n中使用外部工作流程作为工具 | 演示如何在n8n中使用外部工作流程作为工具。 | 开发 | [模板链接](Other_Integrations_and_Use_Cases/Using%20External%20Workflows%20as%20Tools%20in%20n8n.json) | +| 使用OpenAI和Quickchart.io可视化SQL代理查询 | 使用OpenAI和Quickchart.io从SQL查询创建可视化。 | 数据分析/可视化 | [模板链接](Other_Integrations_and_Use_Cases/Visualize%20your%20SQL%20Agent%20queries%20with%20OpenAI%20and%20Quickchart.io.json) | +| Zoom AI会议助手 | 从Zoom会议创建会议摘要、ClickUp任务并安排跟进。 | 生产力/沟通 | [模板链接](Other_Integrations_and_Use_Cases/Zoom%20AI%20Meeting%20Assistant%20creates%20mail%20summary,%20ClickUp%20tasks%20and%20follow-up%20call.json) | + +### 表单和调查 + +| 标题 | 描述 | 部门 | 链接 | 
+|-------|-------------|------------|------| +| 使用AI代理和n8n表单进行对话式访谈 | 使用n8n表单实现AI驱动的对话式访谈以进行交互式数据收集。 | 研究/营销 | [模板链接](Forms_and_Surveys/Conversational%20Interviews%20with%20AI%20Agents%20and%20n8n%20Forms.json) | +| 使用n8n表单、Airtable和AI的邮件订阅服务 | 使用n8n表单管理邮件订阅,在Airtable中存储数据,并使用AI进行处理。 | 营销/沟通 | [模板链接](Forms_and_Surveys/Email%20Subscription%20Service%20with%20n8n%20Forms,%20Airtable%20and%20AI.json) | +| 使用AI和n8n表单筛选预约请求 | 使用AI筛选和处理通过n8n表单提交的预约请求。 | 销售/支持 | [模板链接](Forms_and_Surveys/Qualifying%20Appointment%20Requests%20with%20AI%20&%20n8n%20Forms.json) | + +### AI研究、RAG和数据分析 + +| 工作流程标题 | 描述 | 部门 | 模板链接 | +|---|---|---|---| +| 使用Chrome扩展、N8N和OpenAI分析tradingview.com图表 | 使用Chrome扩展、n8n和OpenAI分析TradingView图表以获取自动化洞察。 | 数据分析 | [分析tradingview.com图表.txt](./AI_Research_RAG_and_Data_Analysis/Analyze%20tradingview.com%20charts%20with%20Chrome%20extension,%20N8N%20and%20OpenAI.json) | +| 自动化Hugging Face论文摘要获取和分类工作流程 | 自动化获取、总结和分类来自Hugging Face的研究论文。 | AI研究 | [自动化Hugging Face论文摘要获取和分类工作流程.txt](./AI_Research_RAG_and_Data_Analysis/Automated%20Hugging%20Face%20Paper%20Summary%20Fetching%20%26%20Categorization%20Workflow.json) | +| 自主AI爬虫 | 用于数据收集和分析的自主AI驱动网络爬虫。 | AI研究 | [自主AI爬虫.txt](./AI_Research_RAG_and_Data_Analysis/Autonomous%20AI%20crawler.json) | +| 使用AI对象检测、CDN和ElasticSearch构建您自己的图像搜索 | 使用AI对象检测、CDN和Elasticsearch构建图像搜索引擎以进行高效图像检索。 | AI研究 | [构建您自己的图像搜索.txt](./AI_Research_RAG_and_Data_Analysis/Build%20Your%20Own%20Image%20Search%20Using%20AI%20Object%20Detection,%20CDN%20and%20ElasticSearchBuild%20Your%20Own%20Image%20Search%20Using%20AI%20Object%20Detection,%20CDN%20and%20ElasticSearch.json) | +| 使用Qdrant和Mistral.ai构建财务文档助手 | 使用Qdrant进行向量搜索和Mistral.ai进行语言处理创建财务文档分析的AI助手。 | 财务,AI研究 | [构建财务文档助手.txt](./AI_Research_RAG_and_Data_Analysis/Build%20a%20Financial%20Documents%20Assistant%20using%20Qdrant%20and%20Mistral.ai.json) | +| 使用Qdrant、Mistral.ai和OpenAI构建税务代码助手 | 使用Qdrant、Mistral.ai和OpenAI开发税务代码查询的AI助手以提供全面回复。 | 财务,AI研究 | 
[构建税务代码助手.txt](./AI_Research_RAG_and_Data_Analysis/Build%20a%20Tax%20Code%20Assistant%20with%20Qdrant,%20Mistral.ai%20and%20OpenAI.json) | +| 使用Qdrant和Open AI构建电影推荐RAG聊天机器人 | 构建基于RAG的电影推荐聊天机器人,利用Qdrant进行检索和OpenAI进行生成。 | AI研究,娱乐 | [构建电影推荐RAG聊天机器人.txt](./AI_Research_RAG_and_Data_Analysis/Building%20RAG%20Chatbot%20for%20Movie%20Recommendations%20with%20Qdrant%20and%20Open%20AI.json) | +| 与GitHub API文档聊天:使用Pinecone和OpenAI的RAG驱动聊天机器人 | 实现使用Pinecone和OpenAI与GitHub API文档交互的RAG驱动聊天机器人。 | 开发,AI研究 | [与GitHub API文档聊天.txt](./AI_Research_RAG_and_Data_Analysis/Chat%20with%20GitHub%20API%20Documentation_%20RAG-Powered%20Chatbot%20with%20Pinecone%20%26%20OpenAI.json) | +| 使用AI创建Google Analytics数据报告并发送到邮件和Telegram | 使用AI生成Google Analytics数据报告并通过邮件和Telegram发送。 | 数据分析,营销 | [创建Google Analytics数据报告.txt](./AI_Research_RAG_and_Data_Analysis/Create%20a%20Google%20Analytics%20Data%20Report%20with%20AI%20and%20sent%20it%20to%20E-Mail%20and%20Telegram.json) | +| 使用Qdrant、Python和信息提取器的客户洞察 | 使用Qdrant、Python和信息提取模块提取客户洞察。 | 数据分析,客户服务 | [客户洞察.txt](./AI_Research_RAG_and_Data_Analysis/Customer%20Insights%20with%20Qdrant,%20Python%20and%20Information%20Extractor.json) | +| 使用AI去重抓取AI资助资格 | 使用AI自动化抓取的AI资助数据的去重和资格评估。 | AI研究,数据管理 | [去重抓取AI资助资格.txt](./AI_Research_RAG_and_Data_Analysis/Deduplicate%20Scraping%20AI%20Grants%20for%20Eligibility%20using%20AI.json) | +| 使用图像识别和AI代理丰富物业库存调查 | 使用图像识别和AI代理增强物业库存调查以进行自动数据丰富。 | 房地产,AI研究 | [丰富物业库存调查.txt](./AI_Research_RAG_and_Data_Analysis/Enrich%20Property%20Inventory%20Survey%20with%20Image%20Recognition%20and%20AI%20Agent.json) | +| 通过AI代理聊天提取洞察并分析YouTube评论 | 通过AI代理聊天界面提取洞察并分析YouTube评论。 | 社交媒体,数据分析 | [提取洞察并分析YouTube评论.txt](./AI_Research_RAG_and_Data_Analysis/Extract%20insights%20%26%20analyse%20YouTube%20comments%20via%20AI%20Agent%20chat.json) | +| 使用AI生成SEO种子关键词 | 使用AI生成SEO种子关键词以优化搜索引擎内容。 | 营销,AI研究 | [生成SEO种子关键词.txt](./AI_Research_RAG_and_Data_Analysis/Generate%20SEO%20Seed%20Keywords%20Using%20AI.json) | +| Hacker News工作列表抓取器和解析器 | 为求职者或招聘人员抓取和解析Hacker 
News的工作列表。 | 数据收集,HR | [Hacker News工作列表抓取器.txt](./AI_Research_RAG_and_Data_Analysis/Hacker%20News%20Job%20Listing%20Scraper%20and%20Parser.json) | +| Hacker News到视频内容 | 自动将Hacker News文章转换为视频内容。 | 内容创建,媒体 | [Hacker News到视频内容.txt](./AI_Research_RAG_and_Data_Analysis/Hacker%20News%20to%20Video%20Content.json) | +| 使用n8n、Apify和OpenAI o3托管您自己的AI深度研究代理 | 使用n8n、Apify和OpenAI设置自主托管的AI深度研究代理。 | AI研究,自动化 | [托管您自己的AI深度研究代理.txt](./AI_Research_RAG_and_Data_Analysis/Host%20Your%20Own%20AI%20Deep%20Research%20Agent%20with%20n8n,%20Apify%20and%20OpenAI%20o3.json) | +| 使用Brave和Google Gemini的智能网络查询和语义重新排序流程 | 使用Brave浏览器和Google Gemini AI执行智能网络查询和语义重新排序。 | AI研究,数据分析 | [智能网络查询和语义重新排序流程.txt](./AI_Research_RAG_and_Data_Analysis/Intelligent%20Web%20Query%20and%20Semantic%20Re-Ranking%20Flow%20using%20Brave%20and%20Google%20Gemini.json) | +| 从HN学习任何东西 - 从Hacker News获取顶级资源推荐 | 从Hacker News提取顶级资源推荐以促进任何主题的学习。 | 教育,数据分析 | [从HN学习任何东西.txt](./AI_Research_RAG_and_Data_Analysis/Learn%20Anything%20from%20HN%20-%20Get%20Top%20Resource%20Recommendations%20from%20Hacker%20News.json) | +| 为文件检索RAG制作OpenAI引用 | 使用OpenAI为RAG系统中的文件检索生成引用。 | AI研究,文档 | [制作OpenAI引用.txt](./AI_Research_RAG_and_Data_Analysis/Make%20OpenAI%20Citation%20for%20File%20Retrieval%20RAG.json) | +| 开放深度研究 - AI驱动的自主研究工作流程 | 用于进行深度研究的AI驱动自主工作流程。 | AI研究,自动化 | [开放深度研究.txt](./AI_Research_RAG_and_Data_Analysis/Open%20Deep%20Research%20-%20AI-Powered%20Autonomous%20Research%20Workflow.json) | +| 从您的n8n工作流程查询Perplexity AI | 将Perplexity AI集成到n8n工作流程中以进行高级查询功能。 | AI研究,自动化 | [查询Perplexity AI.txt](./AI_Research_RAG_and_Data_Analysis/Query%20Perplexity%20AI%20from%20your%20n8n%20workflows.json) | +| 使用Qdrant和Mistral的食谱推荐 | 使用Qdrant进行向量搜索和Mistral AI进行内容生成提供食谱推荐。 | 食品,AI研究 | [食谱推荐.txt](./AI_Research_RAG_and_Data_Analysis/Recipe%20Recommendations%20with%20Qdrant%20and%20Mistral.json) | +| 使用本地Excel电子表格和OpenAI对租金支付进行对账 | 通过比较本地Excel电子表格与OpenAI处理的数据来对租金支付进行对账。 | 财务,数据管理 | 
[对账租金支付.txt](./AI_Research_RAG_and_Data_Analysis/Reconcile%20Rent%20Payments%20with%20Local%20Excel%20Spreadsheet%20and%20OpenAI.json) | +| 使用DeepSeek抓取Trustpilot评论,使用OpenAI分析情感 | 使用DeepSeek抓取Trustpilot评论并使用OpenAI分析情感。 | 营销,数据分析 | [抓取Trustpilot评论.txt](./AI_Research_RAG_and_Data_Analysis/Scrape%20Trustpilot%20Reviews%20with%20DeepSeek,%20Analyze%20Sentiment%20with%20OpenAI.json) | +| 使用AI抓取和总结没有RSS源的新闻网站帖子并保存到NocoDB | 使用AI抓取和总结没有RSS源的新闻帖子,将输出保存到NocoDB。 | 内容策划,数据管理 | [抓取和总结新闻帖子.txt](./AI_Research_RAG_and_Data_Analysis/Scrape%20and%20summarize%20posts%20of%20a%20news%20site%20without%20RSS%20feed%20using%20AI%20and%20save%20them%20to%20a%20NocoDB.json) | +| 使用AI抓取和总结网页 | 使用AI抓取和总结网页内容。 | 内容策划,数据分析 | [抓取和总结网页.txt](./AI_Research_RAG_and_Data_Analysis/Scrape%20and%20summarize%20webpages%20with%20AI.json) | +| 将Google Analytics数据发送给AI进行分析,然后将结果保存在Baserow中 | 将Google Analytics数据发送给AI进行分析,并将结果保存在Baserow中。 | 数据分析,营销 | [发送Google Analytics数据.txt](./AI_Research_RAG_and_Data_Analysis/Send%20Google%20analytics%20data%20to%20A.I.%20to%20analyze%20then%20save%20results%20in%20Baserow.json) | +| 使用AI发现工作场所歧视模式 | 使用AI驱动分析识别工作场所歧视模式。 | HR,AI研究 | [发现工作场所歧视模式.txt](./AI_Research_RAG_and_Data_Analysis/Spot%20Workplace%20Discrimination%20Patterns%20with%20AI.json) | +| 使用AI总结SERPBear数据(通过Openrouter)并保存到Baserow | 使用AI(通过Openrouter)总结SERPBear数据并将洞察保存到Baserow。 | SEO,数据分析 | [总结SERPBear数据.txt](./AI_Research_RAG_and_Data_Analysis/Summarize%20SERPBear%20data%20with%20AI%20(via%20Openrouter)%20and%20save%20it%20to%20Baserow.json) | +| 使用AI总结Umami数据(通过Openrouter)并保存到Baserow | 使用AI(通过Openrouter)总结Umami分析数据并将洞察保存到Baserow。 | 数据分析,营销 | [总结Umami数据.txt](./AI_Research_RAG_and_Data_Analysis/Summarize%20Umami%20data%20with%20AI%20(via%20Openrouter)%20and%20save%20it%20to%20Baserow.json) | +| 使用Qdrant、Python和信息提取器的调查洞察 | 使用Qdrant、Python和信息提取器从调查数据中提取和分析洞察。 | 数据分析,市场研究 | [调查洞察.txt](./AI_Research_RAG_and_Data_Analysis/Survey%20Insights%20with%20Qdrant,%20Python%20and%20Information%20Extractor.json) | +| 
n8n的终极抓取工作流程 | 用于从各种源提取数据的n8n综合抓取工作流程。 | 数据收集,自动化 | [终极抓取工作流程.txt](./AI_Research_RAG_and_Data_Analysis/Ultimate%20Scraper%20Workflow%20for%20n8n.json) | +| 向量数据库作为AI代理的大数据分析工具[1/3异常][1/2 KNN] | 利用向量数据库进行大数据分析,专注于AI代理的异常检测和KNN分类。 | AI研究,数据分析 | [向量数据库作为大数据分析工具1.txt](./AI_Research_RAG_and_Data_Analysis/Vector%20Database%20as%20a%20Big%20Data%20Analysis%20Tool%20for%20AI%20Agents%20[1_3%20anomaly][1_2%20KNN].json) | +| 向量数据库作为AI代理的大数据分析工具[2/2 KNN] | 继续使用向量数据库进行大数据分析,专注于AI代理的KNN分类。 | AI研究,数据分析 | [向量数据库作为大数据分析工具2.txt](./AI_Research_RAG_and_Data_Analysis/Vector%20Database%20as%20a%20Big%20Data%20Analysis%20Tool%20for%20AI%20Agents%20[2_2%20KNN].json) | +| 向量数据库作为AI代理的大数据分析工具[2/3 - 异常] | 探索使用向量数据库进行大数据分析,专注于AI代理的异常检测。 | AI研究,数据分析 | [向量数据库作为大数据分析工具3.txt](./AI_Research_RAG_and_Data_Analysis/Vector%20Database%20as%20a%20Big%20Data%20Analysis%20Tool%20for%20AI%20Agents%20[2_3%20-%20anomaly].json) | +| 向量数据库作为AI代理的大数据分析工具[3/3 - 异常] | 总结使用向量数据库进行大数据分析,专注于AI代理的异常检测。 | AI研究,数据分析 | [向量数据库作为大数据分析工具4.txt](./AI_Research_RAG_and_Data_Analysis/Vector%20Database%20as%20a%20Big%20Data%20Analysis%20Tool%20for%20AI%20Agents%20[3_3%20-%20anomaly].json) | +| 使用Apify和AI视觉模型进行视觉回归测试 | 使用Apify和AI视觉模型执行视觉回归测试以检测UI变化。 | QA,AI研究 | [视觉回归测试.txt](./AI_Research_RAG_and_Data_Analysis/Visual%20Regression%20Testing%20with%20Apify%20and%20AI%20Vision%20Model.json) | +| 🔍 Perplexity研究到HTML:AI驱动的内容创建 | 将Perplexity AI研究转换为HTML内容以进行AI驱动的内容创建。 | 内容创建,AI研究 | [🔍 Perplexity研究到HTML.txt](./AI_Research_RAG_and_Data_Analysis/%F0%9F%94%8D%20Perplexity%20Research%20to%20HTML_%20AI-Powered%20Content%20Creation.json) | + + +### **其他** + +- ALL_unique_nodes.txt (节点参考) + +... 还有更多! + +--- + +如果您想贡献其他模板或建议新类别,请随时提出issue或pull request! 
+ +--- + +👉 [加入n8n,立即开始自动化!💎](https://n8n.partnerlinks.io/h1pwwf5m4toe) + + n8n + + +--- + +### **赞助商** +- [mahezsh](https://github.com/mahezsh) diff --git a/README.md b/README.md index 0f93ac4..c7ee012 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,8 @@ # n8n_automations + +[![English](https://img.shields.io/badge/English-Click-yellow)](README.md) +[![中文文档](https://img.shields.io/badge/中文文档-点击查看-orange)](README-zh.md) + This repository contains a collection of n8n automation templates sourced from the internet. These templates are designed to help automate a wide range of tasks and workflows using [n8n](https://n8n.partnerlinks.io/h1pwwf5m4toe), making it easier for users to discover and use ready-made automations for various platforms and services. 👉 [Join n8n and start automating now! 💎](https://n8n.partnerlinks.io/h1pwwf5m4toe) @@ -40,208 +44,281 @@ Looking to extract and merge entire Twitter (X) threads quickly and affordably? Check out my [n8n Twitter Thread Fetcher workflow](https://n8n.io/workflows/4088-extract-and-merge-twitter-x-threads-using-twitterapiio/)-it’s lightning-fast, cost-effective, and perfect for automating Twitter thread extraction for research, content curation, or archiving! --- +### Gmail & Email Automation -### **Gmail & Email Automation** +| Title | Description | Department | Link | +|-------|-------------|------------|------| +| Auto-label incoming Gmail messages with AI nodes | Automatically labels incoming Gmail messages using AI. The workflow retrieves message content, suggests labels like Partnership or Inquiry, and assigns them for better organization. | Ops | [Link to Template](Gmail_and_Email_Automation/Auto-label%20incoming%20Gmail%20messages%20with%20AI%20nodes.json) | +| Basic Automatic Gmail Email Labelling with OpenAI and Gmail API | Uses OpenAI and Gmail API to trigger on new emails, analyze content, and assign or create labels automatically. Helps categorize emails efficiently using AI. 
| Ops | [Link to Template](Gmail_and_Email_Automation/Basic%20Automatic%20Gmail%20Email%20Labelling%20with%20OpenAI%20and%20Gmail%20API.json) | +| Compose reply draft in Gmail with OpenAI Assistant | Generates draft replies in Gmail using OpenAI. Triggers on new emails, extracts content, and creates a suggested reply draft to streamline responses. | Executive | [Link to Template](Gmail_and_Email_Automation/Compose%20reply%20draft%20in%20Gmail%20with%20OpenAI%20Assistant.json) | +| Analyze & Sort Suspicious Email Contents with ChatGPT | Analyzes suspicious emails using ChatGPT, classifies them, and can generate screenshots for review. Helps identify and sort potentially dangerous emails. | Security | [Link to Template](Gmail_and_Email_Automation/Analyze%20&%20Sort%20Suspicious%20Email%20Contents%20with%20ChatGPT.json) | +| Analyze Suspicious Email Contents with ChatGPT Vision | Uses both text and image analysis (ChatGPT Vision) to evaluate suspicious emails. Extracts screenshots, analyzes headers and content, and flags phishing attempts. | Security | [Link to Template](Gmail_and_Email_Automation/Analyze%20Suspicious%20Email%20Contents%20with%20ChatGPT%20Vision.json) | +| A Very Simple "Human in the Loop" Email Response System Using AI and IMAP | Implements a simple workflow for human-in-the-loop email responses. Uses IMAP to fetch emails, summarizes content with AI, and drafts professional replies for review before sending. | Support | [Link to Template](Gmail_and_Email_Automation/A%20Very%20Simple%20_Human%20in%20the%20Loop_%20Email%20Response%20System%20Using%20AI%20and%20IMAP.json) | +| Auto Categorise Outlook Emails with AI | Automatically categorizes Outlook emails using AI models. Moves messages to folders and assigns categories based on content, reducing manual sorting. 
| Ops | [Link to Template](Gmail_and_Email_Automation/Auto%20Categorise%20Outlook%20Emails%20with%20AI.json) | +| Microsoft Outlook AI Email Assistant with contact support from Monday and Airtable | An AI-powered assistant for Outlook that processes emails, sanitizes content, and assigns categories using rules from Airtable. Integrates with Monday.com for contact support. | Ops | [Link to Template](Gmail_and_Email_Automation/Microsoft%20Outlook%20AI%20Email%20Assistant%20with%20contact%20support%20from%20Monday%20and%20Airtable.json) | +| 📈 Receive Daily Market News from FT.com to your Microsoft outlook inbox | Extracts financial news from FT.com and delivers daily updates to your Outlook inbox. Automates content extraction and email delivery for timely market insights. | Executive | [Link to Template](Gmail_and_Email_Automation/📈%20Receive%20Daily%20Market%20News%20from%20FT.com%20to%20your%20Microsoft%20outlook%20inbox.json) | -- Auto-label incoming Gmail messages with AI nodes -- Basic Automatic Gmail Email Labelling with OpenAI and Gmail API -- Compose reply draft in Gmail with OpenAI Assistant -- Extract spending history from gmail to google sheet -- Gmail AI Auto-Responder: Create Draft Replies to incoming emails -- Send specific PDF attachments from Gmail to Google Drive using OpenAI -- Summarize your emails with A.I. 
(via Openrouter) and send to Line messenger -- Effortless Email Management with AI-Powered Summarization & Review -- Analyze & Sort Suspicious Email Contents with ChatGPT -- Analyze Suspicious Email Contents with ChatGPT Vision -- A Very Simple "Human in the Loop" Email Response System Using AI and IMAP -- Auto Categorise Outlook Emails with AI -- Microsoft Outlook AI Email Assistant with contact support from Monday and Airtable -- 📈 Receive Daily Market News from FT.com to your Microsoft outlook inbox +### Telegram -### **Telegram** +| Title | Description | Department | Link | +|-------|-------------|------------|------| +| Agentic Telegram AI bot with LangChain nodes and new tools | An advanced Telegram bot leveraging LangChain and OpenAI for conversational AI. Supports memory, dynamic tool use, and handles incoming events for rich, context-aware chat interactions. | Support | [Link to Template](Telegram/Agentic%20Telegram%20AI%20bot%20with%20with%20LangChain%20nodes%20and%20new%20tools.json) | +| AI-Powered Children’s Arabic Storytelling on Telegram | A Telegram bot that uses OpenAI to generate and narrate children’s stories in Arabic, making storytelling interactive and educational for young users. | Support | [Link to Template](Telegram/AI-Powered%20Children_s%20Arabic%20Storytelling%20on%20Telegram.json) | +| AI-Powered Children’s English Storytelling on Telegram with OpenAI | Creates and tells children’s stories in English using OpenAI to engage young audiences in an interactive way. | Support | [Link to Template](Telegram/AI-Powered%20Children_s%20English%20Storytelling%20on%20Telegram%20with%20OpenAI.json) | +| Automated AI image analysis and response via Telegram | Lets users send images to Telegram and receive AI-based analysis and feedback automatically. 
| Ops | [Link to Template](Telegram/Automated%20AI%20image%20analysis%20and%20response%20via%20Telegram.json) | +| Angie, Personal AI Assistant with Telegram Voice and Text | Personal voice & text assistant bot that answers queries, manages tasks, and interacts naturally using AI. | Support | [Link to Template](Telegram/Angie,%20Personal%20AI%20Assistant%20with%20Telegram%20Voice%20and%20Text.json) | +| Chat with OpenAI’s GPT via a simple Telegram Bot | A minimal Telegram bot that forwards user messages to GPT and returns AI-generated replies. Ideal starting point for AI chat. | Support | [Link to Template](Telegram/Chat%20with%20OpenAIs%20GPT%20via%20a%20simple%20Telegram%20Bot.json) | +| Telegram AI bot assistant: ready-made template for voice & text messages | Ready-made assistant bot handling both voice and text input, leveraging AI for smart conversational responses in Telegram. | Support | [Link to Template](Telegram/Telegram%20AI%20bot%20assistant_%20ready-made%20template%20for%20voice%20&%20text%20messages.json) | +| Telegram AI Bot: NeurochainAI Text & Image | Integrates NeurochainAI API for text and image generation inside Telegram, enabling creative media interactions. | Marketing | [Link to Template](Telegram/Telegram%20AI%20Bot_%20NeurochainAI%20Text%20&%20Image%20-%20NeurochainAI%20Basic%20API%20Integration.json) | +| Telegram AI bot with LangChain nodes | Uses LangChain nodes for advanced AI conversations and tool use in Telegram. | Support | [Link to Template](Telegram/Telegram%20AI%20bot%20with%20LangChain%20nodes.json) | +| Telegram AI Chatbot | A general-purpose AI chatbot template for Telegram that can be customized for various use cases. | Support | [Link to Template](Telegram/Telegram%20AI%20Chatbot.json) | +| Telegram Bot with Supabase memory and OpenAI assistant integration | Adds long-term memory with Supabase to a Telegram bot, coupled with OpenAI for rich, context-aware conversations. 
| Support | [Link to Template](Telegram/Telegram%20Bot%20with%20Supabase%20memory%20and%20OpenAI%20assistant%20integration.json) | +| Telegram chat with PDF | Allows users to upload a PDF to Telegram and chat with its contents using AI-powered summarization and Q&A. | Ops | [Link to Template](Telegram/Telegram%20chat%20with%20PDF.json) | +| 🤖 Telegram Messaging Agent for Text_Audio_Images | Multi-modal agent that processes text, audio, and images in Telegram chats using AI for responses. | Support | [Link to Template](Telegram/%F0%9F%A4%96%20Telegram%20Messaging%20Agent%20for%20Text_Audio_Images.json) | +| Telegram to Spotify with OpenAI | Lets users request songs or playlists in Telegram and automatically create them in Spotify via OpenAI. | Marketing | [Link to Template](Telegram/Telegram%20to%20Spotify%20with%20OpenAI.json) | +| Send a random recipe once a day to Telegram | Scheduled workflow that fetches a random recipe daily and posts it to a Telegram chat. | Marketing | [Link to Template](Telegram/Send%20a%20random%20recipe%20once%20a%20day%20to%20Telegram.json) | +| Detect toxic language in Telegram messages | Monitors Telegram chats and flags messages containing toxic language using AI moderation. | Security | [Link to Template](Telegram/Detect%20toxic%20language%20in%20Telegram%20messages.json) | +| Translate Telegram audio messages with AI (55 supported languages) | Receives voice messages, transcribes them, and sends back translations in over 50 languages. | Support | [Link to Template](Telegram/Translate%20Telegram%20audio%20messages%20with%20AI%20(55%20supported%20languages).json) | +| Empower Your AI Chatbot with Long-Term Memory and Dynamic Tool Routing | External workflow enhancing an AI chatbot with long-term memory and dynamic tool routing capabilities. 
| Support | [Link to Template](https://n8n.io/workflows/3025-empower-your-ai-chatbot-with-long-term-memory-and-dynamic-tool-routing/) | -- Agentic Telegram AI bot with LangChain nodes and new tools -- [Empower Your AI Chatbot with Long-Term Memory and Dynamic Tool Routing (n8n workflow template)](https://n8n.io/workflows/3025-empower-your-ai-chatbot-with-long-term-memory-and-dynamic-tool-routing/) -- AI-Powered Children’s Arabic Storytelling on Telegram -- AI-Powered Children’s English Storytelling on Telegram with OpenAI -- Automated AI image analysis and response via Telegram -- Angie, Personal AI Assistant with Telegram Voice and Text -- Chat with OpenAIs GPT via a simple Telegram Bot -- Telegram AI bot assistant: ready-made template for voice & text messages -- Telegram AI Bot: NeurochainAI Text & Image - NeurochainAI Basic API Integration -- Telegram AI bot with LangChain nodes -- Telegram AI Chatbot -- Telegram Bot with Supabase memory and OpenAI assistant integration -- Telegram chat with PDF -- 🤖 Telegram Messaging Agent for Text_Audio_Images -- Telegram to Spotify with OpenAI -- Send a random recipe once a day to Telegram -- Detect toxic language in Telegram messages -- Translate Telegram audio messages with AI (55 supported languages) +### Google Drive & Google Sheets -### **Google Drive & Google Sheets** +| Title | Description | Department | Link | +|-------|-------------|------------|------| +| Automated End-to-End Fine-Tuning of OpenAI Models with Google Drive Integration | Automates the fine-tuning of OpenAI models by integrating with Google Drive for data input and output, streamlining custom AI model training. 
| Engineering | [Link to Template](Google_Drive_and_Google_Sheets/Automated%20End-to-End%20Fine-Tuning%20of%20OpenAI%20Models%20with%20Google%20Drive%20Integration.json) | +| Automatic Background Removal for Images in Google Drive | Automatically removes backgrounds from images stored in Google Drive, preparing them for various uses like product catalogs or marketing materials. | Marketing | [Link to Template](Google_Drive_and_Google_Sheets/Automatic%20Background%20Removal%20for%20Images%20in%20Google%20Drive.json) | +| Build an OpenAI Assistant with Google Drive Integration | Demonstrates building an OpenAI Assistant that accesses and utilizes files in Google Drive, enabling it to answer questions or perform tasks based on document content. | Support | [Link to Template](Google_Drive_and_Google_Sheets/Build%20an%20OpenAI%20Assistant%20with%20Google%20Drive%20Integration.json) | +| RAG Chatbot for Company Documents using Google Drive and Gemini | Creates a Retrieval-Augmented Generation (RAG) chatbot that answers questions based on company documents stored in Google Drive, leveraging Google Gemini. | Support | [Link to Template](Google_Drive_and_Google_Sheets/RAG%20Chatbot%20for%20Company%20Documents%20using%20Google%20Drive%20and%20Gemini.json) | +| RAG_Context-Aware Chunking: Google Drive to Pinecone via OpenRouter & Gemini | Implements context-aware chunking for Google Drive documents, sending them to Pinecone for vector storage and using OpenRouter & Gemini for advanced RAG. | Engineering | [Link to Template](Google_Drive_and_Google_Sheets/RAG_Context-Aware%20Chunking%20_%20Google%20Drive%20to%20Pinecone%20via%20OpenRouter%20&%20Gemini.json) | +| Summarize the New Documents from Google Drive and Save Summary in Google Sheet | Monitors Google Drive for new documents, summarizes their content using AI, and saves these summaries into a Google Sheet for quick overview and analysis. 
| Ops | [Link to Template](Google_Drive_and_Google_Sheets/Summarize%20the%20New%20Documents%20from%20Google%20Drive%20and%20Save%20Summary%20in%20Google%20Sheet.json) | +| Upload to Instagram and Tiktok from Google Drive | Automates uploading media from Google Drive directly to Instagram and TikTok, streamlining social media content publishing. | Marketing | [Link to Template](Google_Drive_and_Google_Sheets/Upload%20to%20Instagram%20and%20Tiktok%20from%20Google%20Drive.json) | +| Author and Publish Blog Posts From Google Sheets | Enables authoring blog posts in Google Sheets and automatically publishing them to a content management system, simplifying content creation and publishing. | Marketing | [Link to Template](Google_Drive_and_Google_Sheets/Author%20and%20Publish%20Blog%20Posts%20From%20Google%20Sheets.json) | +| Chat with a Google Sheet using AI | Allows users to interact with and query data within a Google Sheet using natural language via an AI model, making data analysis more accessible. | Ops | [Link to Template](Google_Drive_and_Google_Sheets/Chat%20with%20a%20Google%20Sheet%20using%20AI.json) | +| Chat with your event schedule from Google Sheets in Telegram | Connects a Google Sheet containing an event schedule to Telegram, allowing users to query their schedule through a Telegram bot. | Ops | [Link to Template](Google_Drive_and_Google_Sheets/Chat%20with%20your%20event%20schedule%20from%20Google%20Sheets%20in%20Telegram.json) | +| Qualify new leads in Google Sheets via OpenAI’s GPT-4 | Uses OpenAI's GPT-4 to analyze and qualify new leads entered into a Google Sheet, helping sales teams prioritize their outreach. | Sales | [Link to Template](Google_Drive_and_Google_Sheets/Qualify%20new%20leads%20in%20Google%20Sheets%20via%20OpenAI_s%20GPT-4.json) | +| Screen Applicants With AI, notify HR and save them in a Google Sheet | Automates the screening of job applicants using AI, notifies HR of qualified candidates, and saves applicant data into a Google Sheet. 
| HR | [Link to Template](Google_Drive_and_Google_Sheets/Screen%20Applicants%20With%20AI,%20notify%20HR%20and%20save%20them%20in%20a%20Google%20Sheet.json) | +| Summarize Google Sheets form feedback via OpenAI’s GPT-4 | Summarizes feedback collected through Google Forms and stored in Google Sheets using OpenAI's GPT-4, providing quick insights from survey responses. | Marketing | [Link to Template](Google_Drive_and_Google_Sheets/Summarize%20Google%20Sheets%20form%20feedback%20via%20OpenAI_s%20GPT-4.json) | -- Automated End-to-End Fine-Tuning of OpenAI Models with Google Drive Integration -- Automatic Background Removal for Images in Google Drive -- Build an OpenAI Assistant with Google Drive Integration -- RAG Chatbot for Company Documents using Google Drive and Gemini -- RAG_Context-Aware Chunking: Google Drive to Pinecone via OpenRouter & Gemini -- Summarize the New Documents from Google Drive and Save Summary in Google Sheet -- Upload to Instagram and Tiktok from Google Drive -- Author and Publish Blog Posts From Google Sheets -- Chat with a Google Sheet using AI -- Chat with your event schedule from Google Sheets in Telegram -- Qualify new leads in Google Sheets via OpenAI’s GPT-4 -- Screen Applicants With AI, notify HR and save them in a Google Sheet -- Summarize Google Sheets form feedback via OpenAI’s GPT-4 +--- -### **Slack** +*More sections and tables can be added below as the project expands. -- Automated Notion task reminders via Slack -- AI-Powered Information Monitoring with OpenAI, Google Sheets, Jina AI and Slack -- Creating an AI Slack Bot with Google Gemini -- Customer Support Channel and Ticketing System with Slack and Linear -- Enhance Security Operations with the Qualys Slack Shortcut Bot! 
-- Sentiment Analysis Tracking on Support Issues with Linear and Slack -- IT Ops AI SlackBot Workflow - Chat with your knowledge base +### WordPress -### **Notion** +| Title | Description | Department | Link | +|-------|-------------|------------|------| +| Auto-Categorize blog posts in wordpress using A.I. | This workflow automates the categorization of WordPress blog posts using AI, streamlining content organization and management. | Marketing/Content | [Link to Template](WordPress/Auto-Categorize%20blog%20posts%20in%20wordpress%20using%20A.I..json) | +| Auto-Tag Blog Posts in WordPress with AI | This workflow automatically tags WordPress blog posts using AI, improving SEO and content discoverability. | Marketing/Content | [Link to Template](WordPress/Auto-Tag%20Blog%20Posts%20in%20WordPress%20with%20AI.json) | +| Automate Blog Creation in Brand Voice with AI | This workflow automates the creation of blog posts, ensuring they adhere to a specific brand voice using AI. | Marketing/Content | [Link to Template](WordPress/Automate%20Blog%20Creation%20in%20Brand%20Voice%20with%20AI.json) | +| Automate Content Generator for WordPress with DeepSeek R1 | This workflow automates content generation for WordPress using the DeepSeek R1 AI model, enabling rapid content creation. | Marketing/Content | [Link to Template](WordPress/Automate%20Content%20Generator%20for%20WordPress%20with%20DeepSeek%20R1.json) | +| WordPress - AI Chatbot to enhance user experience - with Supabase and OpenAI | This workflow integrates an AI chatbot into WordPress using Supabase and OpenAI to enhance user experience by providing intelligent interactions. 
| Customer Support/Marketing | [Link to Template](WordPress/WordPress%20-%20AI%20Chatbot%20to%20enhance%20user%20experience%20-%20with%20Supabase%20and%20OpenAI.json) | -- Add positive feedback messages to a table in Notion -- Analyse papers from Hugging Face with AI and store them in Notion -- Notion AI Assistant Generator -- Notion knowledge base AI assistant -- Notion to Pinecone Vector Store Integration -- Store Notion’s Pages as Vector Documents into Supabase with OpenAI -- Upsert huge documents in a vector store with Supabase and Notion -- Turn Emails into AI-Enhanced Tasks in Notion (Multi-User Support) with Gmail, Airtable and Softr -- Automate Competitor Research with Exa.ai, Notion and AI Agents +### PDF & Document Processing -### **Airtable** +| Title | Description | Department | Link | +|-------|-------------|------------|------| +| Ask questions about a PDF using AI | This workflow fetches a PDF from Google Drive, splits it into chunks, embeds the chunks using OpenAI embeddings, and enables chat interactions with the document content. | Customer Support/Knowledge Management | [Link to Template](PDF_and_Document_Processing/Ask%20questions%20about%20a%20PDF%20using%20AI.json) | +| Breakdown Documents into Study Notes using Templating MistralAI and Qdrant | This workflow triggers on new files, processes documents with MistralAI embeddings, and stores data in Qdrant vector store for study note generation. | Education/Knowledge Management | [Link to Template](PDF_and_Document_Processing/Breakdown%20Documents%20into%20Study%20Notes%20using%20Templating%20MistralAI%20and%20Qdrant.json) | +| CV Resume PDF Parsing with Multimodal Vision AI | This workflow converts candidate resume PDFs to images, uses a Vision Language Model to assess candidate fit, and includes logic to bypass hidden AI prompts in resumes. 
| HR | [Link to Template](PDF_and_Document_Processing/CV%20Resume%20PDF%20Parsing%20with%20Multimodal%20Vision%20AI.json) | +| Chat with PDF docs using AI (quoting sources) | This workflow enables chat interactions with PDF documents, allowing users to ask questions and receive answers with quoted sources from the document. | Customer Support/Knowledge Management | [Link to Template](PDF_and_Document_Processing/Chat%20with%20PDF%20docs%20using%20AI%20(quoting%20sources).json) | +| Convert URL HTML to Markdown Format and Get Page Links | This workflow converts HTML content from a given URL into Markdown format and extracts all page links, useful for content scraping and analysis. | Marketing/Content | [Link to Template](PDF_and_Document_Processing/Convert%20URL%20HTML%20to%20Markdown%20Format%20and%20Get%20Page%20Links.json) | +| ETL pipeline for text processing | This workflow implements an ETL pipeline for text processing, extracting data from Twitter, storing it in MongoDB and PostgreSQL, and sending alerts to Slack based on sentiment analysis. | Data Analytics/IT | [Link to Template](PDF_and_Document_Processing/ETL%20pipeline%20for%20text%20processing.json) | +| Extract and process information directly from PDF using Claude and Gemini | This workflow extracts and processes information directly from PDFs using advanced AI models like Claude and Gemini, enabling intelligent document analysis. | Data Extraction/IT | [Link to Template](PDF_and_Document_Processing/Extract%20and%20process%20information%20directly%20from%20PDF%20using%20Claude%20and%20Gemini.json) | +| Extract data from resume and create PDF with Gotenberg | This workflow extracts structured data from resumes using AI, converts it into HTML, and then generates a well-formatted PDF using Gotenberg. 
| HR | [Link to Template](PDF_and_Document_Processing/Extract%20data%20from%20resume%20and%20create%20PDF%20with%20Gotenberg.json) | +| Extract license plate number from image uploaded via an n8n form | This workflow extracts license plate numbers from images uploaded via an n8n form using a Vision Language Model, then displays the extracted information. | Operations/Logistics | [Link to Template](PDF_and_Document_Processing/Extract%20license%20plate%20number%20from%20image%20uploaded%20via%20an%20n8n%20form.json) | +| Extract text from PDF and image using Vertex AI (Gemini) into CSV | This workflow extracts text from PDFs and images using Vertex AI (Gemini), routes based on file type, and converts the extracted data into a CSV format. | Data Extraction/IT | [Link to Template](PDF_and_Document_Processing/Extract%20text%20from%20PDF%20and%20image%20using%20Vertex%20AI%20(Gemini)%20into%20CSV.json) | +| Invoice data extraction with LlamaParse and OpenAI | This workflow extracts structured data from invoices using LlamaParse and OpenAI, then processes it with a structured output parser for detailed invoice data extraction. | Finance/Admin | [Link to Template](PDF_and_Document_Processing/Invoice%20data%20extraction%20with%20LlamaParse%20and%20OpenAI.json) | +| Write a WordPress post with AI (starting from a few keywords) | This workflow uses AI to write WordPress posts based on a few keywords, simplifying the content creation process. 
| Marketing/Content | [Link to Template](WordPress/Write%20a%20WordPress%20post%20with%20AI%20(starting%20from%20a%20few%20keywords).json) | -- AI Agent for project management and meetings with Airtable and Fireflies -- AI Agent to chat with Airtable and analyze data -- Get Airtable data via AI and Obsidian Notes -- Turn Emails into AI-Enhanced Tasks in Notion (Multi-User Support) with Gmail, Airtable and Softr -- vAssistant for Hubspot Chat using OpenAi and Airtable -- Handling Job Application Submissions with AI and [n8n](https://n8n.partnerlinks.io/h1pwwf5m4toe) Forms +### Discord -### **OpenAI & LLMs** +| Title | Description | Department | Link | +|-------|-------------|------------|------| +| Discord AI-powered bot | This workflow creates an AI-powered Discord bot that categorizes user messages (success story, urgent issue, ticket) and routes them to the appropriate department (customer success, IT, customer support). | Customer Support | [Link to Template](Discord/Discord%20AI-powered%20bot.json) | +| Send daily translated Calvin and Hobbes Comics to Discord | This workflow automates the daily retrieval of Calvin and Hobbes comics, translates the dialogues into English and Korean (or other languages), and posts them to Discord. | Marketing/Content | [Link to Template](Discord/Send%20daily%20translated%20Calvin%20and%20Hobbes%20Comics%20to%20Discord.json) | +| Share YouTube Videos with AI Summaries on Discord | This workflow automatically shares new YouTube videos on Discord along with AI-generated summaries of their content, leveraging caption data. 
| Marketing | [Link to Template](Discord/Share%20YouTube%20Videos%20with%20AI%20Summaries%20on%20Discord.json) | -- Advanced AI Demo (Presented at AI Developers #14 meetup) -- AI agent chat -- AI agent that can scrape webpages -- AI Crew to Automate Fundamental Stock Analysis - Q&A Workflow -- AI Customer feedback sentiment analysis -- AI Data Extraction with Dynamic Prompts and Airtable -- AI Data Extraction with Dynamic Prompts and Baserow -- AI-Driven Lead Management and Inquiry Automation with ERPNext & [n8n](https://n8n.partnerlinks.io/h1pwwf5m4toe) -- AI Fitness Coach Strava Data Analysis and Personalized Training Insights -- AI-Powered Candidate Shortlisting Automation for ERPNext -- AI-Powered Email Automation for Business: Summarize & Respond with RAG -- AI-powered email processing autoresponder and response approval (Yes/No) -- AI-Powered RAG Workflow For Stock Earnings Report Analysis -- AI-Powered Social Media Amplifier -- AI Powered Web Scraping with Jina, Google Sheets and OpenAI: the EASY way -- AI-powered WooCommerce Support-Agent -- ⚡AI-Powered YouTube Video Summarization & Analysis -- AI_ Ask questions about any data source (using the [n8n](https://n8n.partnerlinks.io/h1pwwf5m4toe) workflow retriever) -- AI_ Summarize podcast episode and enhance using Wikipedia -- AI Voice Chatbot with ElevenLabs & OpenAI for Customer Service and Restaurants -- AI Voice Chat using Webhook, Memory Manager, OpenAI, Google Gemini & ElevenLabs -- AI web researcher for sales -- AI Youtube Trend Finder Based On Niche -- AI Automated HR Workflow for CV Analysis and Candidate Evaluation -- AI Agent with Ollama for current weather and wiki -- AI Agent _ Google calendar assistant using OpenAI -- AI Agent to chat with Supabase_PostgreSQL DB -- AI Agent To Chat With Files In Supabase Storage -- AI Agent to chat with you Search Console Data, using OpenAI and Postgres -- AI Social Media Caption Creator creates social media post captions in Airtable -- AI-Generated Summary Block 
for WordPress Posts -- AI Automated HR Workflow for CV Analysis and Candidate Evaluation +### Database & Storage -### **WhatsApp** +| Title | Description | Department | Link | +|-------|-------------|------------|------| +| Chat with Postgresql Database | This workflow enables an AI assistant to chat with a PostgreSQL database, allowing users to query and retrieve data using natural language. It supports custom SQL queries and schema introspection. | Data Analytics | [Link to Template](Database_and_Storage/Chat%20with%20Postgresql%20Database.json) | +| Generate SQL queries from schema only - AI-powered | This workflow uses AI to generate SQL queries based on a given database schema, making it easier to interact with databases without manual query writing. | Engineering | [Link to Template](Database_and_Storage/Generate%20SQL%20queries%20from%20schema%20only%20-%20AI-powered.json) | +| MongoDB AI Agent - Intelligent Movie Recommendations | This workflow creates an AI agent that provides intelligent movie recommendations by interacting with a MongoDB database, using aggregation pipelines to fetch relevant movie data. | Data Analytics | [Link to Template](Database_and_Storage/MongoDB%20AI%20Agent%20-%20Intelligent%20Movie%20Recommendations.json) | +| Supabase Insertion & Upsertion & Retrieval | This workflow demonstrates how to perform insertion, upsertion, and retrieval operations with Supabase, specifically for handling vector embeddings and associated metadata. | Engineering | [Link to Template](Database_and_Storage/Supabase%20Insertion%20&%20Upsertion%20&%20Retrieval.json) | +| Talk to your SQLite database with a LangChain AI Agent | This workflow allows users to interact with a SQLite database using a LangChain AI agent, enabling natural language queries and data retrieval from the database. 
| Data Analytics | [Link to Template](Database_and_Storage/Talk%20to%20your%20SQLite%20database%20with%20a%20LangChain%20AI%20Agent.json) | -- Building Your First WhatsApp Chatbot -- Complete business WhatsApp AI-Powered RAG Chatbot using OpenAI -- Respond to WhatsApp Messages with AI Like a Pro! -- Automate Sales Meeting Prep with AI & APIFY Sent To WhatsApp +### Airtable -### **Discord** +| Title | Description | Department | Link | +|-------|-------------|------------|------| +| AI Agent for project management and meetings with Airtable and Fireflies | This workflow uses an AI agent to automate project management tasks and meeting follow-ups by analyzing call transcripts from Fireflies. It creates tasks in Airtable and notifies clients about their tasks. | Operations | [Link to Template](Airtable/AI%20Agent%20for%20project%20management%20and%20meetings%20with%20Airtable%20and%20Fireflies.json) | +| AI Agent to chat with Airtable and analyze data | This workflow creates an AI agent that can chat with Airtable, analyze data, and perform queries based on user requests. It can handle aggregation functions and generate graphs/images. | Data Analytics | [Link to Template](Airtable/AI%20Agent%20to%20chat%20with%20Airtable%20and%20analyze%20data.json) | +| Get Airtable data via AI and Obsidian Notes | This workflow retrieves data from Airtable using an AI agent and integrates it with Obsidian Notes, allowing for seamless data access and organization within Obsidian. | Productivity | [Link to Template](Airtable/Get%20Airtable%20data%20via%20AI%20and%20Obsidian%20Notes.json) | +| Handling Job Application Submissions with AI and n8n Forms | This workflow automates the handling of job application submissions by extracting information from resumes (PDFs) using AI, parsing it into a structured format, and potentially storing it in Airtable. 
| HR | [Link to Template](Airtable/Handling%20Job%20Application%20Submissions%20with%20AI%20and%20n8n%20Forms.json) | +| vAssistant for Hubspot Chat using OpenAi and Airtable | This workflow integrates an OpenAI assistant with HubSpot Chat and Airtable to provide automated responses and manage customer interactions. It fetches chat messages, processes them with AI, and can store relevant information in Airtable. | Sales | [Link to Template](Airtable/vAssistant%20for%20Hubspot%20Chat%20using%20OpenAi%20and%20Airtable.json) | -- Discord AI-powered bot -- Send daily translated Calvin and Hobbes Comics to Discord -- Share YouTube Videos with AI Summaries on Discord +### Notion -### **WordPress** +| Title | Description | Department | Link | +|-------|-------------|------------|------| +| Add positive feedback messages to a table in Notion | Captures positive feedback from Typeform, analyzes sentiment with Google Cloud Natural Language, and adds it to a Notion table, with Slack notifications for high-scoring feedback. | Support | [Link to Template](Notion/Add%20positive%20feedback%20messages%20to%20a%20table%20in%20Notion.json) | +| Analyse papers from Hugging Face with AI and store them in Notion | Automatically fetches and analyzes papers from Hugging Face, extracts key information using AI, and stores the structured data in a Notion database. | Engineering | [Link to Template](Notion/Analyse%20papers%20from%20Hugging%20Face%20with%20AI%20and%20store%20them%20in%20Notion.json) | +| Automate Competitor Research with Exa.ai, Notion and AI Agents | Builds a competitor research agent using Exa.ai to find similar companies. AI agents then scour the internet for company overviews, product offerings, and customer reviews, compiling a report into a Notion table. 
| Marketing | [Link to Template](Notion/Automate%20Competitor%20Research%20with%20Exa.ai,%20Notion%20and%20AI%20Agents.json) | +| Automate LinkedIn Outreach with Notion and OpenAI | Automates LinkedIn outreach by fetching daily posts from a Notion database, formatting them with OpenAI for LinkedIn engagement, and then posting them to LinkedIn. | Marketing | [Link to Template](Notion/Automate%20LinkedIn%20Outreach%20with%20Notion%20and%20OpenAI.json) | +| Notion AI Assistant Generator | Generates a custom AI Assistant chatbot workflow for a specific Notion database schema, allowing users to chat with their Notion data. | Engineering | [Link to Template](Notion/Notion%20AI%20Assistant%20Generator.json) | +| Notion knowledge base AI assistant | Creates an AI assistant that can search and retrieve information from a Notion knowledge base, providing answers to user queries. | Support | [Link to Template](Notion/Notion%20knowledge%20base%20AI%20assistant.json) | +| Notion to Pinecone Vector Store Integration | Integrates Notion with Pinecone, allowing Notion pages to be converted into vector embeddings and stored in Pinecone for advanced search and retrieval. | Engineering | [Link to Template](Notion/Notion%20to%20Pinecone%20Vector%20Store%20Integration.json) | +| Store Notion’s Pages as Vector Documents into Supabase with OpenAI | Automates storing Notion pages as vector documents in a Supabase database, using OpenAI to generate embeddings for the content. | Engineering | [Link to Template](Notion/Store%20Notion_s%20Pages%20as%20Vector%20Documents%20into%20Supabase%20with%20OpenAI.json) | +| Turn Emails into AI-Enhanced Tasks in Notion (Multi-User Support) with Gmail, Airtable and Softr | Transforms emails into AI-enhanced tasks in Notion, supporting multiple users. It integrates with Gmail for email triggers, Airtable for routing, and Softr for a user interface. 
| Ops | [Link to Template](Notion/Turn%20Emails%20into%20AI-Enhanced%20Tasks%20in%20Notion%20(Multi-User%20Support)%20with%20Gmail,%20Airtable%20and%20Softr.json) | +| Upsert huge documents in a vector store with Supabase and Notion | Manages large documents by splitting them into chunks, generating embeddings, and upserting them into a Supabase vector store, with Notion serving as the document source. | Engineering | [Link to Template](Notion/Upsert%20huge%20documents%20in%20a%20vector%20store%20with%20Supabase%20and%20Notion.json) | -- Auto-Categorize blog posts in wordpress using A.I. -- Auto-Tag Blog Posts in WordPress with AI -- Automate Blog Creation in Brand Voice with AI -- Automate Content Generator for WordPress with DeepSeek R1 -- WordPress - AI Chatbot to enhance user experience - with Supabase and OpenAI -- Write a WordPress post with AI (starting from a few keywords) +### Slack -### **PDF & Document Processing** +| Title | Description | Department | Link | +|-------|-------------|------------|------| +| AI-Powered Information Monitoring with OpenAI, Google Sheets, Jina AI and Slack | Monitors RSS feeds, summarizes articles with OpenAI and Jina AI, classifies them, and sends formatted notifications to Slack, enabling AI-powered information monitoring. | Marketing | [Link to Template](Slack/AI-Powered%20Information%20Monitoring%20with%20OpenAI,%20Google%20Sheets,%20Jina%20AI%20and%20Slack.json) | +| Creating a AI Slack Bot with Google Gemini | Builds an AI Slack bot using Google Gemini, handling webhooks, integrating an AI agent, managing memory, and responding to Slack messages. | Engineering | [Link to Template](Slack/Creating%20a%20AI%20Slack%20Bot%20with%20Google%20Gemini.json) | +| Customer Support Channel and Ticketing System with Slack and Linear | Automates customer support by querying Slack for messages with a ticket emoji, deciding if a new Linear ticket is needed, creating or updating tickets, and notifying Slack. 
| Support | [Link to Template](Slack/Customer%20Support%20Channel%20and%20Ticketing%20System%20with%20Slack%20and%20Linear.json) | +| Enhance Security Operations with the Qualys Slack Shortcut Bot! | Creates a Slack shortcut bot for Qualys to enhance security operations, allowing users to trigger actions like creating reports or starting vulnerability scans directly from Slack. | Security | [Link to Template](Slack/Enhance%20Security%20Operations%20with%20the%20Qualys%20Slack%20Shortcut%20Bot!.json) | +| Enrich Pipedrive's Organization Data with OpenAI GPT-4o & Notify it in Slack | Enriches Pipedrive organization data by scraping website content, using OpenAI GPT-4o to generate a summary, and adding it as a note in Pipedrive, then notifying a Slack channel. | Sales | [Link to Template](Slack/Enrich%20Pipedrive_s%20Organization%20Data%20with%20OpenAI%20GPT-4o%20&%20Notify%20it%20in%20Slack.json) | +| IT Ops AI SlackBot Workflow - Chat with your knowledge base | Creates an AI Slackbot for IT Operations, enabling users to chat with a knowledge base to retrieve information and get answers directly within Slack. | IT | [Link to Template](Slack/IT%20Ops%20AI%20SlackBot%20Workflow%20-%20Chat%20with%20your%20knowledge%20base.json) | +| Sentiment Analysis Tracking on Support Issues with Linear and Slack | Tracks sentiment on support issues by integrating with Linear and Slack, performing sentiment analysis using OpenAI on Linear comments, and notifying relevant Slack channels. | Support | [Link to Template](Slack/Sentiment%20Analysis%20Tracking%20on%20Support%20Issues%20with%20Linear%20and%20Slack.json) | +| Slack slash commands AI Chat Bot | Implements an AI chatbot accessible via Slack slash commands, processing user commands, interacting with an AI model, and responding within Slack. 
| IT | [Link to Template](Slack/Slack%20slash%20commands%20AI%20Chat%20Bot.json) | +| Venafi Cloud Slack Cert Bot | Provides a Slack bot that interacts with Venafi Cloud for certificate management, allowing users to check certificate status, receive alerts, or request certificate actions via Slack. | Security | [Link to Template](Slack/Venafi%20Cloud%20Slack%20Cert%20Bot.json) | -- Ask questions about a PDF using AI -- Chat with PDF docs using AI (quoting sources) -- Convert URL HTML to Markdown Format and Get Page Links -- Extract and process information directly from PDF using Claude and Gemini -- Extract data from resume and create PDF with Gotenberg -- Manipulate PDF with Adobe developer API -- Parse PDF with LlamaParse and save to Airtable -- Transcribe Audio Files, Summarize with GPT-4, and Store in Notion -- Transcribing Bank Statements To Markdown Using Gemini Vision AI +### OpenAI & LLMs -### **Other Integrations & Use Cases** +| Title | Description | Department | Link | +|---|---|---|---| +| Advanced AI Demo (Presented at AI Developers #14 meetup) | Advanced AI capabilities demo. | AI/Development | [Link to Template](OpenAI_and_LLMs/Advanced%20AI%20Demo%20(Presented%20at%20AI%20Developers%20%2314%20meetup).json) | +| AI agent chat | Basic AI chat agent. | AI/Customer Service | [Link to Template](OpenAI_and_LLMs/AI%20agent%20chat.json) | +| AI agent that can scrape webpages | AI agent for web scraping. | AI/Data Extraction | [Link to Template](OpenAI_and_LLMs/AI%20agent%20that%20can%20scrape%20webpages.json) | +| AI Crew to Automate Fundamental Stock Analysis - Q&A Workflow | Stock analysis automation. | Finance/AI/Data Analysis | [Link to Template](OpenAI_and_LLMs/AI%20Crew%20to%20Automate%20Fundamental%20Stock%20Analysis%20-%20Q&A%20Workflow.json) | +| AI Customer feedback sentiment analysis | Sentiment analysis on customer feedback. 
| Customer Service/Marketing/Data Analysis | [Link to Template](OpenAI_and_LLMs/AI%20Customer%20feedback%20sentiment%20analysis.json) | +| AI Data Extraction with Dynamic Prompts and Airtable | AI-driven data extraction with Airtable integration. | AI/Data Extraction/Database | [Link to Template](OpenAI_and_LLMs/AI%20Data%20Extraction%20with%20Dynamic%20Prompts%20and%20Airtable.json) | +| AI Data Extraction with Dynamic Prompts and Baserow | AI-driven data extraction with Baserow integration. | AI/Data Extraction/Database | [Link to Template](OpenAI_and_LLMs/AI%20Data%20Extraction%20with%20Dynamic%20Prompts%20and%20Baserow.json) | +| AI-Driven Lead Management and Inquiry Automation with ERPNext & n8n | Lead management automation. | Sales/CRM/AI | [Link to Template](OpenAI_and_LLMs/AI-Driven%20Lead%20Management%20and%20Inquiry%20Automation%20with%20ERPNext%20&%20n8n.json) | +| AI Fitness Coach Strava Data Analysis and Personalized Training Insights | Fitness coaching via Strava data analysis. | Fitness/AI/Data Analysis | [Link to Template](OpenAI_and_LLMs/AI%20Fitness%20Coach%20Strava%20Data%20Analysis%20and%20Personalized%20Training%20Insights.json) | +| AI-Powered Candidate Shortlisting Automation for ERPNext | Candidate shortlisting automation. | HR/AI/Recruitment | [Link to Template](OpenAI_and_LLMs/AI-Powered%20Candidate%20Shortlisting%20Automation%20for%20ERPNext.json) | +| AI-Powered Email Automation for Business: Summarize & Respond with RAG | Email automation with summarization and response. | Business Automation/AI/Communication | [Link to Template](OpenAI_and_LLMs/AI-Powered%20Email%20Automation%20for%20Business_%20Summarize%20&%20Respond%20with%20RAG.json) | +| AI-Powered RAG Workflow For Stock Earnings Report Analysis | Stock earnings report analysis with RAG. 
| Finance/AI/Data Analysis | [Link to Template](OpenAI_and_LLMs/AI-Powered%20RAG%20Workflow%20For%20Stock%20Earnings%20Report%20Analysis.json) | +| AI-Powered Social Media Amplifier | Amplifies social media presence using AI. | Marketing/AI/Social Media | [Link to Template](OpenAI_and_LLMs/AI-Powered%20Social%20Media%20Amplifier.json) | +| AI-powered WooCommerce Support-Agent | Creates an AI-powered support agent for WooCommerce stores. | E-commerce/AI/Customer Service | [Link to Template](OpenAI_and_LLMs/AI-powered%20WooCommerce%20Support-Agent.json) | +| AI-Powered YouTube Video Summarization & Analysis | Summarizes and analyzes YouTube videos using AI. | Content Creation/AI/Data Analysis | [Link to Template](OpenAI_and_LLMs/%E2%9A%A1AI-Powered%20YouTube%20Video%20Summarization%20&%20Analysis.json) | +| AI: Ask questions about any data source (using the n8n workflow retriever) | Allows users to ask questions about various data sources using an n8n workflow retriever. | AI/Data Analysis/Workflow Automation | [Link to Template](OpenAI_and_LLMs/AI_%20Ask%20questions%20about%20any%20data%20source%20(using%20the%20n8n%20workflow%20retriever).json) | +| AI: Summarize podcast episode and enhance using Wikipedia | Summarizes podcast episodes and enhances the summary with information from Wikipedia using AI. 
| Content Creation/AI/Data Analysis | [Link to Template](OpenAI_and_LLMs/AI_%20Summarize%20podcast%20episode%20and%20enhance%20using%20Wikipedia.json) | -- Zoom AI Meeting Assistant creates mail summary, ClickUp tasks and follow-up call -- Siri AI Agent: Apple Shortcuts powered voice template -- Text automations using Apple Shortcuts -- LINE Assistant with Google Calendar and Gmail Integration -- Monthly Spotify Track Archiving and Playlist Classification -- Automate Pinterest Analysis & AI-Powered Content Suggestions With Pinterest API -- Enhance Customer Chat by Buffering Messages with Twilio and Redis -- Handling Appointment Leads and Follow-up With Twilio, Cal.com and AI -- Visualize your SQL Agent queries with OpenAI and Quickchart.io -- UTM Link Creator & QR Code Generator with Scheduled Google Analytics Reports +### WhatsApp -### **AI Research, RAG, and Data Analysis** +| Title | Description | Department | Link | +|---|---|---|---| +| Automate Sales Meeting Prep with AI & APIFY Sent To WhatsApp | This workflow automates sales meeting preparation using AI and Apify, sending relevant information to WhatsApp. | Sales/AI/Automation | [Link to Template](./WhatsApp/Automate%20Sales%20Meeting%20Prep%20with%20AI%20&%20APIFY%20Sent%20To%20WhatsApp.json) | +| Building Your First WhatsApp Chatbot | This workflow guides you through building your first WhatsApp chatbot. | Customer Service/Development | [Link to Template](./WhatsApp/Building%20Your%20First%20WhatsApp%20Chatbot.json) | +| Complete business WhatsApp AI-Powered RAG Chatbot using OpenAI | This workflow builds a complete business WhatsApp AI-powered RAG chatbot using OpenAI. | Customer Service/AI/Development | [Link to Template](./WhatsApp/Complete%20business%20WhatsApp%20AI-Powered%20RAG%20Chatbot%20using%20OpenAI.json) | +| Respond to WhatsApp Messages with AI Like a Pro! | This workflow enables professional AI-powered responses to WhatsApp messages. 
| Customer Service/AI/Communication | [Link to Template](./WhatsApp/Respond%20to%20WhatsApp%20Messages%20with%20AI%20Like%20a%20Pro!.json) | -- AI-Powered Information Monitoring with OpenAI, Google Sheets, Jina AI and Slack -- Analyze tradingview.com charts with Chrome extension, [n8n](https://n8n.partnerlinks.io/h1pwwf5m4toe) and OpenAI -- Build a Financial Documents Assistant using Qdrant and Mistral.ai -- Building RAG Chatbot for Movie Recommendations with Qdrant and Open AI -- Deduplicate Scraping AI Grants for Eligibility using AI -- Enrich Pipedrive’s Organization Data with OpenAI GPT-4o & Notify it in Slack -- Extract insights & analyse YouTube comments via AI Agent chat -- Make OpenAI Citation for File Retrieval RAG -- Summarize SERPBear data with AI (via Openrouter) and save it to Baserow -- Summarize Umami data with AI (via Openrouter) and save it to Baserow +### Instagram, Twitter, Social Media -### **Instagram, Twitter, Social Media** +| Title | Description | Department | Link | +|-------|-------------|------------|------| +| AI agent for Instagram DM_inbox. Manychat + Open AI integration | Integrates Manychat with OpenAI to create an AI agent for managing Instagram direct messages. | Marketing/Customer Service/AI | [Link to Template](Instagram_Twitter_Social_Media/AI%20agent%20for%20Instagram%20DM_inbox.%20Manychat%20%2B%20Open%20AI%20integration.json) | +| Create dynamic Twitter profile banner | Automates the creation of dynamic Twitter profile banners. | Marketing/Social Media | [Link to Template](Instagram_Twitter_Social_Media/Create%20dynamic%20Twitter%20profile%20banner.json) | +| Generate Instagram Content from Top Trends with AI Image Generation | Creates Instagram content by analyzing top trends and generating relevant images using AI. 
| Marketing/AI/Content | [Link to Template](Instagram_Twitter_Social_Media/Generate%20Instagram%20Content%20from%20Top%20Trends%20with%20AI%20Image%20Generation.json) | +| OpenAI-powered tweet generator | Generates tweets using OpenAI's language models. | Marketing/Social Media/AI | [Link to Template](Instagram_Twitter_Social_Media/OpenAI-powered%20tweet%20generator.json) | +| Post New YouTube Videos to X | Automatically posts new YouTube videos to X (formerly Twitter). | Marketing/Social Media | [Link to Template](Instagram_Twitter_Social_Media/Post%20New%20YouTube%20Videos%20to%20X.json) | +| Reddit AI digest | Creates an AI-generated digest of Reddit content. | Marketing/Content/AI | [Link to Template](Instagram_Twitter_Social_Media/Reddit%20AI%20digest.json) | +| Social Media Analysis and Automated Email Generation | Analyzes social media data and generates automated email reports. | Marketing/Analytics | [Link to Template](Instagram_Twitter_Social_Media/Social%20Media%20Analysis%20and%20Automated%20Email%20Generation.json) | +| Speed Up Social Media Banners With BannerBear.com | Automates the creation of social media banners using BannerBear.com. | Marketing/Design | [Link to Template](Instagram_Twitter_Social_Media/Speed%20Up%20Social%20Media%20Banners%20With%20BannerBear.com.json) | +| Twitter Virtual AI Influencer | Manages a virtual AI influencer's Twitter account. | Marketing/AI | [Link to Template](Instagram_Twitter_Social_Media/Twitter%20Virtual%20AI%20Influencer.json) | +| Update Twitter banner using HTTP request | Updates a Twitter banner using HTTP requests. | Marketing/Development | [Link to Template](Instagram_Twitter_Social_Media/Update%20Twitter%20banner%20using%20HTTP%20request.json) | -- AI agent for Instagram DM_inbox. 
Manychat + Open AI integration -- Generate Instagram Content from Top Trends with AI Image Generation -- Update Twitter banner using HTTP request -- Twitter Virtual AI Influencer -- Create dynamic Twitter profile banner -- Post New YouTube Videos to X +### Other Integrations & Use Cases -### **Forms & Surveys** +| Title | Description | Department | Link | +|-------|-------------|------------|------| +| API Schema Extractor | Extracts API schemas from web services for documentation or integration purposes. | Development/Integration | [Link to Template](Other_Integrations_and_Use_Cases/API%20Schema%20Extractor.json) | +| Analyze feedback and send a message on Mattermost | Analyzes user feedback and sends notifications to Mattermost channels. | Support/Communication | [Link to Template](Other_Integrations_and_Use_Cases/Analyze%20feedback%20and%20send%20a%20message%20on%20Mattermost.json) | +| Analyze feedback using AWS Comprehend | Performs sentiment analysis on feedback using AWS Comprehend and sends results to Mattermost. | Support/AI | [Link to Template](Other_Integrations_and_Use_Cases/Analyze%20feedback%20using%20AWS%20Comprehend%20and%20send%20it%20to%20a%20Mattermost%20channel.json) | +| Automate Pinterest Analysis & AI-Powered Content Suggestions | Analyzes Pinterest data and provides AI-powered content suggestions. | Marketing/AI | [Link to Template](Other_Integrations_and_Use_Cases/Automate%20Pinterest%20Analysis%20%26%20AI-Powered%20Content%20Suggestions%20With%20Pinterest%20API.json) | +| Automate SIEM Alert Enrichment | Enriches SIEM alerts with MITRE ATT&CK data and integrates with Zendesk. | Security/IT | [Link to Template](Other_Integrations_and_Use_Cases/Automate%20SIEM%20Alert%20Enrichment%20with%20MITRE%20ATT%26CK,%20Qdrant%20%26%20Zendesk%20in%20n8n.json) | +| Automate Screenshots with URLbox & Analyze with AI | Takes screenshots of webpages and analyzes them using AI. 
| Development/Marketing | [Link to Template](Other_Integrations_and_Use_Cases/Automate%20Screenshots%20with%20URLbox%20%26%20Analyze%20them%20with%20AI.json) | +| Automate testimonials in Strapi | Automates the process of collecting and managing testimonials in Strapi. | Marketing/Content | [Link to Template](Other_Integrations_and_Use_Cases/Automate%20testimonials%20in%20Strapi%20with%20n8n.json) | +| Bitrix24 Chatbot Application | Example workflow for creating a Bitrix24 chatbot with webhook integration. | Business/Communication | [Link to Template](Other_Integrations_and_Use_Cases/Bitrix24%20Chatbot%20Application%20Workflow%20example%20with%20Webhook%20Integration.json) | +| ChatGPT Automatic Code Review in Gitlab MR | Automates code reviews in GitLab merge requests using ChatGPT. | Development/DevOps | [Link to Template](Other_Integrations_and_Use_Cases/ChatGPT%20Automatic%20Code%20Review%20in%20Gitlab%20MR.json) | +| Classify new bugs in Linear with OpenAI's GPT-4 | Automatically classifies and routes new bug reports in Linear using AI. | Development/QA | [Link to Template](Other_Integrations_and_Use_Cases/Classify%20new%20bugs%20in%20Linear%20with%20OpenAI_s%20GPT-4%20and%20move%20them%20to%20the%20right%20team.json) | +| Create, update, and get a profile in Humantic AI | Manages user profiles in Humantic AI platform. | Marketing/AI | [Link to Template](Other_Integrations_and_Use_Cases/Create,%20update,%20and%20get%20a%20profile%20in%20Humantic%20AI.json) | +| Enhance Customer Chat with Twilio and Redis | Implements message buffering for customer chats using Twilio and Redis. | Support/Development | [Link to Template](Other_Integrations_and_Use_Cases/Enhance%20Customer%20Chat%20by%20Buffering%20Messages%20with%20Twilio%20and%20Redis.json) | +| Hacker News Throwback Machine | Shows what was popular on Hacker News on this day in previous years. 
| Development/Community | [Link to Template](Other_Integrations_and_Use_Cases/Hacker%20News%20Throwback%20Machine%20-%20See%20What%20Was%20Hot%20on%20This%20Day,%20Every%20Year!.json) | +| Handling Appointment Leads with Twilio, Cal.com and AI | Manages appointment scheduling and follow-ups using Twilio and Cal.com. | Sales/Support | [Link to Template](Other_Integrations_and_Use_Cases/Handling%20Appointment%20Leads%20and%20Follow-up%20With%20Twilio,%20Cal.com%20and%20AI.json) | +| Integrating AI with Open-Meteo API | Enhances weather forecasting with AI analysis. | Data Science/Weather | [Link to Template](Other_Integrations_and_Use_Cases/Integrating%20AI%20with%20Open-Meteo%20API%20for%20Enhanced%20Weather%20Forecasting.json) | +| Introduction to the HTTP Tool | Basic tutorial on using HTTP tools in n8n. | Development | [Link to Template](Other_Integrations_and_Use_Cases/Introduction%20to%20the%20HTTP%20Tool.json) | +| KB Tool - Confluence Knowledge Base | Integrates with Confluence for knowledge base management. | Documentation/IT | [Link to Template](Other_Integrations_and_Use_Cases/KB%20Tool%20-%20Confluence%20Knowledge%20Base.json) | +| LINE Assistant with Google Calendar and Gmail | Creates a LINE assistant that integrates with Google Calendar and Gmail. | Productivity/Communication | [Link to Template](Other_Integrations_and_Use_Cases/LINE%20Assistant%20with%20Google%20Calendar%20and%20Gmail%20Integration.json) | +| Monthly Spotify Track Archiving | Archives and classifies monthly Spotify tracks into playlists. | Personal/Music | [Link to Template](Other_Integrations_and_Use_Cases/Monthly%20Spotify%20Track%20Archiving%20and%20Playlist%20Classification.json) | +| Obsidian Notes Read Aloud | Converts Obsidian notes into audio format as a podcast feed. 
| Productivity/Content | [Link to Template](Other_Integrations_and_Use_Cases/Obsidian%20Notes%20Read%20Aloud%20using%20AI_%20Available%20as%20a%20Podcast%20Feed.json) | +| Optimize & Update Printify Title and Description | Automates optimization of Printify product titles and descriptions. | E-commerce | [Link to Template](Other_Integrations_and_Use_Cases/Optimize%20%26%20Update%20Printify%20Title%20and%20Description%20Workflow.json) | +| Qualify replies from Pipedrive persons with AI | Uses AI to qualify and categorize replies from Pipedrive contacts. | Sales/AI | [Link to Template](Other_Integrations_and_Use_Cases/Qualify%20replies%20from%20Pipedrive%20persons%20with%20AI.json) | +| Siri AI Agent with Apple Shortcuts | Creates a Siri-powered AI agent using Apple Shortcuts. | Personal/Productivity | [Link to Template](Other_Integrations_and_Use_Cases/Siri%20AI%20Agent_%20Apple%20Shortcuts%20powered%20voice%20template.json) | +| Text automations using Apple Shortcuts | Implements text-based automations with Apple Shortcuts. | Personal/Productivity | [Link to Template](Other_Integrations_and_Use_Cases/Text%20automations%20using%20Apple%20Shortcuts.json) | +| UTM Link Creator & QR Code Generator | Creates UTM links, generates QR codes, and schedules Google Analytics reports. | Marketing/Analytics | [Link to Template](Other_Integrations_and_Use_Cases/UTM%20Link%20Creator%20%26%20QR%20Code%20Generator%20with%20Scheduled%20Google%20Analytics%20Reports.json) | +| Use AI to organize your Todoist Inbox | Automatically organizes tasks in Todoist using AI. | Productivity | [Link to Template](Other_Integrations_and_Use_Cases/Use%20AI%20to%20organize%20your%20Todoist%20Inbox.json) | +| Using External Workflows as Tools in n8n | Demonstrates how to use external workflows as tools within n8n. 
| Development | [Link to Template](Other_Integrations_and_Use_Cases/Using%20External%20Workflows%20as%20Tools%20in%20n8n.json) | +| Visualize SQL Agent queries with OpenAI and Quickchart.io | Creates visualizations from SQL queries using OpenAI and Quickchart.io. | Data Analysis/Visualization | [Link to Template](Other_Integrations_and_Use_Cases/Visualize%20your%20SQL%20Agent%20queries%20with%20OpenAI%20and%20Quickchart.io.json) | +| Zoom AI Meeting Assistant | Creates meeting summaries, ClickUp tasks, and schedules follow-ups from Zoom meetings. | Productivity/Communication | [Link to Template](Other_Integrations_and_Use_Cases/Zoom%20AI%20Meeting%20Assistant%20creates%20mail%20summary,%20ClickUp%20tasks%20and%20follow-up%20call.json) | -- Email Subscription Service with [n8n](https://n8n.partnerlinks.io/h1pwwf5m4toe) Forms, Airtable and AI -- Handling Job Application Submissions with AI and [n8n](https://n8n.partnerlinks.io/h1pwwf5m4toe) Forms -- Qualifying Appointment Requests with AI & [n8n](https://n8n.partnerlinks.io/h1pwwf5m4toe) Forms +### Forms & Surveys -### **Database & Storage** +| Title | Description | Department | Link | +|-------|-------------|------------|------| +| Conversational Interviews with AI Agents and n8n Forms | Implements AI-powered conversational interviews using n8n Forms for interactive data collection. | Research/Marketing | [Link to Template](Forms_and_Surveys/Conversational%20Interviews%20with%20AI%20Agents%20and%20n8n%20Forms.json) | +| Email Subscription Service with n8n Forms, Airtable and AI | Manages email subscriptions with n8n Forms, stores data in Airtable, and uses AI for processing. | Marketing/Communication | [Link to Template](Forms_and_Surveys/Email%20Subscription%20Service%20with%20n8n%20Forms,%20Airtable%20and%20AI.json) | +| Qualifying Appointment Requests with AI & n8n Forms | Uses AI to qualify and process appointment requests submitted through n8n Forms. 
| Sales/Support | [Link to Template](Forms_and_Surveys/Qualifying%20Appointment%20Requests%20with%20AI%20&%20n8n%20Forms.json) | + +### AI Research, RAG, and Data Analysis + +| Workflow Title | Description | Department | Link to Template | +|---|---|---|---| +| Analyze tradingview.com charts with Chrome extension, N8N and OpenAI | Analyzes TradingView charts using a Chrome extension, n8n, and OpenAI for automated insights. | Data Analysis | [Analyze tradingview.com charts with Chrome extension, N8N and OpenAI.txt](./AI_Research_RAG_and_Data_Analysis/Analyze%20tradingview.com%20charts%20with%20Chrome%20extension,%20N8N%20and%20OpenAI.json) | +| Automated Hugging Face Paper Summary Fetching & Categorization Workflow | Automates fetching, summarizing, and categorizing research papers from Hugging Face. | AI Research | [Automated Hugging Face Paper Summary Fetching & Categorization Workflow.txt](./AI_Research_RAG_and_Data_Analysis/Automated%20Hugging%20Face%20Paper%20Summary%20Fetching%20%26%20Categorization%20Workflow.json) | +| Autonomous AI crawler | An autonomous AI-powered web crawler for data collection and analysis. | AI Research | [Autonomous AI crawler.txt](./AI_Research_RAG_and_Data_Analysis/Autonomous%20AI%20crawler.json) | +| Build Your Own Image Search Using AI Object Detection, CDN and ElasticSearch | Builds an image search engine using AI object detection, CDN, and Elasticsearch for efficient image retrieval. 
| AI Research | [Build Your Own Image Search Using AI Object Detection, CDN and ElasticSearchBuild Your Own Image Search Using AI Object Detection, CDN and ElasticSearch.txt](./AI_Research_RAG_and_Data_Analysis/Build%20Your%20Own%20Image%20Search%20Using%20AI%20Object%20Detection,%20CDN%20and%20ElasticSearchBuild%20Your%20Own%20Image%20Search%20Using%20AI%20Object%20Detection,%20CDN%20and%20ElasticSearch.json) | +| Build a Financial Documents Assistant using Qdrant and Mistral.ai | Creates an AI assistant for financial document analysis using Qdrant for vector search and Mistral.ai for language processing. | Finance, AI Research | [Build a Financial Documents Assistant using Qdrant and Mistral.ai.txt](./AI_Research_RAG_and_Data_Analysis/Build%20a%20Financial%20Documents%20Assistant%20using%20Qdrant%20and%20Mistral.ai.json) | +| Build a Tax Code Assistant with Qdrant, Mistral.ai and OpenAI | Develops an AI assistant for tax code queries using Qdrant, Mistral.ai, and OpenAI for comprehensive responses. | Finance, AI Research | [Build a Tax Code Assistant with Qdrant, Mistral.ai and OpenAI.txt](./AI_Research_RAG_and_Data_Analysis/Build%20a%20Tax%20Code%20Assistant%20with%20Qdrant,%20Mistral.ai%20and%20OpenAI.json) | +| Building RAG Chatbot for Movie Recommendations with Qdrant and Open AI | Constructs a RAG-based chatbot for movie recommendations, leveraging Qdrant for retrieval and OpenAI for generation. | AI Research, Entertainment | [Building RAG Chatbot for Movie Recommendations with Qdrant and Open AI.txt](./AI_Research_RAG_and_Data_Analysis/Building%20RAG%20Chatbot%20for%20Movie%20Recommendations%20with%20Qdrant%20and%20Open%20AI.json) | +| Chat with GitHub API Documentation: RAG-Powered Chatbot with Pinecone & OpenAI | Implements a RAG-powered chatbot for interacting with GitHub API documentation using Pinecone and OpenAI. 
| Development, AI Research | [Chat with GitHub API Documentation_ RAG-Powered Chatbot with Pinecone & OpenAI.txt](./AI_Research_RAG_and_Data_Analysis/Chat%20with%20GitHub%20API%20Documentation_%20RAG-Powered%20Chatbot%20with%20Pinecone%20%26%20OpenAI.json) | +| Create a Google Analytics Data Report with AI and sent it to E-Mail and Telegram | Generates Google Analytics data reports using AI and sends them via email and Telegram. | Data Analysis, Marketing | [Create a Google Analytics Data Report with AI and sent it to E-Mail and Telegram.txt](./AI_Research_RAG_and_Data_Analysis/Create%20a%20Google%20Analytics%20Data%20Report%20with%20AI%20and%20sent%20it%20to%20E-Mail%20and%20Telegram.json) | +| Customer Insights with Qdrant, Python and Information Extractor | Extracts customer insights using Qdrant, Python, and an information extraction module. | Data Analysis, Customer Service | [Customer Insights with Qdrant, Python and Information Extractor.txt](./AI_Research_RAG_and_Data_Analysis/Customer%20Insights%20with%20Qdrant,%20Python%20and%20Information%20Extractor.json) | +| Deduplicate Scraping AI Grants for Eligibility using AI | Automates the deduplication and eligibility assessment of scraped AI grant data using AI. | AI Research, Data Management | [Deduplicate Scraping AI Grants for Eligibility using AI.txt](./AI_Research_RAG_and_Data_Analysis/Deduplicate%20Scraping%20AI%20Grants%20for%20Eligibility%20using%20AI.json) | +| Enrich Property Inventory Survey with Image Recognition and AI Agent | Enhances property inventory surveys with image recognition and AI agents for automated data enrichment. 
| Real Estate, AI Research | [Enrich Property Inventory Survey with Image Recognition and AI Agent.txt](./AI_Research_RAG_and_Data_Analysis/Enrich%20Property%20Inventory%20Survey%20with%20Image%20Recognition%20and%20AI%20Agent.json) | +| Extract insights & analyse YouTube comments via AI Agent chat | Extracts insights and analyzes YouTube comments through an AI agent chat interface. | Social Media, Data Analysis | [Extract insights & analyse YouTube comments via AI Agent chat.txt](./AI_Research_RAG_and_Data_Analysis/Extract%20insights%20%26%20analyse%20YouTube%20comments%20via%20AI%20Agent%20chat.json) | +| Generate SEO Seed Keywords Using AI | Generates SEO seed keywords using AI to optimize content for search engines. | Marketing, AI Research | [Generate SEO Seed Keywords Using AI.txt](./AI_Research_RAG_and_Data_Analysis/Generate%20SEO%20Seed%20Keywords%20Using%20AI.json) | +| Hacker News Job Listing Scraper and Parser | Scrapes and parses job listings from Hacker News for job seekers or recruiters. | Data Collection, HR | [Hacker News Job Listing Scraper and Parser.txt](./AI_Research_RAG_and_Data_Analysis/Hacker%20News%20Job%20Listing%20Scraper%20and%20Parser.json) | +| Hacker News to Video Content | Converts Hacker News articles into video content automatically. | Content Creation, Media | [Hacker News to Video Content.txt](./AI_Research_RAG_and_Data_Analysis/Hacker%20News%20to%20Video%20Content.json) | +| Host Your Own AI Deep Research Agent with n8n, Apify and OpenAI o3 | Sets up a self-hosted AI deep research agent using n8n, Apify, and OpenAI. | AI Research, Automation | [Host Your Own AI Deep Research Agent with n8n, Apify and OpenAI o3.txt](./AI_Research_RAG_and_Data_Analysis/Host%20Your%20Own%20AI%20Deep%20Research%20Agent%20with%20n8n,%20Apify%20and%20OpenAI%20o3.json) | +| Intelligent Web Query and Semantic Re-Ranking Flow using Brave and Google Gemini | Performs intelligent web queries and semantic re-ranking using Brave browser and Google Gemini AI. 
| AI Research, Data Analysis | [Intelligent Web Query and Semantic Re-Ranking Flow using Brave and Google Gemini.txt](./AI_Research_RAG_and_Data_Analysis/Intelligent%20Web%20Query%20and%20Semantic%20Re-Ranking%20Flow%20using%20Brave%20and%20Google%20Gemini.json) | +| Learn Anything from HN - Get Top Resource Recommendations from Hacker News | Extracts top resource recommendations from Hacker News to facilitate learning on any topic. | Education, Data Analysis | [Learn Anything from HN - Get Top Resource Recommendations from Hacker News.txt](./AI_Research_RAG_and_Data_Analysis/Learn%20Anything%20from%20HN%20-%20Get%20Top%20Resource%20Recommendations%20from%20Hacker%20News.json) | +| Make OpenAI Citation for File Retrieval RAG | Generates citations for file retrieval in RAG systems using OpenAI. | AI Research, Documentation | [Make OpenAI Citation for File Retrieval RAG.txt](./AI_Research_RAG_and_Data_Analysis/Make%20OpenAI%20Citation%20for%20File%20Retrieval%20RAG.json) | +| Open Deep Research - AI-Powered Autonomous Research Workflow | An AI-powered autonomous workflow for conducting deep research. | AI Research, Automation | [Open Deep Research - AI-Powered Autonomous Research Workflow.txt](./AI_Research_RAG_and_Data_Analysis/Open%20Deep%20Research%20-%20AI-Powered%20Autonomous%20Research%20Workflow.json) | +| Query Perplexity AI from your n8n workflows | Integrates Perplexity AI into n8n workflows for advanced querying capabilities. | AI Research, Automation | [Query Perplexity AI from your n8n workflows.txt](./AI_Research_RAG_and_Data_Analysis/Query%20Perplexity%20AI%20from%20your%20n8n%20workflows.json) | +| Recipe Recommendations with Qdrant and Mistral | Provides recipe recommendations using Qdrant for vector search and Mistral AI for content generation. 
| Food, AI Research | [Recipe Recommendations with Qdrant and Mistral.txt](./AI_Research_RAG_and_Data_Analysis/Recipe%20Recommendations%20with%20Qdrant%20and%20Mistral.json) | +| Reconcile Rent Payments with Local Excel Spreadsheet and OpenAI | Reconciles rent payments by comparing local Excel spreadsheets with data processed by OpenAI. | Finance, Data Management | [Reconcile Rent Payments with Local Excel Spreadsheet and OpenAI.txt](./AI_Research_RAG_and_Data_Analysis/Reconcile%20Rent%20Payments%20with%20Local%20Excel%20Spreadsheet%20and%20OpenAI.json) | +| Scrape Trustpilot Reviews with DeepSeek, Analyze Sentiment with OpenAI | Scrapes Trustpilot Reviews using DeepSeek and analyzes sentiment with OpenAI. | Marketing, Data Analysis | [Scrape Trustpilot Reviews with DeepSeek, Analyze Sentiment with OpenAI.txt](./AI_Research_RAG_and_Data_Analysis/Scrape%20Trustpilot%20Reviews%20with%20DeepSeek,%20Analyze%20Sentiment%20with%20OpenAI.json) | +| Scrape and summarize posts of a news site without RSS feed using AI and save them to a NocoDB | Scrapes and summarizes news posts without RSS feeds using AI, saving the output to NocoDB. | Content Curation, Data Management | [Scrape and summarize posts of a news site without RSS feed using AI and save them to a NocoDB.txt](./AI_Research_RAG_and_Data_Analysis/Scrape%20and%20summarize%20posts%20of%20a%20news%20site%20without%20RSS%20feed%20using%20AI%20and%20save%20them%20to%20a%20NocoDB.json) | +| Scrape and summarize webpages with AI | Scrapes and summarizes content from webpages using AI. | Content Curation, Data Analysis | [Scrape and summarize webpages with AI.txt](./AI_Research_RAG_and_Data_Analysis/Scrape%20and%20summarize%20webpages%20with%20AI.json) | +| Send Google analytics data to A.I. to analyze then save results in Baserow | Sends Google Analytics data to AI for analysis and saves the results in Baserow. | Data Analysis, Marketing | [Send Google analytics data to A.I. 
to analyze then save results in Baserow.txt](./AI_Research_RAG_and_Data_Analysis/Send%20Google%20analytics%20data%20to%20A.I.%20to%20analyze%20then%20save%20results%20in%20Baserow.json) | +| Spot Workplace Discrimination Patterns with AI | Identifies patterns of workplace discrimination using AI-driven analysis. | HR, AI Research | [Spot Workplace Discrimination Patterns with AI.txt](./AI_Research_RAG_and_Data_Analysis/Spot%20Workplace%20Discrimination%20Patterns%20with%20AI.json) | +| Summarize SERPBear data with AI (via Openrouter) and save it to Baserow | Summarizes SERPBear data using AI (via Openrouter) and saves the insights to Baserow. | SEO, Data Analysis | [Summarize SERPBear data with AI (via Openrouter) and save it to Baserow.txt](./AI_Research_RAG_and_Data_Analysis/Summarize%20SERPBear%20data%20with%20AI%20(via%20Openrouter)%20and%20save%20it%20to%20Baserow.json) | +| Summarize Umami data with AI (via Openrouter) and save it to Baserow | Summarizes Umami analytics data using AI (via Openrouter) and saves the insights to Baserow. | Data Analysis, Marketing | [Summarize Umami data with AI (via Openrouter) and save it to Baserow.txt](./AI_Research_RAG_and_Data_Analysis/Summarize%20Umami%20data%20with%20AI%20(via%20Openrouter)%20and%20save%20it%20to%20Baserow.json) | +| Survey Insights with Qdrant, Python and Information Extractor | Extracts and analyzes insights from survey data using Qdrant, Python, and an information extractor. | Data Analysis, Market Research | [Survey Insights with Qdrant, Python and Information Extractor.txt](./AI_Research_RAG_and_Data_Analysis/Survey%20Insights%20with%20Qdrant,%20Python%20and%20Information%20Extractor.json) | +| Ultimate Scraper Workflow for n8n | A comprehensive scraping workflow for n8n to extract data from various sources. 
| Data Collection, Automation | [Ultimate Scraper Workflow for n8n.txt](./AI_Research_RAG_and_Data_Analysis/Ultimate%20Scraper%20Workflow%20for%20n8n.json) | +| Vector Database as a Big Data Analysis Tool for AI Agents [1/3 anomaly][1/2 KNN] | Utilizes a vector database for big data analysis, focusing on anomaly detection and KNN classification for AI agents. | AI Research, Data Analysis | [Vector Database as a Big Data Analysis Tool for AI Agents [1_3 anomaly][1_2 KNN].txt](./AI_Research_RAG_and_Data_Analysis/Vector%20Database%20as%20a%20Big%20Data%20Analysis%20Tool%20for%20AI%20Agents%20[1_3%20anomaly][1_2%20KNN].json) | +| Vector Database as a Big Data Analysis Tool for AI Agents [2/2 KNN] | Continues the use of a vector database for big data analysis, focusing on KNN classification for AI agents. | AI Research, Data Analysis | [Vector Database as a Big Data Analysis Tool for AI Agents [2_2 KNN].txt](./AI_Research_RAG_and_Data_Analysis/Vector%20Database%20as%20a%20Big%20Data%20Analysis%20Tool%20for%20AI%20Agents%20[2_2%20KNN].json) | +| Vector Database as a Big Data Analysis Tool for AI Agents [2/3 - anomaly] | Explores the use of a vector database for big data analysis, focusing on anomaly detection for AI agents. | AI Research, Data Analysis | [Vector Database as a Big Data Analysis Tool for AI Agents [2_3 - anomaly].txt](./AI_Research_RAG_and_Data_Analysis/Vector%20Database%20as%20a%20Big%20Data%20Analysis%20Tool%20for%20AI%20Agents%20[2_3%20-%20anomaly].json) | +| Vector Database as a Big Data Analysis Tool for AI Agents [3/3 - anomaly] | Concludes the use of a vector database for big data analysis, focusing on anomaly detection for AI agents. 
| AI Research, Data Analysis | [Vector Database as a Big Data Analysis Tool for AI Agents [3_3 - anomaly].txt](./AI_Research_RAG_and_Data_Analysis/Vector%20Database%20as%20a%20Big%20Data%20Analysis%20Tool%20for%20AI%20Agents%20[3_3%20-%20anomaly].json) | +| Visual Regression Testing with Apify and AI Vision Model | Performs visual regression testing using Apify and an AI vision model to detect UI changes. | QA, AI Research | [Visual Regression Testing with Apify and AI Vision Model.txt](./AI_Research_RAG_and_Data_Analysis/Visual%20Regression%20Testing%20with%20Apify%20and%20AI%20Vision%20Model.json) | +| 🔍 Perplexity Research to HTML: AI-Powered Content Creation | Transforms Perplexity AI research into HTML content for AI-powered content creation. | Content Creation, AI Research | [🔍 Perplexity Research to HTML_ AI-Powered Content Creation.txt](./AI_Research_RAG_and_Data_Analysis/%F0%9F%94%8D%20Perplexity%20Research%20to%20HTML_%20AI-Powered%20Content%20Creation.json) | -- Chat with Postgresql Database -- MongoDB AI Agent - Intelligent Movie Recommendations -- Supabase Insertion & Upsertion & Retrieval -- Upsert huge documents in a vector store with Supabase and Notion ### **Other** @@ -258,4 +335,10 @@ If you would like to contribute additional templates or suggest new categories, 👉 [Join n8n and start automating now! 
💎](https://n8n.partnerlinks.io/h1pwwf5m4toe) n8n - \ No newline at end of file + + +--- + +### **Sponsors** +- [mahezsh](https://github.com/mahezsh) +- [Dumpling AI](https://github.com/Dumpling-AI) diff --git a/Slack/AI-Powered Information Monitoring with OpenAI, Google Sheets, Jina AI and Slack.txt b/Slack/AI-Powered Information Monitoring with OpenAI, Google Sheets, Jina AI and Slack.json similarity index 100% rename from Slack/AI-Powered Information Monitoring with OpenAI, Google Sheets, Jina AI and Slack.txt rename to Slack/AI-Powered Information Monitoring with OpenAI, Google Sheets, Jina AI and Slack.json diff --git a/Slack/Creating a AI Slack Bot with Google Gemini.txt b/Slack/Creating a AI Slack Bot with Google Gemini.json similarity index 100% rename from Slack/Creating a AI Slack Bot with Google Gemini.txt rename to Slack/Creating a AI Slack Bot with Google Gemini.json diff --git a/Slack/Customer Support Channel and Ticketing System with Slack and Linear.txt b/Slack/Customer Support Channel and Ticketing System with Slack and Linear.json similarity index 100% rename from Slack/Customer Support Channel and Ticketing System with Slack and Linear.txt rename to Slack/Customer Support Channel and Ticketing System with Slack and Linear.json diff --git a/Slack/Enhance Security Operations with the Qualys Slack Shortcut Bot! (1).txt b/Slack/Enhance Security Operations with the Qualys Slack Shortcut Bot!.json similarity index 100% rename from Slack/Enhance Security Operations with the Qualys Slack Shortcut Bot! 
(1).txt rename to Slack/Enhance Security Operations with the Qualys Slack Shortcut Bot!.json diff --git a/Slack/Enhance Security Operations with the Qualys Slack Shortcut Bot!.txt b/Slack/Enhance Security Operations with the Qualys Slack Shortcut Bot!.txt deleted file mode 100644 index d8c2b86..0000000 --- a/Slack/Enhance Security Operations with the Qualys Slack Shortcut Bot!.txt +++ /dev/null @@ -1,697 +0,0 @@ -{ -"meta": { -"instanceId": "03e9d14e9196363fe7191ce21dc0bb17387a6e755dcc9acc4f5904752919dca8" -}, -"nodes": [ -{ -"id": "adfda9cb-1d77-4c54-b3ea-e7bf438a48af", -"name": "Parse Webhook", -"type": "n8n-nodes-base.set", -"position": [ -760, -640 -], -"parameters": { -"options": {}, -"assignments": { -"assignments": [ -{ -"id": "e63f9299-a19d-4ba1-93b0-59f458769fb2", -"name": "response", -"type": "object", -"value": "={{ $json.body.payload }}" -} -] -} -}, -"typeVersion": 3.3 -}, -{ -"id": "b3e0e490-18e0-44b5-a960-0fdbf8422515", -"name": "Qualys Create Report", -"type": "n8n-nodes-base.executeWorkflow", -"position": [ -1720, -1740 -], -"parameters": { -"options": {}, -"workflowId": "icSLX102kSS9zNdK" -}, -"typeVersion": 1 -}, -{ -"id": "80ae074b-bda5-4638-b46f-246a1b9530ae", -"name": "Required Report Variables", -"type": "n8n-nodes-base.set", -"position": [ -1520, -1740 -], -"parameters": { -"options": {}, -"assignments": { -"assignments": [ -{ -"id": "47cd1502-3039-4661-a6b1-e20a74056550", -"name": "report_title", -"type": "string", -"value": "={{ $json.response.view.state.values.report_title.report_title_input.value }}" -}, -{ -"id": "6a8a0cbf-bf3e-4702-956e-a35966d8b9c5", -"name": "base_url", -"type": "string", -"value": "https://qualysapi.qg3.apps.qualys.com" -}, -{ -"id": "9a15f4db-f006-4ad8-a2c0-4002dd3e2655", -"name": "output_format", -"type": "string", -"value": "={{ $json.response.view.state.values.output_format.output_format_select.selected_option.value }}" -}, -{ -"id": "13978e05-7e7f-42e9-8645-d28803db8cc9", -"name": "template_name", -"type": 
"string", -"value": "={{ $json.response.view.state.values.report_template.report_template_select.selected_option.text.text }}" -} -] -} -}, -"typeVersion": 3.3 -}, -{ -"id": "b596da86-02c7-4d8e-a267-88933f47ae0c", -"name": "Qualys Start Vulnerability Scan", -"type": "n8n-nodes-base.executeWorkflow", -"position": [ -1720, -1540 -], -"parameters": { -"options": {}, -"workflowId": "pYPh5FlGZgb36xZO" -}, -"typeVersion": 1 -}, -{ -"id": "61e39516-6558-46ce-a300-b4cbade7a6f6", -"name": "Scan Report Task Modal", -"type": "n8n-nodes-base.httpRequest", -"position": [ -1620, -720 -], -"parameters": { -"url": "https://slack.com/api/views.open", -"method": "POST", -"options": {}, -"jsonBody": "= {\n \"trigger_id\": \"{{ $('Parse Webhook').item.json['response']['trigger_id'] }}\",\n \"external_id\": \"Scan Report Generator\",\n \"view\": {\n\t\"title\": {\n\t\t\"type\": \"plain_text\",\n\t\t\"text\": \"Scan Report Generator\",\n\t\t\"emoji\": true\n\t},\n\t\"submit\": {\n\t\t\"type\": \"plain_text\",\n\t\t\"text\": \"Generate Report\",\n\t\t\"emoji\": true\n\t},\n\t\"type\": \"modal\",\n\t\"close\": {\n\t\t\"type\": \"plain_text\",\n\t\t\"text\": \"Cancel\",\n\t\t\"emoji\": true\n\t},\n\t\"blocks\": [\n\t\t{\n\t\t\t\"type\": \"image\",\n\t\t\t\"image_url\": \"https://upload.wikimedia.org/wikipedia/commons/thumb/2/26/Logo-Qualys.svg/300px-Logo-Qualys.svg.png\",\n\t\t\t\"alt_text\": \"Qualys Logo\"\n\t\t},\n\t\t{\n\t\t\t\"type\": \"section\",\n\t\t\t\"text\": {\n\t\t\t\t\"type\": \"mrkdwn\",\n\t\t\t\t\"text\": \"Select a template and generate a detailed scan report based on the results of your previous scans.\"\n\t\t\t}\n\t\t},\n\t\t{\n\t\t\t\"type\": \"input\",\n\t\t\t\"block_id\": \"report_template\",\n\t\t\t\"element\": {\n\t\t\t\t\"type\": \"external_select\",\n\t\t\t\t\"placeholder\": {\n\t\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\t\"text\": \"Select a report template\",\n\t\t\t\t\t\"emoji\": true\n\t\t\t\t},\n\t\t\t\t\"action_id\": 
\"report_template_select\"\n\t\t\t},\n\t\t\t\"label\": {\n\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\"text\": \"Report Template\",\n\t\t\t\t\"emoji\": true\n\t\t\t},\n\t\t\t\"hint\": {\n\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\"text\": \"Choose a report template from your Qualys account to structure the output.\"\n\t\t\t}\n\t\t},\n\t\t{\n\t\t\t\"type\": \"input\",\n\t\t\t\"block_id\": \"report_title\",\n\t\t\t\"element\": {\n\t\t\t\t\"type\": \"plain_text_input\",\n\t\t\t\t\"action_id\": \"report_title_input\",\n\t\t\t\t\"placeholder\": {\n\t\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\t\"text\": \"Enter a custom title for the report\"\n\t\t\t\t}\n\t\t\t},\n\t\t\t\"label\": {\n\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\"text\": \"Report Title\",\n\t\t\t\t\"emoji\": true\n\t\t\t},\n\t\t\t\"hint\": {\n\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\"text\": \"Provide a descriptive title for your report. This title will be used in the report header.\"\n\t\t\t}\n\t\t},\n\t\t{\n\t\t\t\"type\": \"input\",\n\t\t\t\"block_id\": \"output_format\",\n\t\t\t\"element\": {\n\t\t\t\t\"type\": \"static_select\",\n\t\t\t\t\"placeholder\": {\n\t\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\t\"text\": \"Select output format\",\n\t\t\t\t\t\"emoji\": true\n\t\t\t\t},\n\t\t\t\t\"options\": [\n\t\t\t\t\t{\n\t\t\t\t\t\t\"text\": {\n\t\t\t\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\t\t\t\"text\": \"PDF\",\n\t\t\t\t\t\t\t\"emoji\": true\n\t\t\t\t\t\t},\n\t\t\t\t\t\t\"value\": \"pdf\"\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\t\"text\": {\n\t\t\t\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\t\t\t\"text\": \"HTML\",\n\t\t\t\t\t\t\t\"emoji\": true\n\t\t\t\t\t\t},\n\t\t\t\t\t\t\"value\": \"html\"\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\t\"text\": {\n\t\t\t\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\t\t\t\"text\": \"CSV\",\n\t\t\t\t\t\t\t\"emoji\": true\n\t\t\t\t\t\t},\n\t\t\t\t\t\t\"value\": \"csv\"\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t\"action_id\": \"output_format_select\"\n\t\t\t},\n\t\t\t\"label\": 
{\n\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\"text\": \"Output Format\",\n\t\t\t\t\"emoji\": true\n\t\t\t},\n\t\t\t\"hint\": {\n\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\"text\": \"Choose the format in which you want the report to be generated.\"\n\t\t\t}\n\t\t}\n\t]\n}\n}", -"sendBody": true, -"jsonQuery": "{\n \"Content-type\": \"application/json\"\n}", -"sendQuery": true, -"specifyBody": "json", -"specifyQuery": "json", -"authentication": "predefinedCredentialType", -"nodeCredentialType": "slackApi" -}, -"credentials": { -"slackApi": { -"id": "DZJDes1ZtGpqClNk", -"name": "Qualys Slack App" -} -}, -"typeVersion": 4.2 -}, -{ -"id": "29cf716c-9cd6-4bd9-a0f9-c75baca86cc1", -"name": "Vuln Scan Modal", -"type": "n8n-nodes-base.httpRequest", -"position": [ -1620, -560 -], -"parameters": { -"url": "https://slack.com/api/views.open", -"method": "POST", -"options": {}, -"jsonBody": "= {\n \"trigger_id\": \"{{ $('Parse Webhook').item.json['response']['trigger_id'] }}\",\n \"external_id\": \"Scan Report Generator\",\n \"view\": {\n\t\"title\": {\n\t\t\"type\": \"plain_text\",\n\t\t\"text\": \"Vulnerability Scan\",\n\t\t\"emoji\": true\n\t},\n\t\"submit\": {\n\t\t\"type\": \"plain_text\",\n\t\t\"text\": \"Execute Scan\",\n\t\t\"emoji\": true\n\t},\n\t\"type\": \"modal\",\n\t\"close\": {\n\t\t\"type\": \"plain_text\",\n\t\t\"text\": \"Cancel\",\n\t\t\"emoji\": true\n\t},\n\t\"blocks\": [\n\t\t{\n\t\t\t\"type\": \"image\",\n\t\t\t\"image_url\": \"https://upload.wikimedia.org/wikipedia/commons/thumb/2/26/Logo-Qualys.svg/300px-Logo-Qualys.svg.png\",\n\t\t\t\"alt_text\": \"Qualys Logo\"\n\t\t},\n\t\t{\n\t\t\t\"type\": \"section\",\n\t\t\t\"text\": {\n\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\"text\": \"Initiate a network-wide scan to detect and assess security vulnerabilities.\",\n\t\t\t\t\"emoji\": true\n\t\t\t}\n\t\t},\n\t\t{\n\t\t\t\"type\": \"input\",\n\t\t\t\"block_id\": \"option_title\",\n\t\t\t\"element\": {\n\t\t\t\t\"type\": 
\"plain_text_input\",\n\t\t\t\t\"action_id\": \"text_input-action\",\n\t\t\t\t\"initial_value\": \"Initial Options\"\n\t\t\t},\n\t\t\t\"label\": {\n\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\"text\": \"Option Title\",\n\t\t\t\t\"emoji\": true\n\t\t\t},\n\t\t\t\"hint\": {\n\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\"text\": \"Specify the title of the option profile to use for the scan.\"\n\t\t\t}\n\t\t},\n\t\t{\n\t\t\t\"type\": \"input\",\n\t\t\t\"block_id\": \"scan_title\",\n\t\t\t\"element\": {\n\t\t\t\t\"type\": \"plain_text_input\",\n\t\t\t\t\"action_id\": \"text_input-action\",\n\t\t\t\t\"placeholder\": {\n\t\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\t\"text\": \"Enter your scan title\"\n\t\t\t\t},\n\t\t\t\t\"initial_value\": \"n8n Scan 1\"\n\t\t\t},\n\t\t\t\"label\": {\n\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\"text\": \"Scan Title\",\n\t\t\t\t\"emoji\": true\n\t\t\t},\n\t\t\t\"hint\": {\n\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\"text\": \"Provide a descriptive title for the scan. Up to 2000 characters.\"\n\t\t\t}\n\t\t},\n\t\t{\n\t\t\t\"type\": \"input\",\n\t\t\t\"block_id\": \"asset_groups\",\n\t\t\t\"element\": {\n\t\t\t\t\"type\": \"plain_text_input\",\n\t\t\t\t\"action_id\": \"text_input-action\",\n\t\t\t\t\"placeholder\": {\n\t\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\t\"text\": \"Enter asset groups\"\n\t\t\t\t},\n\t\t\t\t\"initial_value\": \"Group1\"\n\t\t\t},\n\t\t\t\"label\": {\n\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\"text\": \"Asset Groups\",\n\t\t\t\t\"emoji\": true\n\t\t\t},\n\t\t\t\"hint\": {\n\t\t\t\t\"type\": \"plain_text\",\n\t\t\t\t\"text\": \"Specify asset group titles for targeting. 
Multiple titles must be comma-separated.\"\n\t\t\t}\n\t\t}\n\t]\n}\n}", -"sendBody": true, -"jsonQuery": "{\n \"Content-type\": \"application/json\"\n}", -"sendQuery": true, -"specifyBody": "json", -"specifyQuery": "json", -"authentication": "predefinedCredentialType", -"nodeCredentialType": "slackApi" -}, -"credentials": { -"slackApi": { -"id": "DZJDes1ZtGpqClNk", -"name": "Qualys Slack App" -} -}, -"typeVersion": 4.2 -}, -{ -"id": "a771704d-4191-4e80-b62f-81b41b047a87", -"name": "Route Message", -"type": "n8n-nodes-base.switch", -"position": [ -940, -640 -], -"parameters": { -"rules": { -"values": [ -{ -"outputKey": "Vuln Scan Modal", -"conditions": { -"options": { -"leftValue": "", -"caseSensitive": true, -"typeValidation": "strict" -}, -"combinator": "and", -"conditions": [ -{ -"operator": { -"type": "string", -"operation": "equals" -}, -"leftValue": "={{ $json.response.callback_id }}", -"rightValue": "trigger-qualys-vmscan" -} -] -}, -"renameOutput": true -}, -{ -"outputKey": "Scan Report Modal", -"conditions": { -"options": { -"leftValue": "", -"caseSensitive": true, -"typeValidation": "strict" -}, -"combinator": "and", -"conditions": [ -{ -"id": "02868fd8-2577-4c6d-af5e-a1963cb2f786", -"operator": { -"name": "filter.operator.equals", -"type": "string", -"operation": "equals" -}, -"leftValue": "={{ $json.response.callback_id }}", -"rightValue": "qualys-scan-report" -} -] -}, -"renameOutput": true -}, -{ -"outputKey": "Process Submission", -"conditions": { -"options": { -"leftValue": "", -"caseSensitive": true, -"typeValidation": "strict" -}, -"combinator": "and", -"conditions": [ -{ -"id": "c320c8b8-947b-433a-be82-d2aa96594808", -"operator": { -"name": "filter.operator.equals", -"type": "string", -"operation": "equals" -}, -"leftValue": "={{ $json.response.type }}", -"rightValue": "view_submission" -} -] -}, -"renameOutput": true -} -] -}, -"options": { -"fallbackOutput": "none" -} -}, -"typeVersion": 3 -}, -{ -"id": "c8346d57-762a-4bbd-8d2b-f13097cb063d", 
-"name": "Required Scan Variables", -"type": "n8n-nodes-base.set", -"position": [ -1520, -1540 -], -"parameters": { -"options": {}, -"assignments": { -"assignments": [ -{ -"id": "096ff32e-356e-4a85-aad2-01001d69dd46", -"name": "platformurl", -"type": "string", -"value": "https://qualysapi.qg3.apps.qualys.com" -}, -{ -"id": "070178a6-73b0-458b-8657-20ab4ff0485c", -"name": "option_title", -"type": "string", -"value": "={{ $json.response.view.state.values.option_title['text_input-action'].value }}" -}, -{ -"id": "3605424b-5bfc-44f0-b6e4-e0d6b1130b8e", -"name": "scan_title", -"type": "string", -"value": "={{ $json.response.view.state.values.scan_title['text_input-action'].value }}" -}, -{ -"id": "2320d966-b834-46fb-b674-be97cc08682e", -"name": "asset_groups", -"type": "string", -"value": "={{ $json.response.view.state.values.asset_groups['text_input-action'].value }}" -} -] -} -}, -"typeVersion": 3.3 -}, -{ -"id": "55589da9-50ce-4d55-a5ff-d62abdf65fa4", -"name": "Route Submission", -"type": "n8n-nodes-base.switch", -"position": [ -1240, -1140 -], -"parameters": { -"rules": { -"values": [ -{ -"outputKey": "Vuln Scan", -"conditions": { -"options": { -"leftValue": "", -"caseSensitive": true, -"typeValidation": "strict" -}, -"combinator": "and", -"conditions": [ -{ -"operator": { -"type": "string", -"operation": "equals" -}, -"leftValue": "={{ $json.response.view.title.text }}", -"rightValue": "Vulnerability Scan" -} -] -}, -"renameOutput": true -}, -{ -"outputKey": "Scan Report", -"conditions": { -"options": { -"leftValue": "", -"caseSensitive": true, -"typeValidation": "strict" -}, -"combinator": "and", -"conditions": [ -{ -"id": "02868fd8-2577-4c6d-af5e-a1963cb2f786", -"operator": { -"name": "filter.operator.equals", -"type": "string", -"operation": "equals" -}, -"leftValue": "={{ $json.response.view.title.text }}", -"rightValue": "Scan Report Generator" -} -] -}, -"renameOutput": true -} -] -}, -"options": { -"fallbackOutput": "none" -} -}, -"typeVersion": 3 -}, -{ 
-"id": "d0fc264d-0c48-4aa6-aeab-ed605d96f35a", -"name": "Sticky Note", -"type": "n8n-nodes-base.stickyNote", -"position": [ -428.3467548314237, -270.6382978723399 -], -"parameters": { -"color": 7, -"width": 466.8168310000617, -"height": 567.6433222116042, -"content": "![Imgur](https://uploads.n8n.io/templates/slack.png)\n## Events Webhook Trigger\nThe first node receives all messages from Slack API via Subscription Events API. You can find more information about setting up the subscription events API by [clicking here](https://api.slack.com/apis/connections/events-api). \n\nThe second node extracts the payload from slack into an object that n8n can understand. " -}, -"typeVersion": 1 -}, -{ -"id": "acb3fbdc-1fcb-4763-8529-ea2842607569", -"name": "Sticky Note15", -"type": "n8n-nodes-base.stickyNote", -"position": [ -900, --32.762682645579616 -], -"parameters": { -"color": 7, -"width": 566.0553219408072, -"height": 1390.6748140207737, -"content": "![n8n](https://uploads.n8n.io/templates/n8n.png)\n## Efficient Slack Interaction Handling with n8n\n\nThis section of the workflow is designed to efficiently manage and route messages and submissions from Slack based on specific triggers and conditions. 
When a Slack interaction occurs—such as a user triggering a vulnerability scan or generating a report through a modal—the workflow intelligently routes the message to the appropriate action:\n\n- **Dynamic Routing**: Uses conditions to determine the nature of the Slack interaction, whether it's a direct command to initiate a scan or a request to generate a report.\n- **Modal Management**: Differentiates actions based on modal titles and `callback_id`s, ensuring that each type of submission is processed according to its context.\n- **Streamlined Responses**: After routing, the workflow promptly handles the necessary responses or actions, including closing modal popups and responding to Slack with appropriate confirmation or data.\n\n**Purpose**: This mechanism ensures that all interactions within Slack are handled quickly and accurately, automating responses and actions in real-time to enhance user experience and workflow efficiency." -}, -"typeVersion": 1 -}, -{ -"id": "85f370e8-70d2-466e-8f44-45eaf04a0d95", -"name": "Sticky Note11", -"type": "n8n-nodes-base.stickyNote", -"position": [ -1473.6255461332685, -56.17183602125283 -], -"parameters": { -"color": 7, -"width": 396.6025898621133, -"height": 881.1659905894905, -"content": "![Imgur](https://uploads.n8n.io/templates/slack.png)\n## Display Modal Popup\nThis section pops open a modal window that is later used to send data into TheHive. \n\nModals can be customized to perform all sorts of actions. And they are natively mobile! You can see a screenshot of the Slack Modals on the right. 
\n\nLearn more about them by [clicking here](https://api.slack.com/surfaces/modals)" -}, -"typeVersion": 1 -}, -{ -"id": "cae79c1c-47f8-41c0-b1d0-e284359b52a8", -"name": "Sticky Note12", -"type": "n8n-nodes-base.stickyNote", -"position": [ -1480, -960 -], -"parameters": { -"color": 7, -"width": 390.82613196003143, -"height": 950.1640646001949, -"content": "![Imgur](https://i.imgur.com/abGF8EO.png)\n## Modal Submission Payload\nThe data input into the Slack Modal makes its way into these set nodes that then pass that data into the Qualys Sub workflows that handle the heavy lifting. \n\n### Two Trigger Options\n- **Trigger a Vulnerability Scan** in the Slack UI which then sends a slack message to a channel of your choice summarizing and linking to the scan in slack\n- **Trigger report creation** in the Slack UI from the previously generated Vulnerability scan and upload a PDF copy of the report directly in a slack channel of your choice" -}, -"typeVersion": 1 -}, -{ -"id": "1017df8b-ff32-47aa-a4c2-a026e6597fa9", -"name": "Close Modal Popup", -"type": "n8n-nodes-base.respondToWebhook", -"position": [ -1000, -1140 -], -"parameters": { -"options": { -"responseCode": 204 -}, -"respondWith": "noData" -}, -"typeVersion": 1.1 -}, -{ -"id": "6b058f2a-2c0c-4326-aa42-08d840e306f7", -"name": "Sticky Note8", -"type": "n8n-nodes-base.stickyNote", -"position": [ --260, -280 -], -"parameters": { -"width": 675.1724774900403, -"height": 972.8853473866498, -"content": "![n8n](https://uploads.n8n.io/templates/n8n.png)\n## Enhance Security Operations with the Qualys Slack Shortcut Bot!\n\nOur **Qualys Slack Shortcut Bot** is strategically designed to facilitate immediate security operations directly from Slack. 
This powerful tool allows users to initiate vulnerability scans and generate detailed reports through simple Slack interactions, streamlining the process of managing security assessments.\n\n**Workflow Highlights:**\n- **Interactive Modals**: Utilizes Slack modals to gather user inputs for scan configurations and report generation, providing a user-friendly interface for complex operations.\n- **Dynamic Workflow Execution**: Integrates seamlessly with Qualys to execute vulnerability scans and create reports based on user-specified parameters.\n- **Real-Time Feedback**: Offers instant feedback within Slack, updating users about the status of their requests and delivering reports directly through Slack channels.\n\n\n**Operational Flow:**\n- **Parse Webhook Data**: Captures and parses incoming data from Slack to understand user commands accurately.\n- **Execute Actions**: Depending on the user's selection, the workflow triggers other sub-workflows like 'Qualys Start Vulnerability Scan' or 'Qualys Create Report' for detailed processing.\n- **Respond to Slack**: Ensures that every interaction is acknowledged, maintaining a smooth user experience by managing modal popups and sending appropriate responses.\n\n\n**Setup Instructions:**\n- Verify that Slack and Qualys API integrations are correctly configured for seamless interaction.\n- Customize the modal interfaces to align with your organization's operational protocols and security policies.\n- Test the workflow to ensure that it responds accurately to Slack commands and that the integration with Qualys is functioning as expected.\n\n\n**Need Assistance?**\n- Explore our [Documentation](https://docs.qualys.com) or get help from the [n8n Community](https://community.n8n.io) for more detailed guidance on setup and customization.\n\nDeploy this bot within your Slack environment to significantly enhance the efficiency and responsiveness of your security operations, enabling proactive management of vulnerabilities and 
streamlined reporting." -}, -"typeVersion": 1 -}, -{ -"id": "63b537e8-50c9-479d-96a4-54e621689a23", -"name": "Webhook", -"type": "n8n-nodes-base.webhook", -"position": [ -520, -640 -], -"webhookId": "4f86c00d-ceb4-4890-84c5-850f8e5dec05", -"parameters": { -"path": "4f86c00d-ceb4-4890-84c5-850f8e5dec05", -"options": {}, -"httpMethod": "POST", -"responseMode": "responseNode" -}, -"typeVersion": 2 -}, -{ -"id": "13500444-f2ff-4b77-8f41-8ac52d067ec7", -"name": "Respond to Slack Webhook - Vulnerability", -"type": "n8n-nodes-base.respondToWebhook", -"position": [ -1280, -560 -], -"parameters": { -"options": {}, -"respondWith": "noData" -}, -"typeVersion": 1.1 -}, -{ -"id": "e64cedf0-948c-43c8-a62c-d0ec2916f3b6", -"name": "Respond to Slack Webhook - Report", -"type": "n8n-nodes-base.respondToWebhook", -"position": [ -1280, -720 -], -"parameters": { -"options": { -"responseCode": 200 -}, -"respondWith": "noData" -}, -"typeVersion": 1.1 -}, -{ -"id": "d2e53f7b-090a-4330-949d-d66ac0e5849c", -"name": "Sticky Note1", -"type": "n8n-nodes-base.stickyNote", -"position": [ -1494.8207799250774, -1400 -], -"parameters": { -"color": 5, -"width": 361.46312518523973, -"height": 113.6416448104651, -"content": "### 🙋 Remember to update your Slack Channels\nDon't forget to update the Slack Channels in the Slack nodes in these two subworkflows. \n" -}, -"typeVersion": 1 -}, -{ -"id": "2731f910-288f-497a-a71d-d840a63b2930", -"name": "Sticky Note2", -"type": "n8n-nodes-base.stickyNote", -"position": [ -1480, -400 -], -"parameters": { -"color": 5, -"width": 376.26546828439086, -"height": 113.6416448104651, -"content": "### 🙋 Don't forget your slack credentials!\nThankfully n8n makes it easy, as long as you've added credentials to a normal slack node, these http nodes are a snap to change via the drop down. 
" -}, -"typeVersion": 1 -}, -{ -"id": "72105959-ee9b-4ce6-a7f8-0f5f112c14d2", -"name": "Sticky Note3", -"type": "n8n-nodes-base.stickyNote", -"position": [ -1880, -500 -], -"parameters": { -"color": 5, -"width": 532.5097590794944, -"height": 671.013686767174, -"content": "![Imgur](https://uploads.n8n.io/templates/qualysscanreport.png)" -}, -"typeVersion": 1 -}, -{ -"id": "49b8ce63-cefd-483a-b802-03e3500d807b", -"name": "Sticky Note4", -"type": "n8n-nodes-base.stickyNote", -"position": [ -1880, --200 -], -"parameters": { -"color": 5, -"width": 535.8333316661616, -"height": 658.907292269235, -"content": "![Imgur](https://uploads.n8n.io/templates/qualysmodalscan.png)" -}, -"typeVersion": 1 -}, -{ -"id": "3ec8c799-d5a5-4134-891a-59adb3e68e23", -"name": "Sticky Note5", -"type": "n8n-nodes-base.stickyNote", -"position": [ -280, --158.042446016207 -], -"parameters": { -"color": 5, -"width": 596.6847639718076, -"height": 422.00743613240917, -"content": "![Imgur](https://uploads.n8n.io/templates/qualysscanshortcut.png)\n### 🤖 Triggering this workflow is as easy as typing a backslash in Slack" -}, -"typeVersion": 1 -} -], -"pinData": {}, -"connections": { -"Webhook": { -"main": [ -[ -{ -"node": "Parse Webhook", -"type": "main", -"index": 0 -} -] -] -}, -"Parse Webhook": { -"main": [ -[ -{ -"node": "Route Message", -"type": "main", -"index": 0 -} -] -] -}, -"Route Message": { -"main": [ -[ -{ -"node": "Respond to Slack Webhook - Vulnerability", -"type": "main", -"index": 0 -} -], -[ -{ -"node": "Respond to Slack Webhook - Report", -"type": "main", -"index": 0 -} -], -[ -{ -"node": "Close Modal Popup", -"type": "main", -"index": 0 -} -] -] -}, -"Route Submission": { -"main": [ -[ -{ -"node": "Required Scan Variables", -"type": "main", -"index": 0 -} -], -[ -{ -"node": "Required Report Variables", -"type": "main", -"index": 0 -} -] -] -}, -"Close Modal Popup": { -"main": [ -[ -{ -"node": "Route Submission", -"type": "main", -"index": 0 -} -] -] -}, -"Required Scan Variables": { 
-"main": [ -[ -{ -"node": "Qualys Start Vulnerability Scan", -"type": "main", -"index": 0 -} -] -] -}, -"Required Report Variables": { -"main": [ -[ -{ -"node": "Qualys Create Report", -"type": "main", -"index": 0 -} -] -] -}, -"Respond to Slack Webhook - Report": { -"main": [ -[ -{ -"node": "Scan Report Task Modal", -"type": "main", -"index": 0 -} -] -] -}, -"Respond to Slack Webhook - Vulnerability": { -"main": [ -[ -{ -"node": "Vuln Scan Modal", -"type": "main", -"index": 0 -} -] -] -} -} -} \ No newline at end of file diff --git a/Slack/Enrich Pipedrive_s Organization Data with OpenAI GPT-4o & Notify it in Slack.txt b/Slack/Enrich Pipedrive_s Organization Data with OpenAI GPT-4o & Notify it in Slack.json similarity index 100% rename from Slack/Enrich Pipedrive_s Organization Data with OpenAI GPT-4o & Notify it in Slack.txt rename to Slack/Enrich Pipedrive_s Organization Data with OpenAI GPT-4o & Notify it in Slack.json diff --git a/Slack/IT Ops AI SlackBot Workflow - Chat with your knowledge base.txt b/Slack/IT Ops AI SlackBot Workflow - Chat with your knowledge base.json similarity index 100% rename from Slack/IT Ops AI SlackBot Workflow - Chat with your knowledge base.txt rename to Slack/IT Ops AI SlackBot Workflow - Chat with your knowledge base.json diff --git a/Slack/Sentiment Analysis Tracking on Support Issues with Linear and Slack (1).txt b/Slack/Sentiment Analysis Tracking on Support Issues with Linear and Slack.json similarity index 100% rename from Slack/Sentiment Analysis Tracking on Support Issues with Linear and Slack (1).txt rename to Slack/Sentiment Analysis Tracking on Support Issues with Linear and Slack.json diff --git a/Slack/Sentiment Analysis Tracking on Support Issues with Linear and Slack.txt b/Slack/Sentiment Analysis Tracking on Support Issues with Linear and Slack.txt deleted file mode 100644 index a819868..0000000 --- a/Slack/Sentiment Analysis Tracking on Support Issues with Linear and Slack.txt +++ /dev/null @@ -1,752 +0,0 @@ -{ 
-"nodes": [ -{ -"id": "82fd6023-2cc3-416e-83b7-fda24d07d77a", -"name": "Issues to List", -"type": "n8n-nodes-base.splitOut", -"position": [ -40, --100 -], -"parameters": { -"options": {}, -"fieldToSplitOut": "data.issues.nodes" -}, -"typeVersion": 1 -}, -{ -"id": "9cc77786-e14f-47c6-a3cf-60c2830612e6", -"name": "OpenAI Chat Model", -"type": "@n8n/n8n-nodes-langchain.lmChatOpenAi", -"position": [ -360, -80 -], -"parameters": { -"options": {} -}, -"credentials": { -"openAiApi": { -"id": "8gccIjcuf3gvaoEr", -"name": "OpenAi account" -} -}, -"typeVersion": 1 -}, -{ -"id": "821d4a60-81a4-4915-9c13-3d978cc0114b", -"name": "Combine Sentiment Analysis", -"type": "n8n-nodes-base.set", -"position": [ -700, --80 -], -"parameters": { -"mode": "raw", -"options": {}, -"jsonOutput": "={{\n{\n ...$('Issues to List').item.json,\n ...$json.output\n}\n}}" -}, -"typeVersion": 3.4 -}, -{ -"id": "fe6560f6-2e1b-4442-a2af-bd5a1623f213", -"name": "Sentiment over Issue Comments", -"type": "@n8n/n8n-nodes-langchain.informationExtractor", -"position": [ -360, --80 -], -"parameters": { -"text": "={{\n$json.comments.nodes.map(node => [\n `${node.user.displayName} commented on ${node.createdAt}:`,\n node.body\n].join('\\n')).join('---\\n')\n}}", -"options": {}, -"attributes": { -"attributes": [ -{ -"name": "sentiment", -"required": true, -"description": "One of positive, negative or neutral" -}, -{ -"name": "sentimentSummary", -"description": "Describe the sentiment of the conversation" -} -] -} -}, -"typeVersion": 1 -}, -{ -"id": "4fd0345d-e5bf-426d-8403-e2217e19bbea", -"name": "Copy of Issue", -"type": "n8n-nodes-base.set", -"position": [ -1200, --60 -], -"parameters": { -"mode": "raw", -"options": {}, -"jsonOutput": "={{ $json }}" -}, -"typeVersion": 3.4 -}, -{ -"id": "6d103d67-451e-4780-8f52-f4dba4b42860", -"name": "For Each Issue...", -"type": "n8n-nodes-base.splitInBatches", -"position": [ -1020, --60 -], -"parameters": { -"options": {} -}, -"typeVersion": 3 -}, -{ -"id": 
"032702d9-27d8-4735-b978-20b55bc1a74f", -"name": "Get Existing Sentiment", -"type": "n8n-nodes-base.airtable", -"position": [ -1380, --60 -], -"parameters": { -"base": { -"__rl": true, -"mode": "list", -"value": "appViDaeaFw4qv9La", -"cachedResultUrl": "https://airtable.com/appViDaeaFw4qv9La", -"cachedResultName": "Sentiment Analysis over Issue Comments" -}, -"table": { -"__rl": true, -"mode": "list", -"value": "tblhO0sfRhKP6ibS8", -"cachedResultUrl": "https://airtable.com/appViDaeaFw4qv9La/tblhO0sfRhKP6ibS8", -"cachedResultName": "Table 1" -}, -"options": { -"fields": [ -"Issue ID", -"Current Sentiment" -] -}, -"operation": "search", -"filterByFormula": "={Issue ID} = '{{ $json.identifier || 'XYZ' }}'" -}, -"credentials": { -"airtableTokenApi": { -"id": "Und0frCQ6SNVX3VV", -"name": "Airtable Personal Access Token account" -} -}, -"typeVersion": 2.1, -"alwaysOutputData": true -}, -{ -"id": "f2ded6fa-8b0f-4a34-868c-13c19f725c98", -"name": "Update Row", -"type": "n8n-nodes-base.airtable", -"position": [ -1560, --60 -], -"parameters": { -"base": { -"__rl": true, -"mode": "list", -"value": "appViDaeaFw4qv9La", -"cachedResultUrl": "https://airtable.com/appViDaeaFw4qv9La", -"cachedResultName": "Sentiment Analysis over Issue Comments" -}, -"table": { -"__rl": true, -"mode": "list", -"value": "tblhO0sfRhKP6ibS8", -"cachedResultUrl": "https://airtable.com/appViDaeaFw4qv9La/tblhO0sfRhKP6ibS8", -"cachedResultName": "Table 1" -}, -"columns": { -"value": { -"Summary": "={{ $('Copy of Issue').item.json.sentimentSummary || '' }}", -"Assigned": "={{ $('Copy of Issue').item.json.assignee.name }}", -"Issue ID": "={{ $('Copy of Issue').item.json.identifier }}", -"Issue Title": "={{ $('Copy of Issue').item.json.title }}", -"Issue Created": "={{ $('Copy of Issue').item.json.createdAt }}", -"Issue Updated": "={{ $('Copy of Issue').item.json.updatedAt }}", -"Current Sentiment": "={{ $('Copy of Issue').item.json.sentiment.toSentenceCase() }}", -"Previous Sentiment": "={{ !$json.isEmpty() 
? $json['Current Sentiment'] : 'N/A' }}" -}, -"schema": [ -{ -"id": "id", -"type": "string", -"display": true, -"removed": true, -"readOnly": true, -"required": false, -"displayName": "id", -"defaultMatch": true -}, -{ -"id": "Issue ID", -"type": "string", -"display": true, -"removed": false, -"readOnly": false, -"required": false, -"displayName": "Issue ID", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Previous Sentiment", -"type": "options", -"display": true, -"options": [ -{ -"name": "Positive", -"value": "Positive" -}, -{ -"name": "Negative", -"value": "Negative" -}, -{ -"name": "Neutral", -"value": "Neutral" -}, -{ -"name": "N/A", -"value": "N/A" -} -], -"removed": false, -"readOnly": false, -"required": false, -"displayName": "Previous Sentiment", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Current Sentiment", -"type": "options", -"display": true, -"options": [ -{ -"name": "Positive", -"value": "Positive" -}, -{ -"name": "Negative", -"value": "Negative" -}, -{ -"name": "Neutral", -"value": "Neutral" -}, -{ -"name": "N/A", -"value": "N/A" -} -], -"removed": false, -"readOnly": false, -"required": false, -"displayName": "Current Sentiment", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Summary", -"type": "string", -"display": true, -"removed": false, -"readOnly": false, -"required": false, -"displayName": "Summary", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Issue Title", -"type": "string", -"display": true, -"removed": false, -"readOnly": false, -"required": false, -"displayName": "Issue Title", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Issue Created", -"type": "dateTime", -"display": true, -"removed": false, -"readOnly": false, -"required": false, -"displayName": "Issue Created", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Issue Updated", -"type": "dateTime", -"display": true, -"removed": false, -"readOnly": false, -"required": false, 
-"displayName": "Issue Updated", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Assigned", -"type": "string", -"display": true, -"removed": false, -"readOnly": false, -"required": false, -"displayName": "Assigned", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Created", -"type": "string", -"display": true, -"removed": true, -"readOnly": true, -"required": false, -"displayName": "Created", -"defaultMatch": false, -"canBeUsedToMatch": true -}, -{ -"id": "Last Modified", -"type": "string", -"display": true, -"removed": true, -"readOnly": true, -"required": false, -"displayName": "Last Modified", -"defaultMatch": false, -"canBeUsedToMatch": true -} -], -"mappingMode": "defineBelow", -"matchingColumns": [ -"Issue ID" -] -}, -"options": {}, -"operation": "upsert" -}, -"credentials": { -"airtableTokenApi": { -"id": "Und0frCQ6SNVX3VV", -"name": "Airtable Personal Access Token account" -} -}, -"typeVersion": 2.1 -}, -{ -"id": "e6fb0b8f-2469-4b66-b9e2-f4f3c0a613af", -"name": "Airtable Trigger", -"type": "n8n-nodes-base.airtableTrigger", -"position": [ -1900, --40 -], -"parameters": { -"baseId": { -"__rl": true, -"mode": "id", -"value": "appViDaeaFw4qv9La" -}, -"tableId": { -"__rl": true, -"mode": "id", -"value": "tblhO0sfRhKP6ibS8" -}, -"pollTimes": { -"item": [ -{ -"mode": "everyHour" -} -] -}, -"triggerField": "Current Sentiment", -"authentication": "airtableTokenApi", -"additionalFields": {} -}, -"credentials": { -"airtableTokenApi": { -"id": "Und0frCQ6SNVX3VV", -"name": "Airtable Personal Access Token account" -} -}, -"typeVersion": 1 -}, -{ -"id": "669762c4-860b-43ad-b677-72d4564e1c29", -"name": "Sentiment Transition", -"type": "n8n-nodes-base.switch", -"position": [ -2080, --40 -], -"parameters": { -"rules": { -"values": [ -{ -"outputKey": "NON-NEGATIVE to NEGATIVE", -"conditions": { -"options": { -"version": 2, -"leftValue": "", -"caseSensitive": true, -"typeValidation": "strict" -}, -"combinator": "and", -"conditions": [ -{ 
-"operator": { -"type": "boolean", -"operation": "true", -"singleValue": true -}, -"leftValue": "={{ $json.fields[\"Previous Sentiment\"] !== 'Negative' && $json.fields[\"Current Sentiment\"] === 'Negative' }}", -"rightValue": "" -} -] -}, -"renameOutput": true -} -] -}, -"options": { -"fallbackOutput": "none" -} -}, -"typeVersion": 3.2 -}, -{ -"id": "2fbcfbea-3989-459b-8ca7-b65c130a479b", -"name": "Fetch Active Linear Issues", -"type": "n8n-nodes-base.graphql", -"position": [ --140, --100 -], -"parameters": { -"query": "=query (\n $filter: IssueFilter\n) {\n issues(\n filter: $filter\n ) {\n nodes {\n id\n identifier\n title\n description\n url\n createdAt\n updatedAt\n assignee {\n name\n }\n comments {\n nodes {\n id\n createdAt\n user {\n displayName\n }\n body\n }\n }\n }\n }\n}", -"endpoint": "https://api.linear.app/graphql", -"variables": "={{\n{\n \"filter\": {\n updatedAt: { gte: $now.minus(30, 'minutes').toISO() }\n }\n}\n}}", -"requestFormat": "json", -"authentication": "headerAuth" -}, -"credentials": { -"httpHeaderAuth": { -"id": "XME2Ubkuy9hpPEM5", -"name": "Linear.app (heightio)" -} -}, -"typeVersion": 1 -}, -{ -"id": "aaf1c25e-c398-4715-88bf-bd98daafc10f", -"name": "Schedule Trigger", -"type": "n8n-nodes-base.scheduleTrigger", -"position": [ --340, --100 -], -"parameters": { -"rule": { -"interval": [ -{ -"field": "minutes", -"minutesInterval": 30 -} -] -} -}, -"typeVersion": 1.2 -}, -{ -"id": "b3e2df39-90ce-4ebf-aa68-05499965ec30", -"name": "Deduplicate Notifications", -"type": "n8n-nodes-base.removeDuplicates", -"position": [ -2280, --40 -], -"parameters": { -"options": {}, -"operation": "removeItemsSeenInPreviousExecutions", -"dedupeValue": "={{ $json.fields[\"Issue ID\"] }}:{{ $json.fields['Last Modified'] }}" -}, -"typeVersion": 2 -}, -{ -"id": "2a116475-32cd-4c9d-bfc1-3bd494f79a49", -"name": "Report Issue Negative Transition", -"type": "n8n-nodes-base.slack", -"position": [ -2480, --40 -], -"webhookId": "612f1001-3fcc-480b-a835-05f9e2d56a5f", 
-"parameters": { -"text": "={{ $('Deduplicate Notifications').all().length }} Issues have transitions to Negative Sentiment", -"select": "channel", -"blocksUi": "={{\n{\n \"blocks\": [\n {\n \"type\": \"section\",\n \"text\": {\n \"type\": \"mrkdwn\",\n \"text\": \":rotating_light: The following Issues transitioned to Negative Sentiment\"\n }\n },\n {\n \"type\": \"divider\"\n },\n ...($('Deduplicate Notifications').all().map(item => (\n {\n \"type\": \"section\",\n \"text\": {\n \"type\": \"mrkdwn\",\n \"text\": `**\\n${$json.fields.Summary}`\n }\n }\n )))\n ]\n}\n}}", -"channelId": { -"__rl": true, -"mode": "list", -"value": "C0749JVFERK", -"cachedResultName": "n8n-tickets" -}, -"messageType": "block", -"otherOptions": {} -}, -"credentials": { -"slackApi": { -"id": "VfK3js0YdqBdQLGP", -"name": "Slack account" -} -}, -"executeOnce": true, -"typeVersion": 2.3 -}, -{ -"id": "1f3d30b6-de31-45a8-a872-554c339f112f", -"name": "Sticky Note", -"type": "n8n-nodes-base.stickyNote", -"position": [ --420, --320 -], -"parameters": { -"color": 7, -"width": 660, -"height": 440, -"content": "## 1. Continuously Monitor Active Linear Issues\n[Learn more about the GraphQL node](https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.graphql)\n\nTo keep up with the latest changes in our active Linear tickets, we'll need to use Linear's GraphQL endpoint because filtering is currently unavailable in the official Linear.app node.\n\nFor this demonstration, we'll check for updated tickets every 30mins." -}, -"typeVersion": 1 -}, -{ -"id": "9024512d-5cb9-4e9f-b6e1-495d1a32118a", -"name": "Sticky Note1", -"type": "n8n-nodes-base.stickyNote", -"position": [ -260, --320 -], -"parameters": { -"color": 7, -"width": 640, -"height": 560, -"content": "## 2. 
Sentiment Analysis on Current Issue Activity\n[Learn more about the Information Extractor node](https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.information-extractor)\n\nWith our recently updated posts, we can use our AI to perform a quick sentiment analysis on the ongoing conversation to check the overall mood of the support issue. This is a great way to check how things are generally going in the support queue; positive should be normal but negative could indicate some uncomfortableness or even frustration." -}, -"typeVersion": 1 -}, -{ -"id": "233ebd6d-38cb-4f2d-84b5-29c97d30d77b", -"name": "Sticky Note2", -"type": "n8n-nodes-base.stickyNote", -"position": [ -920, --320 -], -"parameters": { -"color": 7, -"width": 840, -"height": 560, -"content": "## 3. Capture and Track Results in Airtable\n[Learn more about the Airtable node](https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-base.airtable)\n\nNext, we can capture this analysis in our insights database as means for human review. When the issue is new, we can create a new row but if the issue exists, we will update it's existing row instead.\n\nWhen updating an existing row, we move its previous \"current sentiment\" value into the \"previous sentiment\" column and replace with our new current sentiment. This gives us a \"sentiment transition\" which will be useful in the next step.\n\nCheck out the Airtable here: https://airtable.com/appViDaeaFw4qv9La/shrq6HgeYzpW6uwXL" -}, -"typeVersion": 1 -}, -{ -"id": "a2229225-b580-43cb-b234-4f69cb5924fd", -"name": "Sticky Note3", -"type": "n8n-nodes-base.stickyNote", -"position": [ -1800, --320 -], -"parameters": { -"color": 7, -"width": 920, -"height": 560, -"content": "## 4. 
Get Notified when Sentiment becomes Negative\n[Learn more about the Slack node](https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-base.slack/)\n\nA good use-case for tracking sentiment transitions could be to be alerted if ever an issue moves from a non-negative sentiment to a negative one. This could be a signal of issue handling troubles which may require attention before it escalates.\n\nIn this demonstration, we use the Airtable trigger to catch rows which have their sentiment column updated and check for the non-negative-to-negative sentiment transition using the switch node. For those matching rows, we combine add send a notification via slack. A cool trick is to use the \"remove duplication\" node to prevent repeat notifications for the same updates - here we combine the Linear issue key and the row's last modified date." -}, -"typeVersion": 1 -}, -{ -"id": "6f26769e-ec5d-46d0-ae0a-34148b24e6a2", -"name": "Sticky Note4", -"type": "n8n-nodes-base.stickyNote", -"position": [ --940, --720 -], -"parameters": { -"width": 480, -"height": 840, -"content": "## Try It Out!\n### This n8n template performs continous monitoring on Linear Issue conversations performing sentiment analysis and alerting when the sentiment becomes negative.\nThis is helpful to quickly identify difficult customer support situations early and prioritising them before they get out of hand.\n\n## How it works\n* A scheduled trigger is used to fetch recently updated issues in Linear using the GraphQL node.\n* Each issue's comments thread is passed into a simple Information Extractor node to identify the overall sentiment.\n* The resulting sentiment analysis combined with the some issue details are uploaded to Airtable for review.\n* When the template is re-run at a later date, each issue is re-analysed for sentiment\n* Each issue's new sentiment state is saved to the airtable whilst its previous state is moved to the \"previous sentiment\" column.\n* An Airtable trigger is used to 
watch for recently updated rows\n* Each matching Airtable row is filtered to check if it has a previous non-negative state but now has a negative state in its current sentiment.\n* The results are sent via notification to a team slack channel for priority.\n\n**Check out the sample Airtable here**: https://airtable.com/appViDaeaFw4qv9La/shrq6HgeYzpW6uwXL\n\n## How to use\n* Modify the GraphQL filter to fetch issues to a relevant issue type, team or person.\n* Update the Slack channel to ensure messages are sent to the correct location.\n\n### Need Help?\nJoin the [Discord](https://discord.com/invite/XPKeKXeB7d) or ask in the [Forum](https://community.n8n.io/)!\n\nHappy Hacking!" -}, -"typeVersion": 1 -} -], -"pinData": {}, -"connections": { -"Update Row": { -"main": [ -[ -{ -"node": "For Each Issue...", -"type": "main", -"index": 0 -} -] -] -}, -"Copy of Issue": { -"main": [ -[ -{ -"node": "Get Existing Sentiment", -"type": "main", -"index": 0 -} -] -] -}, -"Issues to List": { -"main": [ -[ -{ -"node": "Sentiment over Issue Comments", -"type": "main", -"index": 0 -} -] -] -}, -"Airtable Trigger": { -"main": [ -[ -{ -"node": "Sentiment Transition", -"type": "main", -"index": 0 -} -] -] -}, -"Schedule Trigger": { -"main": [ -[ -{ -"node": "Fetch Active Linear Issues", -"type": "main", -"index": 0 -} -] -] -}, -"For Each Issue...": { -"main": [ -[], -[ -{ -"node": "Copy of Issue", -"type": "main", -"index": 0 -} -] -] -}, -"OpenAI Chat Model": { -"ai_languageModel": [ -[ -{ -"node": "Sentiment over Issue Comments", -"type": "ai_languageModel", -"index": 0 -} -] -] -}, -"Sentiment Transition": { -"main": [ -[ -{ -"node": "Deduplicate Notifications", -"type": "main", -"index": 0 -} -] -] -}, -"Get Existing Sentiment": { -"main": [ -[ -{ -"node": "Update Row", -"type": "main", -"index": 0 -} -] -] -}, -"Deduplicate Notifications": { -"main": [ -[ -{ -"node": "Report Issue Negative Transition", -"type": "main", -"index": 0 -} -] -] -}, -"Combine Sentiment Analysis": { 
-"main": [ -[ -{ -"node": "For Each Issue...", -"type": "main", -"index": 0 -} -] -] -}, -"Fetch Active Linear Issues": { -"main": [ -[ -{ -"node": "Issues to List", -"type": "main", -"index": 0 -} -] -] -}, -"Sentiment over Issue Comments": { -"main": [ -[ -{ -"node": "Combine Sentiment Analysis", -"type": "main", -"index": 0 -} -] -] -} -} -} \ No newline at end of file diff --git a/Slack/Slack slash commands AI Chat Bot.txt b/Slack/Slack slash commands AI Chat Bot.json similarity index 100% rename from Slack/Slack slash commands AI Chat Bot.txt rename to Slack/Slack slash commands AI Chat Bot.json diff --git a/Slack/Venafi Cloud Slack Cert Bot.txt b/Slack/Venafi Cloud Slack Cert Bot.json similarity index 100% rename from Slack/Venafi Cloud Slack Cert Bot.txt rename to Slack/Venafi Cloud Slack Cert Bot.json diff --git a/Telegram/AI-Powered Children_s Arabic Storytelling on Telegram.txt b/Telegram/AI-Powered Children_s Arabic Storytelling on Telegram.json similarity index 100% rename from Telegram/AI-Powered Children_s Arabic Storytelling on Telegram.txt rename to Telegram/AI-Powered Children_s Arabic Storytelling on Telegram.json diff --git a/Telegram/AI-Powered Children_s English Storytelling on Telegram with OpenAI.txt b/Telegram/AI-Powered Children_s English Storytelling on Telegram with OpenAI.json similarity index 100% rename from Telegram/AI-Powered Children_s English Storytelling on Telegram with OpenAI.txt rename to Telegram/AI-Powered Children_s English Storytelling on Telegram with OpenAI.json diff --git a/Telegram/Agentic Telegram AI bot with with LangChain nodes and new tools.txt b/Telegram/Agentic Telegram AI bot with with LangChain nodes and new tools.json similarity index 100% rename from Telegram/Agentic Telegram AI bot with with LangChain nodes and new tools.txt rename to Telegram/Agentic Telegram AI bot with with LangChain nodes and new tools.json diff --git a/Telegram/Angie, Personal AI Assistant with Telegram Voice and Text.txt 
b/Telegram/Angie, Personal AI Assistant with Telegram Voice and Text.json similarity index 100% rename from Telegram/Angie, Personal AI Assistant with Telegram Voice and Text.txt rename to Telegram/Angie, Personal AI Assistant with Telegram Voice and Text.json diff --git a/Telegram/Automated AI image analysis and response via Telegram.txt b/Telegram/Automated AI image analysis and response via Telegram.json similarity index 100% rename from Telegram/Automated AI image analysis and response via Telegram.txt rename to Telegram/Automated AI image analysis and response via Telegram.json diff --git a/Telegram/Chat with OpenAIs GPT via a simple Telegram Bot.txt b/Telegram/Chat with OpenAIs GPT via a simple Telegram Bot.json similarity index 100% rename from Telegram/Chat with OpenAIs GPT via a simple Telegram Bot.txt rename to Telegram/Chat with OpenAIs GPT via a simple Telegram Bot.json diff --git a/Telegram/Detect toxic language in Telegram messages.txt b/Telegram/Detect toxic language in Telegram messages.json similarity index 91% rename from Telegram/Detect toxic language in Telegram messages.txt rename to Telegram/Detect toxic language in Telegram messages.json index 8b4622b..ad26d4a 100644 --- a/Telegram/Detect toxic language in Telegram messages.txt +++ b/Telegram/Detect toxic language in Telegram messages.json @@ -145,4 +145,4 @@ ] } } -}sDetect toxic language in Telegram messages \ No newline at end of file +} diff --git a/Telegram/Image Creation with OpenAI and Telegram.txt b/Telegram/Image Creation with OpenAI and Telegram.json similarity index 100% rename from Telegram/Image Creation with OpenAI and Telegram.txt rename to Telegram/Image Creation with OpenAI and Telegram.json diff --git a/Telegram/Send a random recipe once a day to Telegram.txt b/Telegram/Send a random recipe once a day to Telegram.json similarity index 100% rename from Telegram/Send a random recipe once a day to Telegram.txt rename to Telegram/Send a random recipe once a day to Telegram.json 
diff --git a/Telegram/Telegram AI Bot_ NeurochainAI Text & Image - NeurochainAI Basic API Integration.txt b/Telegram/Telegram AI Bot_ NeurochainAI Text & Image - NeurochainAI Basic API Integration.json similarity index 100% rename from Telegram/Telegram AI Bot_ NeurochainAI Text & Image - NeurochainAI Basic API Integration.txt rename to Telegram/Telegram AI Bot_ NeurochainAI Text & Image - NeurochainAI Basic API Integration.json diff --git a/Telegram/Telegram AI Chatbot.txt b/Telegram/Telegram AI Chatbot.json similarity index 100% rename from Telegram/Telegram AI Chatbot.txt rename to Telegram/Telegram AI Chatbot.json diff --git a/Telegram/Telegram AI bot assistant_ ready-made template for voice & text messages.txt b/Telegram/Telegram AI bot assistant_ ready-made template for voice & text messages.json similarity index 100% rename from Telegram/Telegram AI bot assistant_ ready-made template for voice & text messages.txt rename to Telegram/Telegram AI bot assistant_ ready-made template for voice & text messages.json diff --git a/Telegram/Telegram AI bot with LangChain nodes.txt b/Telegram/Telegram AI bot with LangChain nodes.json similarity index 100% rename from Telegram/Telegram AI bot with LangChain nodes.txt rename to Telegram/Telegram AI bot with LangChain nodes.json diff --git a/Telegram/Telegram Bot with Supabase memory and OpenAI assistant integration.txt b/Telegram/Telegram Bot with Supabase memory and OpenAI assistant integration.json similarity index 100% rename from Telegram/Telegram Bot with Supabase memory and OpenAI assistant integration.txt rename to Telegram/Telegram Bot with Supabase memory and OpenAI assistant integration.json diff --git a/Telegram/Telegram chat with PDF.txt b/Telegram/Telegram chat with PDF.json similarity index 100% rename from Telegram/Telegram chat with PDF.txt rename to Telegram/Telegram chat with PDF.json diff --git a/Telegram/Telegram to Spotify with OpenAI.txt b/Telegram/Telegram to Spotify with OpenAI.json similarity 
index 100% rename from Telegram/Telegram to Spotify with OpenAI.txt rename to Telegram/Telegram to Spotify with OpenAI.json diff --git a/Telegram/Translate Telegram audio messages with AI (55 supported languages).txt b/Telegram/Translate Telegram audio messages with AI (55 supported languages).json similarity index 100% rename from Telegram/Translate Telegram audio messages with AI (55 supported languages).txt rename to Telegram/Translate Telegram audio messages with AI (55 supported languages).json diff --git a/Telegram/🐋🤖 DeepSeek AI Agent + Telegram + LONG TERM Memory 🧠.txt b/Telegram/🐋🤖 DeepSeek AI Agent + Telegram + LONG TERM Memory 🧠.json similarity index 100% rename from Telegram/🐋🤖 DeepSeek AI Agent + Telegram + LONG TERM Memory 🧠.txt rename to Telegram/🐋🤖 DeepSeek AI Agent + Telegram + LONG TERM Memory 🧠.json diff --git a/Telegram/🤖 Telegram Messaging Agent for Text_Audio_Images.txt b/Telegram/🤖 Telegram Messaging Agent for Text_Audio_Images.json similarity index 100% rename from Telegram/🤖 Telegram Messaging Agent for Text_Audio_Images.txt rename to Telegram/🤖 Telegram Messaging Agent for Text_Audio_Images.json diff --git a/Telegram/🤖🧠 AI Agent Chatbot + LONG TERM Memory + Note Storage + Telegram.txt b/Telegram/🤖🧠 AI Agent Chatbot + LONG TERM Memory + Note Storage + Telegram.json similarity index 100% rename from Telegram/🤖🧠 AI Agent Chatbot + LONG TERM Memory + Note Storage + Telegram.txt rename to Telegram/🤖🧠 AI Agent Chatbot + LONG TERM Memory + Note Storage + Telegram.json diff --git a/WhatsApp/Automate Sales Meeting Prep with AI & APIFY Sent To WhatsApp.txt b/WhatsApp/Automate Sales Meeting Prep with AI & APIFY Sent To WhatsApp.json similarity index 100% rename from WhatsApp/Automate Sales Meeting Prep with AI & APIFY Sent To WhatsApp.txt rename to WhatsApp/Automate Sales Meeting Prep with AI & APIFY Sent To WhatsApp.json diff --git a/WhatsApp/Building Your First WhatsApp Chatbot (1).txt b/WhatsApp/Building Your First WhatsApp Chatbot.json similarity 
index 100% rename from WhatsApp/Building Your First WhatsApp Chatbot (1).txt rename to WhatsApp/Building Your First WhatsApp Chatbot.json diff --git a/WhatsApp/Building Your First WhatsApp Chatbot.txt b/WhatsApp/Building Your First WhatsApp Chatbot.txt deleted file mode 100644 index 9263c95..0000000 --- a/WhatsApp/Building Your First WhatsApp Chatbot.txt +++ /dev/null @@ -1,700 +0,0 @@ -{ -"meta": { -"instanceId": "408f9fb9940c3cb18ffdef0e0150fe342d6e655c3a9fac21f0f644e8bedabcd9" -}, -"nodes": [ -{ -"id": "77ee6494-4898-47dc-81d9-35daf6f0beea", -"name": "WhatsApp Trigger", -"type": "n8n-nodes-base.whatsAppTrigger", -"position": [ -1360, --280 -], -"webhookId": "aaa71f03-f7af-4d18-8d9a-0afb86f1b554", -"parameters": { -"updates": [ -"messages" -] -}, -"credentials": { -"whatsAppTriggerApi": { -"id": "H3uYNtpeczKMqtYm", -"name": "WhatsApp OAuth account" -} -}, -"typeVersion": 1 -}, -{ -"id": "57210e27-1f89-465a-98cc-43f890a4bf58", -"name": "OpenAI Chat Model", -"type": "@n8n/n8n-nodes-langchain.lmChatOpenAi", -"position": [ -1960, --200 -], -"parameters": { -"model": "gpt-4o-2024-08-06", -"options": {} -}, -"credentials": { -"openAiApi": { -"id": "8gccIjcuf3gvaoEr", -"name": "OpenAi account" -} -}, -"typeVersion": 1 -}, -{ -"id": "e1053235-0ade-4e36-9ad2-8b29c78fced8", -"name": "Window Buffer Memory", -"type": "@n8n/n8n-nodes-langchain.memoryBufferWindow", -"position": [ -2080, --200 -], -"parameters": { -"sessionKey": "=whatsapp-75-{{ $json.messages[0].from }}", -"sessionIdType": "customKey" -}, -"typeVersion": 1.2 -}, -{ -"id": "69f1b78b-7c93-4713-863a-27e04809996f", -"name": "Vector Store Tool", -"type": "@n8n/n8n-nodes-langchain.toolVectorStore", -"position": [ -2200, --200 -], -"parameters": { -"name": "query_product_brochure", -"description": "Call this tool to query the product brochure. Valid for the year 2024." 
-}, -"typeVersion": 1 -}, -{ -"id": "170e8f7d-7e14-48dd-9f80-5352cc411fc1", -"name": "Embeddings OpenAI", -"type": "@n8n/n8n-nodes-langchain.embeddingsOpenAi", -"position": [ -2200, -80 -], -"parameters": { -"model": "text-embedding-3-small", -"options": {} -}, -"credentials": { -"openAiApi": { -"id": "8gccIjcuf3gvaoEr", -"name": "OpenAi account" -} -}, -"typeVersion": 1 -}, -{ -"id": "ee78320b-d407-49e8-b4b8-417582a44709", -"name": "OpenAI Chat Model1", -"type": "@n8n/n8n-nodes-langchain.lmChatOpenAi", -"position": [ -2440, --60 -], -"parameters": { -"model": "gpt-4o-2024-08-06", -"options": {} -}, -"credentials": { -"openAiApi": { -"id": "8gccIjcuf3gvaoEr", -"name": "OpenAi account" -} -}, -"typeVersion": 1 -}, -{ -"id": "9dd89378-5acf-4ca6-8d84-e6e64254ed02", -"name": "When clicking ‘Test workflow’", -"type": "n8n-nodes-base.manualTrigger", -"position": [ -0, --240 -], -"parameters": {}, -"typeVersion": 1 -}, -{ -"id": "e68fc137-1bcb-43f0-b597-3ae07f380c15", -"name": "Embeddings OpenAI1", -"type": "@n8n/n8n-nodes-langchain.embeddingsOpenAi", -"position": [ -760, --20 -], -"parameters": { -"model": "text-embedding-3-small", -"options": {} -}, -"credentials": { -"openAiApi": { -"id": "8gccIjcuf3gvaoEr", -"name": "OpenAi account" -} -}, -"typeVersion": 1 -}, -{ -"id": "2d31e92b-18d4-4f6b-8cdb-bed0056d50d7", -"name": "Default Data Loader", -"type": "@n8n/n8n-nodes-langchain.documentDefaultDataLoader", -"position": [ -900, --20 -], -"parameters": { -"options": {}, -"jsonData": "={{ $('Extract from File').item.json.text }}", -"jsonMode": "expressionData" -}, -"typeVersion": 1 -}, -{ -"id": "ca0c015e-fba2-4dca-b0fe-bac66681725a", -"name": "Recursive Character Text Splitter", -"type": "@n8n/n8n-nodes-langchain.textSplitterRecursiveCharacterTextSplitter", -"position": [ -900, -100 -], -"parameters": { -"options": {}, -"chunkSize": 2000, -"chunkOverlap": {} -}, -"typeVersion": 1 -}, -{ -"id": "63abb6b2-b955-4e65-9c63-3211dca65613", -"name": "Extract from File", -"type": 
"n8n-nodes-base.extractFromFile", -"position": [ -360, --240 -], -"parameters": { -"options": {}, -"operation": "pdf" -}, -"typeVersion": 1 -}, -{ -"id": "be2add9c-3670-4196-8c38-82742bf4f283", -"name": "get Product Brochure", -"type": "n8n-nodes-base.httpRequest", -"position": [ -180, --240 -], -"parameters": { -"url": "https://usa.yamaha.com/files/download/brochure/1/1474881/Yamaha-Powered-Loudspeakers-brochure-2024-en-web.pdf", -"options": {} -}, -"typeVersion": 4.2 -}, -{ -"id": "1ae5a311-36d7-4454-ab14-6788d1331780", -"name": "Reply To User", -"type": "n8n-nodes-base.whatsApp", -"position": [ -2820, --280 -], -"parameters": { -"textBody": "={{ $json.output }}", -"operation": "send", -"phoneNumberId": "477115632141067", -"requestOptions": {}, -"additionalFields": { -"previewUrl": false -}, -"recipientPhoneNumber": "={{ $('WhatsApp Trigger').item.json.messages[0].from }}" -}, -"credentials": { -"whatsAppApi": { -"id": "9SFJPeqrpChOkAmw", -"name": "WhatsApp account" -} -}, -"typeVersion": 1 -}, -{ -"id": "b6efba81-18b0-4378-bb91-51f39ca57f3e", -"name": "Reply To User1", -"type": "n8n-nodes-base.whatsApp", -"position": [ -1760, -80 -], -"parameters": { -"textBody": "=I'm unable to process non-text messages. Please send only text messages. 
Thanks!", -"operation": "send", -"phoneNumberId": "477115632141067", -"requestOptions": {}, -"additionalFields": { -"previewUrl": false -}, -"recipientPhoneNumber": "={{ $('WhatsApp Trigger').item.json.messages[0].from }}" -}, -"credentials": { -"whatsAppApi": { -"id": "9SFJPeqrpChOkAmw", -"name": "WhatsApp account" -} -}, -"typeVersion": 1 -}, -{ -"id": "52decd86-ac6c-4d91-a938-86f93ec5f822", -"name": "Product Catalogue", -"type": "@n8n/n8n-nodes-langchain.vectorStoreInMemory", -"position": [ -2200, --60 -], -"parameters": { -"memoryKey": "whatsapp-75" -}, -"typeVersion": 1 -}, -{ -"id": "6dd5a652-2464-4ab8-8e5f-568529299523", -"name": "Sticky Note", -"type": "n8n-nodes-base.stickyNote", -"position": [ --88.75, --473.4375 -], -"parameters": { -"color": 7, -"width": 640.4375, -"height": 434.6875, -"content": "## 1. Download Product Brochure PDF\n[Read more about the HTTP Request Tool](https://docs.n8n.io/integrations/builtin/core-nodes/n8n-nodes-base.httprequest)\n\nImport your marketing PDF document to build your vector store. This will be used as the knowledgebase by the Sales AI Agent.\n\nFor this demonstration, we'll use the HTTP request node to import the YAMAHA POWERED LOUDSPEAKERS 2024 brochure ([Source](https://usa.yamaha.com/files/download/brochure/1/1474881/Yamaha-Powered-Loudspeakers-brochure-2024-en-web.pdf)) and an Extract from File node to extract the text contents. " -}, -"typeVersion": 1 -}, -{ -"id": "116663bc-d8d6-41a5-93dc-b219adbb2235", -"name": "Sticky Note1", -"type": "n8n-nodes-base.stickyNote", -"position": [ -580, --476 -], -"parameters": { -"color": 7, -"width": 614.6875, -"height": 731.1875, -"content": "## 2. Create Product Brochure Vector Store\n[Read more about the In-Memory Vector Store](https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.vectorstoreinmemory/)\n\nVector stores are powerful databases which serve the purpose of matching a user's questions to relevant parts of a document. 
By creating a vector store of our product catalog, we'll allow users to query using natural language.\n\nTo keep things simple, we'll use the **In-memory Vector Store** which comes built-in to n8n and doesn't require a separate service. For production deployments, I'd recommend replacing the in-memory vector store with either [Qdrant](https://qdrant.tech) or [Pinecone](https://pinecone.io)." -}, -"typeVersion": 1 -}, -{ -"id": "86bd5334-d735-4650-aeff-06230119d705", -"name": "Create Product Catalogue", -"type": "@n8n/n8n-nodes-langchain.vectorStoreInMemory", -"position": [ -760, --200 -], -"parameters": { -"mode": "insert", -"memoryKey": "whatsapp-75", -"clearStore": true -}, -"typeVersion": 1 -}, -{ -"id": "b8078b0d-cbd7-423f-bb30-13902988be38", -"name": "Sticky Note2", -"type": "n8n-nodes-base.stickyNote", -"position": [ -1254, --552 -], -"parameters": { -"color": 7, -"width": 546.6875, -"height": 484.1875, -"content": "## 3. Use the WhatsApp Trigger\n[Learn more about the WhatsApp Trigger](https://docs.n8n.io/integrations/builtin/trigger-nodes/n8n-nodes-base.whatsapptrigger/)\n\nThe WhatsApp Trigger allows you to receive incoming WhatsApp messages from customers. It requires a bit of setup so remember to follow the documentation carefully! Once ready however, it's quite easy to build powerful workflows which are easily accessible to users.\n\nNote that WhatsApp can send many message types such as audio and video so in this demonstration, we'll filter them out and just accept the text messages." -}, -"typeVersion": 1 -}, -{ -"id": "5bf7ed07-282b-4198-aa90-3e5ae5180404", -"name": "Sticky Note3", -"type": "n8n-nodes-base.stickyNote", -"position": [ -1640, -280 -], -"parameters": { -"width": 338, -"height": 92, -"content": "### Want to handle all message types?\nCheck out my other WhatsApp template in my creator page! 
https://n8n.io/creators/jimleuk/" -}, -"typeVersion": 1 -}, -{ -"id": "a3661b59-25d2-446e-8462-32b4d692b69d", -"name": "Sticky Note4", -"type": "n8n-nodes-base.stickyNote", -"position": [ -1640, --40 -], -"parameters": { -"color": 7, -"width": 337.6875, -"height": 311.1875, -"content": "### 3a. Handle Unsupported Message Types\nFor non-text messages, we'll just reply with a simple message to inform the sender." -}, -"typeVersion": 1 -}, -{ -"id": "ea3c9ee1-505a-40e7-82fe-9169bdbb80af", -"name": "Sticky Note5", -"type": "n8n-nodes-base.stickyNote", -"position": [ -1840, --682.5 -], -"parameters": { -"color": 7, -"width": 746.6875, -"height": 929.1875, -"content": "## 4. Sales AI Agent Responds To Customers\n[Learn more about using AI Agents](https://docs.n8n.io/integrations/builtin/cluster-nodes/root-nodes/n8n-nodes-langchain.agent/)\n\nn8n's AI agents are powerful nodes which make it incredibly easy to use state-of-the-art AI in your workflows. Not only do they have the ability to remember conversations per individual customer but also tap into resources such as our product catalogue vector store to pull factual information and data for every question.\n\nIn this demonstration, we use an AI agent which is directed to help the user navigate the product brochure. A Chat memory subnode is attached to identify and keep track of the customer session. A Vector store tool is added to allow the Agent to tap into the product catalogue knowledgebase we built earlier." -}, -"typeVersion": 1 -}, -{ -"id": "5c72df8d-bca1-4634-b1ed-61ffec8bd103", -"name": "Sticky Note6", -"type": "n8n-nodes-base.stickyNote", -"position": [ -2620, --560 -], -"parameters": { -"color": 7, -"width": 495.4375, -"height": 484.1875, -"content": "## 5. Repond to WhatsApp User\n[Learn more about the WhatsApp Node](https://docs.n8n.io/integrations/builtin/app-nodes/n8n-nodes-base.whatsapp/)\n\nThe WhatsApp node is the go-to if you want to interact with WhatsApp users. 
With this node, you can send text, images, audio and video messages as well as use your WhatsApp message templates.\n\nHere, we'll keep it simple by replying with a text message which is the output of the AI agent." -}, -"typeVersion": 1 -}, -{ -"id": "48ec809f-ca0e-4052-b403-9ad7077b3fff", -"name": "Sticky Note7", -"type": "n8n-nodes-base.stickyNote", -"position": [ --520, --620 -], -"parameters": { -"width": 401.25, -"height": 582.6283033962263, -"content": "## Try It Out!\n\n### This n8n template builds a simple WhatsApp chabot acting as a Sales Agent. The Agent is backed by a product catalog vector store to better answer user's questions.\n\n* This template is in 2 parts: creating the product catalog vector store and building the WhatsApp AI chatbot.\n* A product brochure is imported via HTTP request node and its text contents extracted.\n* The text contents are then uploaded to the in-memory vector store to build a knowledgebase for the chatbot.\n* A WhatsApp trigger is used to capture messages from customers where non-text messages are filtered out.\n* The customer's message is sent to the AI Agent which queries the product catalogue using the vector store tool.\n* The Agent's response is sent back to the user via the WhatsApp node.\n\n### Need Help?\nJoin the [Discord](https://discord.com/invite/XPKeKXeB7d) or ask in the [Forum](https://community.n8n.io/)!" 
-}, -"typeVersion": 1 -}, -{ -"id": "87cf9b41-66de-49a7-aeb0-c8809191b5a0", -"name": "Handle Message Types", -"type": "n8n-nodes-base.switch", -"position": [ -1560, --280 -], -"parameters": { -"rules": { -"values": [ -{ -"outputKey": "Supported", -"conditions": { -"options": { -"version": 2, -"leftValue": "", -"caseSensitive": true, -"typeValidation": "strict" -}, -"combinator": "and", -"conditions": [ -{ -"operator": { -"type": "string", -"operation": "equals" -}, -"leftValue": "={{ $json.messages[0].type }}", -"rightValue": "text" -} -] -}, -"renameOutput": true -}, -{ -"outputKey": "Not Supported", -"conditions": { -"options": { -"version": 2, -"leftValue": "", -"caseSensitive": true, -"typeValidation": "strict" -}, -"combinator": "and", -"conditions": [ -{ -"id": "89971d8c-a386-4e77-8f6c-f491a8e84cb6", -"operator": { -"type": "string", -"operation": "notEquals" -}, -"leftValue": "={{ $json.messages[0].type }}", -"rightValue": "text" -} -] -}, -"renameOutput": true -} -] -}, -"options": {} -}, -"typeVersion": 3.2 -}, -{ -"id": "e52f0a50-0c34-4c4a-b493-4c42ba112277", -"name": "Sticky Note8", -"type": "n8n-nodes-base.stickyNote", -"position": [ --80, --20 -], -"parameters": { -"color": 5, -"width": 345.10906976744184, -"height": 114.53583720930231, -"content": "### You only have to run this part once!\nRun this step to populate our product catalogue vector. Run again if you want to update the vector store with a new version." -}, -"typeVersion": 1 -}, -{ -"id": "c1a7d6d1-191e-4343-af9f-f2c9eb4ecf49", -"name": "Sticky Note9", -"type": "n8n-nodes-base.stickyNote", -"position": [ -1260, --40 -], -"parameters": { -"color": 5, -"width": 364.6293255813954, -"height": 107.02804651162779, -"content": "### Activate your workflow to use!\nTo start using the WhatsApp chatbot, you'll need to activate the workflow. If you are self-hosting ensure WhatsApp is able to connect to your server." 
-}, -"typeVersion": 1 -}, -{ -"id": "a36524d0-22a6-48cc-93fe-b4571cec428a", -"name": "AI Sales Agent", -"type": "@n8n/n8n-nodes-langchain.agent", -"position": [ -1960, --400 -], -"parameters": { -"text": "={{ $json.messages[0].text.body }}", -"options": { -"systemMessage": "You are an assistant working for a company who sells Yamaha Powered Loudspeakers and helping the user navigate the product catalog for the year 2024. Your goal is not to facilitate a sale but if the user enquires, direct them to the appropriate website, url or contact information.\n\nDo your best to answer any questions factually. If you don't know the answer or unable to obtain the information from the datastore, then tell the user so." -}, -"promptType": "define" -}, -"typeVersion": 1.6 -} -], -"pinData": {}, -"connections": { -"AI Sales Agent": { -"main": [ -[ -{ -"node": "Reply To User", -"type": "main", -"index": 0 -} -] -] -}, -"WhatsApp Trigger": { -"main": [ -[ -{ -"node": "Handle Message Types", -"type": "main", -"index": 0 -} -] -] -}, -"Embeddings OpenAI": { -"ai_embedding": [ -[ -{ -"node": "Product Catalogue", -"type": "ai_embedding", -"index": 0 -} -] -] -}, -"Extract from File": { -"main": [ -[ -{ -"node": "Create Product Catalogue", -"type": "main", -"index": 0 -} -] -] -}, -"OpenAI Chat Model": { -"ai_languageModel": [ -[ -{ -"node": "AI Sales Agent", -"type": "ai_languageModel", -"index": 0 -} -] -] -}, -"Product Catalogue": { -"ai_vectorStore": [ -[ -{ -"node": "Vector Store Tool", -"type": "ai_vectorStore", -"index": 0 -} -] -] -}, -"Vector Store Tool": { -"ai_tool": [ -[ -{ -"node": "AI Sales Agent", -"type": "ai_tool", -"index": 0 -} -] -] -}, -"Embeddings OpenAI1": { -"ai_embedding": [ -[ -{ -"node": "Create Product Catalogue", -"type": "ai_embedding", -"index": 0 -} -] -] -}, -"OpenAI Chat Model1": { -"ai_languageModel": [ -[ -{ -"node": "Vector Store Tool", -"type": "ai_languageModel", -"index": 0 -} -] -] -}, -"Default Data Loader": { -"ai_document": [ -[ -{ -"node": 
"Create Product Catalogue", -"type": "ai_document", -"index": 0 -} -] -] -}, -"Handle Message Types": { -"main": [ -[ -{ -"node": "AI Sales Agent", -"type": "main", -"index": 0 -} -], -[ -{ -"node": "Reply To User1", -"type": "main", -"index": 0 -} -] -] -}, -"Window Buffer Memory": { -"ai_memory": [ -[ -{ -"node": "AI Sales Agent", -"type": "ai_memory", -"index": 0 -} -] -] -}, -"get Product Brochure": { -"main": [ -[ -{ -"node": "Extract from File", -"type": "main", -"index": 0 -} -] -] -}, -"Recursive Character Text Splitter": { -"ai_textSplitter": [ -[ -{ -"node": "Default Data Loader", -"type": "ai_textSplitter", -"index": 0 -} -] -] -}, -"When clicking ‘Test workflow’": { -"main": [ -[ -{ -"node": "get Product Brochure", -"type": "main", -"index": 0 -} -] -] -} -} -} \ No newline at end of file diff --git a/WhatsApp/Complete business WhatsApp AI-Powered RAG Chatbot using OpenAI.txt b/WhatsApp/Complete business WhatsApp AI-Powered RAG Chatbot using OpenAI.json similarity index 100% rename from WhatsApp/Complete business WhatsApp AI-Powered RAG Chatbot using OpenAI.txt rename to WhatsApp/Complete business WhatsApp AI-Powered RAG Chatbot using OpenAI.json diff --git a/WhatsApp/Respond to WhatsApp Messages with AI Like a Pro!.txt b/WhatsApp/Respond to WhatsApp Messages with AI Like a Pro!.json similarity index 100% rename from WhatsApp/Respond to WhatsApp Messages with AI Like a Pro!.txt rename to WhatsApp/Respond to WhatsApp Messages with AI Like a Pro!.json diff --git a/WordPress/Auto-Categorize blog posts in wordpress using A.I..txt b/WordPress/Auto-Categorize blog posts in wordpress using A.I..json similarity index 100% rename from WordPress/Auto-Categorize blog posts in wordpress using A.I..txt rename to WordPress/Auto-Categorize blog posts in wordpress using A.I..json diff --git a/WordPress/Auto-Tag Blog Posts in WordPress with AI.txt b/WordPress/Auto-Tag Blog Posts in WordPress with AI.json similarity index 100% rename from WordPress/Auto-Tag Blog Posts in 
WordPress with AI.txt rename to WordPress/Auto-Tag Blog Posts in WordPress with AI.json diff --git a/WordPress/Automate Blog Creation in Brand Voice with AI.txt b/WordPress/Automate Blog Creation in Brand Voice with AI.json similarity index 100% rename from WordPress/Automate Blog Creation in Brand Voice with AI.txt rename to WordPress/Automate Blog Creation in Brand Voice with AI.json diff --git a/WordPress/Automate Content Generator for WordPress with DeepSeek R1.txt b/WordPress/Automate Content Generator for WordPress with DeepSeek R1.json similarity index 100% rename from WordPress/Automate Content Generator for WordPress with DeepSeek R1.txt rename to WordPress/Automate Content Generator for WordPress with DeepSeek R1.json diff --git a/WordPress/WordPress - AI Chatbot to enhance user experience - with Supabase and OpenAI.txt b/WordPress/WordPress - AI Chatbot to enhance user experience - with Supabase and OpenAI.json similarity index 100% rename from WordPress/WordPress - AI Chatbot to enhance user experience - with Supabase and OpenAI.txt rename to WordPress/WordPress - AI Chatbot to enhance user experience - with Supabase and OpenAI.json diff --git a/WordPress/Write a WordPress post with AI (starting from a few keywords).txt b/WordPress/Write a WordPress post with AI (starting from a few keywords).json similarity index 100% rename from WordPress/Write a WordPress post with AI (starting from a few keywords).txt rename to WordPress/Write a WordPress post with AI (starting from a few keywords).json