[{"data":1,"prerenderedAt":9686},["ShallowReactive",2],{"$aTUEVnxcOx":3,"page-/contribution-guide":9253},[4,22,129,230,242,253,265,277,289,301,313,325,337,810,1583,2247,2967,3044,3728,4454,5037,5709,7779,8077,8089,8101,8113,8125,8137,8149,8161,8173,8185,8197,8223,8324,8919],{"id":5,"title":6,"body":7,"description":11,"extension":14,"meta":15,"navigation":17,"path":18,"seo":19,"stem":20,"__hash__":21},"content/Cloud Computing/index.md","Cloud Computing",{"type":8,"value":9,"toc":10},"minimal",[],{"title":11,"searchDepth":12,"depth":12,"links":13},"",2,[],"md",{"author":16},"Mohsen Emami",true,"/cloud-computing",{"title":6,"description":11},"Cloud Computing/index","3nOKr2OI9lc1ElWMT8Wo0Pq60fDkdhgaWBeLKjK5S14",{"id":23,"title":24,"body":25,"description":120,"extension":14,"meta":121,"navigation":17,"path":125,"seo":126,"stem":127,"__hash__":128},"content/Code Clan/Virtual Desktop/FAQ/index.md","Virtual Desktop FAQ",{"type":8,"value":26,"toc":106},[27,32,36,40,43,47,50,54,57,61,64,68,71,75,78,82,85,89,92,96,99,103],[28,29,31],"h3",{"id":30},"q-what-is-a-virtual-desktop-vd","Q: What is a Virtual Desktop (VD)?",[33,34,35],"p",{},"A: A Virtual Desktop (VD) is a desktop environment hosted on a remote server, accessible from various devices over the internet. It allows users to access their desktop, applications, and data from anywhere.",[28,37,39],{"id":38},"q-will-my-vd-perform-the-same-as-my-physical-desktop","Q: Will my VD perform the same as my physical desktop?",[33,41,42],{},"A: The performance of your VD can be comparable to a physical desktop, depending on the resources allocated to it and the speed of your internet connection. Ensure you have a stable and fast internet connection for the best experience.",[28,44,46],{"id":45},"q-should-i-change-my-password-of-the-vd","Q: Should I Change my password of the VD?",[33,48,49],{},"A: Yes, you need to take control of your account as soon as you login to your VD.",[28,51,53],{"id":52},"q-can-i-use-vd-for-personal-use","Q: Can I use VD for personal use?",[33,55,56],{},"A: VDs are meant to be used for work use only.",[28,58,60],{"id":59},"q-do-i-need-to-turn-off-the-vd-at-the-end-of-the-day","Q: Do I need to turn off the VD at the end of the day?",[33,62,63],{},"A: No, you can just simply close your Remote Desktop session or log out of it at the end of the day. Next time you login, you can continue your work as you left it.",[28,65,67],{"id":66},"q-can-i-install-new-software-on-vd","Q: Can I install new software on VD?",[33,69,70],{},"A: Yes, you should have access to install all the tools you need for your work.",[28,72,74],{"id":73},"q-what-about-the-software-licenses-i-need-for-work","Q: What about the software licenses I need for work?",[33,76,77],{},"A: VD comes with a valid Windows License and some other tools out of the box. However, for other software needed for work, the user must coordinate with the Project or Engineering Manager.",[28,79,81],{"id":80},"q-can-i-use-a-vpn-or-proxy-on-vd","Q: Can I use a VPN or proxy on VD?",[33,83,84],{},"A: Yes, you may use a proxy or VPN as long as it does not change your internal IP address. Unfortunately, this restriction means that most VPN tools and settings are not suitable. However, using Outline Client is acceptable.",[28,86,88],{"id":87},"q-is-my-data-secure-on-a-vd","Q: Is my data secure on a VD?",[33,90,91],{},"A: Yes, your VD is hosted in secure data centers and is protected by robust security measures. 
### Q: Can I transfer files between my local device and my VD?

A: Yes, you can transfer files between your local device and your VD using the file transfer features provided by the remote desktop client, such as copy-and-paste.

### Q: Is my VD being monitored?

A: Yes, your VD is monitored using Wazuh, CodeClan, and ActivityWatch agents to track application usage and security conditions. However, we do not have access to your display or your input.

# Virtual Desktop Connection Instructions

Instructions for connecting to the Code Clan Virtual Desktop using Windows or Mac.

## Connect using Windows

If you are using a Windows machine, you can use the RDC (Remote Desktop Connection) app.
Here is what you need to do:

![Remote Desktop Connection](/media/codeclan/virtualdesktop/rdc.png)

- Open the Remote Desktop Connection app on your local Windows machine
- Enter the values for Computer and User name
- Leave "Allow me to save credentials" unchecked

![Remote Desktop Connection - Display](/media/codeclan/virtualdesktop/rdc-display-tab.png)

- Check this box: Use all my monitors for the remote session

![Remote Desktop Connection - Local Resources](/media/codeclan/virtualdesktop/rdc-resources-tab.png)

- Select these options in the Local Resources tab

![Remote Desktop Connection - Connect](/media/codeclan/virtualdesktop/rdc-connect.png)

- Finally, press Connect; you will then be asked for your password

## Connect using MacOS

TODO

## Connect using Linux

TODO
Mac",{"tags":215,"excerpt":220},[123,124,216,217,218,219],"RDC","Windows","Mac","Linux",{"type":8,"value":221},[222,224],[135,223,138],{"id":137},[33,225,141],{},"/code-clan/virtual-desktop/virtual-desktop-connection-instructions",{"title":131,"description":213},"Code Clan/Virtual Desktop/Virtual Desktop Connection Instructions/index","elVWEsgOYh6K4AQ0ELx8WSDUaK7gnb-5J0YWZ_3LzqU",{"id":231,"title":232,"body":233,"description":11,"extension":14,"meta":237,"navigation":17,"path":238,"seo":239,"stem":240,"__hash__":241},"content/Databases/NoSQL/index.md","Database Development",{"type":8,"value":234,"toc":235},[],{"title":11,"searchDepth":12,"depth":12,"links":236},[],{"author":16},"/databases/nosql",{"title":232,"description":11},"Databases/NoSQL/index","in-OtJqm5pUr3HcRn5Uf0VrSiHQO7xNWxwes3GDa3gU",{"id":243,"title":232,"body":244,"description":11,"extension":14,"meta":248,"navigation":17,"path":249,"seo":250,"stem":251,"__hash__":252},"content/Databases/SQL/index.md",{"type":8,"value":245,"toc":246},[],{"title":11,"searchDepth":12,"depth":12,"links":247},[],{"author":16},"/databases/sql",{"title":232,"description":11},"Databases/SQL/index","9Iu9Xwm4zK3C46-3hiA7l6msk5PjnN4vweB-ih5b0Ts",{"id":254,"title":255,"body":256,"description":11,"extension":14,"meta":260,"navigation":17,"path":261,"seo":262,"stem":263,"__hash__":264},"content/E-commerce and Content managements/index.md","E-commerce Development",{"type":8,"value":257,"toc":258},[],{"title":11,"searchDepth":12,"depth":12,"links":259},[],{"author":16},"/e-commerce-and-content-managements",{"title":255,"description":11},"E-commerce and Content managements/index","OaVfAlNpMmCPIFVhDrp_5VRIyptdo6B_6T5yKwswDag",{"id":266,"title":267,"body":268,"description":11,"extension":14,"meta":272,"navigation":17,"path":273,"seo":274,"stem":275,"__hash__":276},"content/Game Development/index.md","Game Development",{"type":8,"value":269,"toc":270},[],{"title":11,"searchDepth":12,"depth":12,"links":271},[],{"author":16},"/game-development",{"title":267,"description":11},"Game Development/index","AKXG57eSpzwFXlKSNpwMmn8mGu5uMJvxxJI6ztSDpkw",{"id":278,"title":279,"body":280,"description":11,"extension":14,"meta":284,"navigation":17,"path":285,"seo":286,"stem":287,"__hash__":288},"content/Mobile Development/Android/index.md","Android Development",{"type":8,"value":281,"toc":282},[],{"title":11,"searchDepth":12,"depth":12,"links":283},[],{"author":16},"/mobile-development/android",{"title":279,"description":11},"Mobile Development/Android/index","yQ6TePpGANTytio8mTvfPz_VuwMruYkuKPbDhRx5ezs",{"id":290,"title":291,"body":292,"description":11,"extension":14,"meta":296,"navigation":17,"path":297,"seo":298,"stem":299,"__hash__":300},"content/Mobile Development/Cross platform/index.md","Cross Platform Development",{"type":8,"value":293,"toc":294},[],{"title":11,"searchDepth":12,"depth":12,"links":295},[],{"author":16},"/mobile-development/cross-platform",{"title":291,"description":11},"Mobile Development/Cross platform/index","3NVcGezXunxpzY5R8DHhhuHEbMBFovYa2lpJbLc2FS8",{"id":302,"title":303,"body":304,"description":11,"extension":14,"meta":308,"navigation":17,"path":309,"seo":310,"stem":311,"__hash__":312},"content/Mobile Development/IOS/index.md","IOS Development",{"type":8,"value":305,"toc":306},[],{"title":11,"searchDepth":12,"depth":12,"links":307},[],{"author":16},"/mobile-development/ios",{"title":303,"description":11},"Mobile 
# Getting Started with AI Development Tools

Refresh your generative AI knowledge and understand the .NET tooling available to help you develop generative AI applications.

---

[![Introduction to Generative AI](content/generative-ai/images/LIM_GAN_01_thumb_w480.png)](https://aka.ms/genainnet/videos/lesson1-genaireview)

_⬆️Click the image to watch the video⬆️_

## What you'll learn in this lesson:

- 🌟 Understand fundamental concepts of generative AI and their applications
- 🔍 Explore the .NET tooling for AI development, including MEAI, Semantic Kernel, and Azure OpenAI

## Generative AI Fundamentals for .NET

Before we dive into some code, let's take a minute to review some generative AI (GenAI) concepts. In this lesson, **Generative AI Fundamentals for .NET**, we'll refresh some fundamental GenAI concepts so you can understand why certain things are done the way they are. And we'll introduce the tooling and SDKs you'll use to build apps, like **MEAI** (Microsoft.Extensions.AI), **Semantic Kernel**, and the **AI Toolkit Extension for VS Code**.

### A quick refresh on Generative AI concepts

Generative AI is a type of artificial intelligence that creates new content, such as text, images, or code, based on patterns and relationships learned from data. Generative AI models can generate human-like responses, understand context, and sometimes even create content that is hard to distinguish from human work.
As you develop your .NET AI applications, you'll work with **generative AI models** to create content. Some capabilities of generative AI models include:

- **Text Generation**: Crafting human-like text for chatbots, content, and text completion.
- **Image Generation and Analysis**: Producing realistic images, enhancing photos, and detecting objects.
- **Code Generation**: Writing code snippets or scripts.

There are specific types of models that are optimized for different tasks. For example, **Small Language Models (SLMs)** are ideal for text generation, while **Large Language Models (LLMs)** are more suitable for complex tasks like code generation or image analysis. From there, different companies and groups develop models, like Microsoft, OpenAI, or Anthropic. The specific one you use will depend on your use case and the capabilities you need.

Of course, the responses from these models are not perfect all the time. You've probably heard about models "hallucinating", or generating incorrect information in an authoritative manner. But you can help guide the model toward better responses by providing it with clear instructions and context. This is where **prompt engineering** comes in.

#### Prompt engineering review

Prompt engineering is the practice of designing effective inputs to guide AI models toward desired outputs. It involves:

- **Clarity**: Making instructions clear and unambiguous.
- **Context**: Providing necessary background information.
- **Constraints**: Specifying any limitations or formats.

Some best practices for prompt engineering include prompt design, clear instructions, task breakdown, one-shot and few-shot learning, and prompt tuning. Plus, trying and testing different prompts to see what works best for your specific use case.

It's also important to note that there are different types of prompts when developing applications. For example, you'll be responsible for setting **system prompts** that set the base rules and context for the model's response. The data the user of your application feeds into the model is known as **user prompts**. And **assistant prompts** are the responses the model generates based on the system and user prompts.

> 🧑‍🏫 **Learn more**: Learn more about prompt engineering in the [Prompt Engineering chapter of the GenAI for Beginners course](https://github.com/microsoft/generative-ai-for-beginners/tree/main/04-prompt-engineering-fundamentals)
And ",[391,501,502],{},"assistant prompts"," are the responses the model generates based on the system and user prompts.",[505,506,507],"blockquote",{},[33,508,509,510,513,514],{},"🧑‍🏫 ",[391,511,512],{},"Learn more",": Learn more about prompt engineering in ",[356,515,518],{"href":516,"rel":517},"https://github.com/microsoft/generative-ai-for-beginners/tree/main/04-prompt-engineering-fundamentals",[360],"Prompt Engineering chapter of GenAI for Beginners course",[459,520,522],{"id":521},"tokens-embeddings-and-agents-oh-my","Tokens, embeddings, and agents - oh my!",[33,524,525,526,529,530,533,534,537],{},"When working with generative AI models, you'll encounter terms like ",[391,527,528],{},"tokens",", ",[391,531,532],{},"embeddings",", and ",[391,535,536],{},"agents",". Here's a quick overview of these concepts:",[150,539,540,546,552,558],{},[153,541,542,545],{},[391,543,544],{},"Tokens",": Tokens are the smallest unit of text in a model. They can be words, characters, or subwords. Tokens are used to represent text data in a format that the model can understand.",[153,547,548,551],{},[391,549,550],{},"Embeddings",": Embeddings are vector representations of tokens. They capture the semantic meaning of words and phrases, allowing models to understand relationships between words and generate contextually relevant responses.",[153,553,554,557],{},[391,555,556],{},"Vector databases",": Vector databases are collections of embeddings that can be used to compare and analyze text data. They enable models to generate responses based on the context of the input data.",[153,559,560,563],{},[391,561,562],{},"Agents",": Agents are AI components that interact with models to generate responses. They can be chatbots, virtual assistants, or other applications that use generative AI models to create content.",[33,565,566],{},"When developing .NET AI applications, you'll work with tokens, embeddings, and agents to create chatbots, content generators, and other AI-powered applications. Understanding these concepts will help you build more effective and efficient AI applications.",[28,568,570],{"id":569},"ai-development-tools-and-libraries-for-net","AI Development Tools and Libraries for .NET",[33,572,573],{},".NET offers a range of tooling for AI development. Let's take a minute to understand some of the tools and libraries available.",[459,575,577],{"id":576},"microsoftextensionsai-meai","Microsoft.Extensions.AI (MEAI)",[33,579,580],{},"The Microsoft.Extensions.AI (MEAI) library provides unified abstractions and middleware to simplify the integration of AI services into .NET applications.",[33,582,583],{},"By providing a consistent API, MEAI enables developers to interact with different AI services, such as small and large language models, embeddings, and even middleware through a common interface. This lowers the friction it takes to build an .NET AI application as you'll be developing against the same API for different services.",[33,585,586],{},"For example, here's the interface you would use to create a chat client with MEAI regardless of the AI service you're using:",[588,589,593],"pre",{"className":590,"code":591,"language":592,"meta":11,"style":11},"language-csharp shiki shiki-themes github-light github-dark","public interface IChatClient : IDisposable\n{\n    Task\u003CChatCompletion> CompleteAsync(...);\n    IAsyncEnumerable\u003CStreamingChatCompletionUpdate> CompleteStreamingAsync(...);\n    ChatClientMetadata Metadata { get; }\n    TService? GetService\u003CTService>(object? 
## AI Development Tools and Libraries for .NET

.NET offers a range of tooling for AI development. Let's take a minute to understand some of the tools and libraries available.

### Microsoft.Extensions.AI (MEAI)

The Microsoft.Extensions.AI (MEAI) library provides unified abstractions and middleware to simplify the integration of AI services into .NET applications.

By providing a consistent API, MEAI enables developers to interact with different AI services, such as small and large language models, embeddings, and even middleware, through a common interface. This lowers the friction of building a .NET AI application, as you'll be developing against the same API for different services.

For example, here's the interface you would use to create a chat client with MEAI, regardless of the AI service you're using:

```csharp
public interface IChatClient : IDisposable
{
    Task<ChatCompletion> CompleteAsync(...);
    IAsyncEnumerable<StreamingChatCompletionUpdate> CompleteStreamingAsync(...);
    ChatClientMetadata Metadata { get; }
    TService? GetService<TService>(object? key = null) where TService : class;
}
```

This way, when using MEAI to build a chat application, you'll develop against the same API surface to get a chat completion or stream the completion, get metadata, or access the underlying AI service. This makes it easier to swap out AI services or add new ones as needed.

Additionally, the library supports middleware components for functionality like logging, caching, and telemetry, making it easier to develop robust AI applications.

![Figure: Microsoft.Extensions.AI (MEAI) library.](content/generative-ai/images/meai-architecture-diagram.png)

Using a unified API, MEAI allows developers to work with different AI services, such as Azure AI Inference, Ollama, and OpenAI, in a consistent manner. This simplifies the integration of AI models into .NET applications, giving developers the flexibility to choose the best AI services for their projects and specific requirements.

> 🏎️ **Quick start**: For a quick start with MEAI, [check out the blog post](https://devblogs.microsoft.com/dotnet/introducing-microsoft-extensions-ai-preview/).
>
> 📖 **Docs**: Learn more about Microsoft.Extensions.AI (MEAI) in the [MEAI documentation](https://learn.microsoft.com/dotnet/ai/ai-extensions)
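As a sketch of what coding against that common surface looks like, the snippet below calls an `IChatClient` for both a one-shot and a streamed completion. It assumes the string-based convenience overloads from the MEAI preview packages used in this course; how `client` is constructed is deliberately left open, since any provider can stand behind it:

```csharp
using Microsoft.Extensions.AI;

// The same code works whatever service backs `client` (OpenAI, Azure, Ollama, ...).
static async Task ChatAsync(IChatClient client)
{
    // One-shot completion.
    ChatCompletion completion = await client.CompleteAsync("Explain tokens in one sentence.");
    Console.WriteLine(completion.Message.Text);

    // Streamed completion: updates arrive while the model is still generating.
    await foreach (StreamingChatCompletionUpdate update in
                   client.CompleteStreamingAsync("Explain embeddings in one sentence."))
    {
        Console.Write(update.Text);
    }
}
```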
### Semantic Kernel (SK)

Semantic Kernel is an open-source SDK that enables developers to integrate generative AI language models into their .NET applications. It provides abstractions for AI services and memory (vector) stores, allowing the creation of plugins that can be automatically orchestrated by AI. It even uses the OpenAPI standard, enabling developers to create AI agents that interact with external APIs.

![Figure: Semantic Kernel (SK) SDK.](content/generative-ai/images/semantic-kernel.png)

Semantic Kernel supports .NET, as well as other languages such as Java and Python, offering a plethora of connectors, functions, and plugins for integration. Some of the key features of Semantic Kernel include:

- **Kernel Core**: Provides the core functionality of Semantic Kernel, including connectors, functions, and plugins, to interact with AI services and models. The kernel is the heart of Semantic Kernel: it is available to services and plugins, retrieves them when needed, monitors agents, and acts as active middleware for your application.
  For example, it can pick the best AI service for a specific task, build and send the prompt to the service, and return the response to the application. Below is a diagram of the Kernel Core in action:
  ![Figure: Semantic Kernel (SK) Kernel Core.](content/generative-ai/images/semantic-kernel-core.png)
- **AI Service Connectors**: Provide an abstraction layer that exposes AI services from multiple providers through a common, consistent interface; examples include Chat Completion, Text to Image, Text to Speech, and Audio to Text.
- **Vector Store Connectors**: Expose vector stores from multiple providers via a common, consistent interface, allowing developers to work with embeddings, vectors, and other data representations.
- **Functions and Plugins**: Offer a range of functions and plugins for common AI tasks, such as function processing, prompt templating, text search, and more. Connecting these to the AI service/model enables implementations of RAG and agents, for example.
- **Prompt Templating**: Provides tools for prompt engineering, including prompt design, testing, and optimization, to enhance AI model performance and accuracy, allowing developers to create and test prompts and optimize them for specific tasks.
- **Filters**: Controls around when and how functions are run, to improve security and responsible AI practices.

In Semantic Kernel, a full loop looks like the diagram below:

![Figure: Semantic Kernel (SK) full loop.](content/generative-ai/images/semantic-kernel-full-loop.png)

> 📖 **Docs**: Learn more about Semantic Kernel in the [Semantic Kernel documentation](https://learn.microsoft.com/semantic-kernel/overview/)
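To give a feel for the SDK, here is a minimal sketch of building a kernel and invoking a prompt. The builder and connector calls follow the Semantic Kernel NuGet packages, but the deployment name, endpoint, and environment variable are placeholders borrowed from the Azure OpenAI setup guide later in this course:

```csharp
using Microsoft.SemanticKernel;

// Register one AI service connector with the kernel; OpenAI, Azure OpenAI, and
// other connectors follow the same builder pattern.
var builder = Kernel.CreateBuilder();
builder.AddAzureOpenAIChatCompletion(
    deploymentName: "gpt-4o-mini",                                 // placeholder
    endpoint: "https://<your-hub-name>.openai.azure.com/",         // placeholder
    apiKey: Environment.GetEnvironmentVariable("AZURE_AI_KEY")!);  // placeholder
Kernel kernel = builder.Build();

// The kernel selects the registered chat service, builds and sends the prompt,
// and returns the model's response to the application.
FunctionResult result = await kernel.InvokePromptAsync("What is Semantic Kernel?");
Console.WriteLine(result);
```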
## Conclusion

Generative AI offers a world of possibilities for developers, enabling them to create innovative applications that generate content, understand context, and provide human-like responses. The .NET ecosystem provides a range of tools and libraries to simplify AI development, making it easier to integrate AI capabilities into .NET applications.

## Next Steps

In the next chapters, we'll explore these scenarios in detail, providing hands-on examples, code snippets, and best practices to help you build real-world AI solutions using .NET!

Next up, we'll get your development environment set up, so you'll be ready to dive into the world of generative AI with .NET!

👉 [Set up your AI development environment](./setup-dev-environment/setting-up-the-development-environment-for-this-course)

# Setting Up the Development Environment for This Course

This lesson will guide you through setting up your development environment for this course. To ensure your success, we've prepared a devcontainer configuration that will provide all the tooling you need to complete the course. You can run the devcontainer in GitHub Codespaces (recommended) or locally on your machine. We also demonstrate how to set up your GitHub access tokens to interact with GitHub Models.
_We have you covered with guides to set up [Azure OpenAI](getting-started-azure-openai) and [Ollama](getting-started-ollama), if desired._

---

## What you'll learn in this lesson:

- ⚡ How to set up a development environment with GitHub Codespaces
- 🤖 Configure your development environment to access LLMs via GitHub Models, Azure OpenAI, or Ollama
- 🛠️ Industry-standard tools configuration with .devcontainer
- 🎯 Finally, everything is ready to complete the rest of the course

Let's dive in and set up your development environment! 🏃‍♂️

[![Watch the Video Tutorial](content/generative-ai/images/LIM_GAN_02_thumb_w480.png)](https://aka.ms/genainnet/videos/lesson2-setupdevenv)

_⬆️Click the image to watch the video⬆️_

## Which AI service should I use for this course?

We provide instructions for setting up your development environment with GitHub Models, Azure OpenAI, and Ollama. You can choose the one that best fits your needs. We recommend using GitHub Models for this course, but you can use any of the three services.

Here's a quick rundown of the services:

- **GitHub Models**: A free service to get started with that allows you to test and interact with various AI models directly within your development environment. It's easy to use from Codespaces and a great way to experiment with different models and understand their capabilities before implementation.
- **Azure OpenAI**: A paid service that provides access to a wide range of AI models. It includes all of the benefits you'd expect from Azure, including robust security and scalability. This is a great option for this course if you already have access to an Azure subscription.
- **Ollama**: Ollama allows you to run AI models locally on your machine, or within a Codespace or devcontainer, for free. It's a great option if you prefer to run the models locally, but it requires more hardware resources and can be slower than the cloud-based options.

> If **GitHub Models** is your choice, follow the rest of this document to set up your development environment with GitHub Models.
>
> - Does **Azure OpenAI** have your eye? [This is the document for you](getting-started-azure-openai).
> - Is **Ollama** your choice? [This guide has the info you need](getting-started-ollama).

### NOTE for local models with Ollama:

The Ollama Codespace will provision all the necessary models that you need.
However, if you are working in local mode, once you have installed Ollama you need to pull the models for the lessons you want to run.

- For lesson "**02 - Setting Up for .NET Development with Generative AI**" and the project [MEAIFunctionsOllama](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/tree/main/02-SetupDevEnvironment/src/BasicChat-03Ollama), you need to pull a model like [phi4-mini](https://ollama.com/library/phi4-mini) or [llama3.2](https://ollama.com/library/llama3.2) by entering in the terminal:

  ```bash
  ollama pull phi4-mini
  ```

- For lesson "**03 - Core Generative AI Techniques with .NET**", when running the Ollama projects like [RAGSimple-10SKOllama](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/tree/main/03-CoreGenerativeAITechniques/src/RAGSimple-10SKOllama), you need to pull the models [all-minilm](https://ollama.com/library/all-minilm) and [phi4-mini](https://ollama.com/library/phi4-mini) by entering in the terminal:

  ```bash
  ollama pull phi4-mini
  ollama pull all-minilm
  ```
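Once a model is pulled, reaching it from .NET goes through the same `IChatClient` abstraction as the other services. A minimal sketch, assuming the `Microsoft.Extensions.AI.Ollama` preview package and Ollama's default local endpoint:

```csharp
using Microsoft.Extensions.AI;

// OllamaChatClient implements IChatClient against a locally running Ollama
// server; the endpoint and model name below match the defaults in this note.
IChatClient client = new OllamaChatClient(
    new Uri("http://localhost:11434"), "phi4-mini");

ChatCompletion response = await client.CompleteAsync("What is .NET?");
Console.WriteLine(response.Message.Text);
```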
## Learn and test AI models with GitHub Models

**GitHub Models** provides an intuitive way to experiment with various AI models directly within your development environment. This feature allows developers to test and interact with different models, understanding their capabilities and limitations before implementation. Through a simple interface, you can explore model responses, evaluate performance, and determine the best fit for your application requirements. Hosted within GitHub's infrastructure, these models offer reliable access and consistent performance, making them ideal for development and testing phases. Best of all, there is a free tier to start your exploration without any cost.

![Image for GitHub Models page, demonstrating multiple generative AI models](content/generative-ai/images/github-models-webpage.png)
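For a sense of how the course's samples reach GitHub Models from code, here is a hedged sketch using the Azure AI Inference endpoint that GitHub Models exposes, together with the personal access token configured in the next section. The client types come from the Azure.AI.Inference and Microsoft.Extensions.AI preview packages, but treat the exact wiring and model name as illustrative, not as the course's canonical sample:

```csharp
using Azure;
using Azure.AI.Inference;
using Microsoft.Extensions.AI;

// GitHub Models is reachable through the Azure AI Inference endpoint, using
// your GitHub personal access token (read from GITHUB_TOKEN here) as the key.
var token = Environment.GetEnvironmentVariable("GITHUB_TOKEN")
    ?? throw new InvalidOperationException("Set the GITHUB_TOKEN environment variable.");

IChatClient client = new ChatCompletionsClient(
        new Uri("https://models.inference.ai.azure.com"),
        new AzureKeyCredential(token))
    .AsChatClient("gpt-4o-mini"); // any model name from the GitHub Models catalog

ChatCompletion response = await client.CompleteAsync("What model are you?");
Console.WriteLine(response.Message.Text);
```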
## Pre-flight check: Setting up GitHub Access Tokens

Before we do anything else, we need to configure essential security credentials that will enable our Codespace to interact with GitHub Models and execute our applications securely.

### Creating a Personal Access Token for GitHub Model access

1. Navigate to [GitHub Settings](https://github.com/settings/profile):
   - Click your profile picture in the top-right corner
   - Select **Settings** from the dropdown menu

   ![GitHub Settings](content/generative-ai/images/settings-github.png)
2. Access [Developer Settings](https://github.com/settings/apps):
   - Scroll down the left sidebar
   - Click on **Developer settings** (usually at the bottom)

   ![Developer Settings](content/generative-ai/images/developer-settings-github.png)
3. Generate a New Token:
   - Select **Personal access tokens** → **Tokens (classic)**

     ![Adding the Tokens (classic)](content/generative-ai/images/tokens-classic-github.png)
   - In the dropdown in the middle of the page, click **Generate new token (classic)**

     ![Create your Token](content/generative-ai/images/token-generate-github.png)
   - Under "Note", provide a descriptive name (e.g., `GenAI-DotNet-Course-Token`)
   - Set an expiration date (recommended: 7 days, for security best practices)
   - There is no need to add any permissions to this token.

> 💡 **Security Tip**: Always use the minimum required scope and the shortest practical expiration time for your access tokens. This follows the principle of least privilege and helps keep your account's tokens safe.
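Your applications read this token from an environment variable at runtime. If you want to verify the variable is visible to .NET before running the samples, a tiny check like the following works; the variable name `GITHUB_TOKEN` is an assumption here, so adjust it to whatever name you export the token under:

```csharp
// Fail fast with a clear message if the token isn't available, instead of
// getting an opaque authentication error from the model endpoint later.
var token = Environment.GetEnvironmentVariable("GITHUB_TOKEN");
Console.WriteLine(string.IsNullOrWhiteSpace(token)
    ? "GITHUB_TOKEN is not set - check your environment or Codespace secrets."
    : $"GITHUB_TOKEN found ({token.Length} characters).");
```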
## Creating a GitHub Codespace

Let's create a GitHub Codespace to use for the rest of this course.

1. Open this repository's main page in a new window by [right-clicking here](https://github.com/microsoft/Generative-AI-for-beginners-dotnet) and selecting **Open in new window** from the context menu
2. Fork this repo into your GitHub account by clicking the **Fork** button in the top right corner of the page
3. Click the **Code** dropdown button and then select the **Codespaces** tab
4. Select the **...** option (the three dots) and choose **New with options...**

![Creating a Codespace with custom options](content/generative-ai/images/creating-codespace.png)

### Choosing your development container

From the **Dev container configuration** dropdown, select one of the following options:

**Option 1: C# (.NET)**: This is the option you should use if you plan to use GitHub Models, and it is our recommended way to complete this course. It has all the core .NET development tools needed for the rest of the course and a fast startup time.

**Option 2: C# (.NET) - Ollama**: Ollama allows you to run the demos without needing to connect to GitHub Models or Azure OpenAI. It includes all the core .NET development tools in addition to Ollama, but has a slower start-up time, five minutes on average. [Follow this guide](getting-started-ollama) if you want to use Ollama.

> 💡 **Tip**: When creating your Codespace, use the region closest to you if you have the option in the menu. Using a region far away can cause errors during creation.

Click the **Create codespace** button to start the Codespace creation process.

![Selecting your development container configuration](content/generative-ai/images/select-container-codespace.png)

## Verifying your Codespace is running correctly with GitHub Models

Once your Codespace is fully loaded and configured, let's run a sample app to verify everything is working correctly:

1. Open the terminal. You can open a terminal window by typing `` Ctrl+` `` (backtick) on Windows or `` Cmd+` `` on macOS.
2. Switch to the proper directory by running the following command:

   ```bash
   cd 02-SetupDevEnvironment/src/BasicChat-01MEAI
   ```
3. Then run the application with the following command:

   ```bash
   dotnet run
   ```
4. It may take a couple of seconds, but eventually the application should output a message similar to the following:

   ```
   AI, or artificial intelligence, refers to the simulation of human intelligence in machines that are programmed to think and learn like humans. It is a broad field of computer science that focuses on creating systems and algorithms capable of performing tasks that typically require human intelligence. These tasks include problem-solving,

   ...
   ```
tasks",[597,1458,1380],{"class":976},[597,1460,1461],{"class":976}," typically",[597,1463,1464],{"class":976}," require",[597,1466,1368],{"class":976},[597,1468,1469],{"class":976}," intelligence.",[597,1471,1472],{"class":976}," These",[597,1474,1456],{"class":976},[597,1476,1477],{"class":976}," include",[597,1479,1480],{"class":976}," problem-solving,\n",[597,1482,1483],{"class":599,"line":12},[597,1484,1485],{"emptyLinePlaceholder":17},"\n",[597,1487,1488],{"class":599,"line":109},[597,1489,1490],{"class":1296},"...\n",[505,1492,1493],{},[33,1494,1495,1496,1499,1500,1505],{},"🙋 ",[391,1497,1498],{},"Need help?",": Something not working? ",[356,1501,1504],{"href":1502,"rel":1503},"https://github.com/microsoft/Generative-AI-for-beginners-dotnet/issues/new?template=Blank+issue",[360],"Open an issue"," and we'll help you out.",[135,1507,1509],{"id":1508},"summary","Summary",[33,1511,1512],{},"In this lesson, you learned how to set up your development environment for the rest of the course. You created a GitHub Codespace and configured it to use GitHub Models, Azure OpenAI, or Ollama. You also learned how to create a personal access token for GitHub Models and how to run a sample application to verify everything is working correctly.",[28,1514,1516],{"id":1515},"additional-resources","Additional Resources",[150,1518,1519,1532,1539],{},[153,1520,1521,1522],{},"Test this guide with other hosting providers!\n",[150,1523,1524,1528],{},[153,1525,1526],{},[356,1527,829],{"href":828},[153,1529,1530],{},[356,1531,834],{"href":833},[153,1533,1534],{},[356,1535,1538],{"href":1536,"rel":1537},"https://docs.github.com/en/codespaces",[360],"GitHub Codespaces Documentation",[153,1540,1541],{},[356,1542,1545],{"href":1543,"rel":1544},"https://docs.github.com/en/github-models/prototyping-with-ai-models",[360],"GitHub Models Documentation",[135,1547,774],{"id":773},[33,1549,1550],{},"Next, we'll explore how to create your first AI application! 
🚀",[33,1552,783,1553],{},[356,1554,1556],{"href":1555},"../core-generative-ai-techniques/core-generative-ai-techniques","Core Generative AI Techniques",[789,1558,1559],{},"html pre.shiki code .sScJk, html code.shiki .sScJk{--shiki-default:#6F42C1;--shiki-dark:#B392F0}html pre.shiki code .sZZnC, html code.shiki .sZZnC{--shiki-default:#032F62;--shiki-dark:#9ECBFF}html .default .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html.dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html pre.shiki code .sj4cs, html code.shiki .sj4cs{--shiki-default:#005CC5;--shiki-dark:#79B8FF}",{"title":11,"searchDepth":12,"depth":12,"links":1561},[1562,1563,1566,1567,1570,1573,1574,1577],{"id":373,"depth":12,"text":374},{"id":873,"depth":12,"text":874,"children":1564},[1565],{"id":927,"depth":109,"text":930},{"id":1028,"depth":12,"text":1029},{"id":1043,"depth":12,"text":1044,"children":1568},[1569],{"id":1050,"depth":109,"text":1051},{"id":1162,"depth":12,"text":1163,"children":1571},[1572],{"id":1218,"depth":109,"text":1219},{"id":1266,"depth":12,"text":1267},{"id":1508,"depth":12,"text":1509,"children":1575},[1576],{"id":1515,"depth":109,"text":1516},{"id":773,"depth":12,"text":774},{},"/programming-language/platform/dotnet/generative-ai-for-beginners-dotnet/setup-dev-environment/setting-up-the-development-environment-for-this-course",{"title":812,"description":820},"Programming Language/Platform/DotNet/Generative-AI-for-beginners-dotnet/2.Setup-Dev-Environment/1.Setting-Up-the-Development-Environment-for-This-Course","KE_OJSVQzNkVx_7P_7W4JxfzH6bsnqRIR4si9jzHcIE",{"id":1584,"title":1585,"body":1586,"description":1593,"extension":14,"meta":2242,"navigation":17,"path":2243,"seo":2244,"stem":2245,"__hash__":2246},"content/Programming Language/Platform/DotNet/Generative-AI-for-beginners-dotnet/2.Setup-Dev-Environment/getting-started-azure-openai.md","Setting Up the Development Environment for Azure OpenAI",{"type":8,"value":1587,"toc":2227},[1588,1591,1594,1597,1607,1611,1614,1618,1697,1704,1708,1715,1770,1775,1781,1798,1802,1805,1840,1842,1845,1870,1874,1876,1880,1885,1891,1896,1900,1904,1907,1967,1974,2169,2178,2180,2183,2185,2215,2217,2219,2224],[343,1589,1585],{"id":1590},"setting-up-the-development-environment-for-azure-openai",[33,1592,1593],{},"If you want to use Azure AI Foundry models for your .NET AI apps in this course, follow the steps in this guide.",[33,1595,1596],{},"Don't want to use Azure OpenAI?",[33,1598,783,1599,1603,1604],{},[356,1600,1602],{"href":1601},"README","To use GitHub Models this is the guide for you","\n👉 ",[356,1605,1606],{"href":833},"Here are the steps for Ollama",[135,1608,1610],{"id":1609},"create-the-azure-ai-foundry-resources","Create the Azure AI Foundry resources",[33,1612,1613],{},"To use Azure AI Foundry 
### Create a Hub and Project in Azure AI Foundry

1. Go to the [Azure AI Foundry Portal](https://ai.azure.com/).
2. Sign in with your Azure account.
3. Select **All hubs + projects** from the left-hand menu and then click **+ New hub** in the dropdown. (Note: You may have to click on **+ New project** first to see the **+ New hub** option.)

   ![Create a new hub](content/generative-ai/images/ai-foundry-hub-selection.png)
4. A new window will open. Fill in the details for your hub:
   - Give your hub a name (e.g., "MyAIHub").
   - Choose a region closest to you.
   - Select the appropriate subscription and resource group.
   - You can leave the rest of the settings as they are.
   - Click **Next**.
   - Review the details and click **Create**.
5. Once your hub is created, the portal will open its details page. Click the **Create Project** button.
   - Give your project a name (e.g., "GenAINET") or accept the default.
   - Click **Create**.

🎉 **Done!** You've just created your first project in Azure AI Foundry.

### Deploy a Language Model in Azure AI Foundry

Now, let's deploy a **gpt-4o-mini** model to your project:

1. In the Azure AI Foundry portal, navigate to your project (it should open automatically after you create it).
2. Click on **Models and Endpoints** from the left-hand menu and then the **Deploy Model** button.
3. Select **Deploy base model** from the dropdown.
4. Search for **gpt-4o-mini** in the model catalog.
5. Select the model and click the **Confirm** button.
6. Specify a deployment name (e.g., "gpt-4o-mini"). You can leave the rest of the options as they are.
7. Click **Deploy** and wait for the model to be provisioned.
8. Once deployed, note the **Model Name**, **Target URI**, and **API Key** from the model details page.

🎉 **Done!** You've deployed your first Large Language Model in Azure AI Foundry.

![Model deployed; copy the model name, endpoint URL, and API key](content/generative-ai/images/deploytoazure-20-copymodelinfo.png)

> 📝 **Note:** The endpoint may be similar to `https://< your hub name >.openai.azure.com/openai/deployments/gpt-4o-mini/chat/completions?api-version=2024-08-01-preview`. The endpoint name that we need is only `https://< your hub name >.openai.azure.com/`.
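If you'd rather derive that base endpoint than trim it by hand, a one-liner over the full Target URI does it. This is a plain .NET `Uri` sketch (the hub name below is made up), not part of the course samples:

```csharp
// Reduce the full Target URI to the scheme-and-host base the SDK expects.
var targetUri = new Uri(
    "https://myaihub.openai.azure.com/openai/deployments/gpt-4o-mini/chat/completions?api-version=2024-08-01-preview");
var endpoint = new Uri(targetUri.GetLeftPart(UriPartial.Authority) + "/");
Console.WriteLine(endpoint); // https://myaihub.openai.azure.com/
```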
## Adding the Azure AI API Key to your Codespace's Secrets

To be secure, let's add the API key you just created to your Codespace's secrets.

1. Make sure you have forked this repository to your GitHub account.
2. Go to the **Settings** tab of your forked repository, then expand **Secrets and variables** on the left-hand menu and select **Codespaces**.

   ![Adding a new Codespace secret](content/generative-ai/images/codespaces-secret.jpeg)
3. Name your secret **AZURE_AI_KEY**.
4. Paste the API key you copied from the Azure AI Foundry portal into the **Secret** field.

## Creating a GitHub Codespace

Let's create a GitHub Codespace to develop with for the rest of this course.

1. Open this repository's main page in a new window by [right-clicking here](https://github.com/microsoft/Generative-AI-for-beginners-dotnet) and selecting **Open in new window** from the context menu
2. Fork this repo into your GitHub account by clicking the **Fork** button in the top right corner of the page
3. Click the **Code** dropdown button and then select the **Codespaces** tab
4. Select the **...** option (the three dots) and choose **New with options...**

![Creating a Codespace with custom options](content/generative-ai/images/creating-codespace.png)

### Choosing your development container

From the **Dev container configuration** dropdown, select one of the following options:

**Option 1: C# (.NET)**: This is the option you should use if you plan to use GitHub Models or Azure OpenAI. It has all the core .NET development tools needed for the rest of the course and a fast startup time.

**Option 2: C# (.NET) - Ollama**: Ollama allows you to run the demos without needing to connect to GitHub Models or Azure OpenAI. It includes all the core .NET development tools in addition to Ollama, but has a slower start-up time, five minutes on average. [Follow this guide](getting-started-ollama) if you want to use Ollama.

You can leave the rest of the settings as they are. Click the **Create codespace** button to start the Codespace creation process.

![Selecting your development container configuration](content/generative-ai/images/select-container-codespace.png)

## Update the sample code to use Azure OpenAI and your new model

Now let's update the code to use the newly deployed model. First we'll need to add some NuGet packages to work with Azure OpenAI.

1. Open the terminal and switch to the project directory:

   ```bash
   cd 02-SetupDevEnvironment/src/BasicChat-01MEAI/
   ```
2. Run the following commands to add the required packages:

   ```bash
   dotnet add package Azure.AI.OpenAI --version 2.2.0-beta.2
   dotnet add package Microsoft.Extensions.AI.OpenAI --version 9.3.0-preview.1.25114.11
   ```

[More information about Azure.AI.OpenAI](https://www.nuget.org/packages/Azure.AI.OpenAI/2.2.0-beta.2).

1. Open `/workspaces/Generative-AI-for-beginners-dotnet/02-SetupDevEnvironment/src/BasicChat-01MEAI/Program.cs` and add the following using statements at the top of the file:

   ```csharp
   using System.ClientModel;
   using Azure.AI.OpenAI;
   using Microsoft.Extensions.AI;
   ```
2. Create new variables to hold the model name, endpoint, and API key:
\"https://\u003C your hub name >.openai.azure.com/\"\n",[597,2024,2025],{"class":599,"line":109},[597,2026,2027],{},"var apiKey = new ApiKeyCredential(Environment.GetEnvironmentVariable(\"AZURE_AI_SECRET\"));\n",[704,2029],{},"Making sure to replace ",[594,2032,2033],{},"\u003C deployment name >",[594,2035,2036],{},"\u003C endpoint >"," with the values you noted above.",[153,2039,2040,2041,2044,2045],{},"Replace the ",[594,2042,2043],{},"IChatClient"," creation with the following code:",[588,2046,2048],{"className":590,"code":2047,"language":592,"meta":11,"style":11},"IChatClient client = new AzureOpenAIClient(\n    endpoint,\n    apiKey)\n.AsChatClient(deploymentName);\n",[594,2049,2050,2055,2060,2065],{"__ignoreMap":11},[597,2051,2052],{"class":599,"line":600},[597,2053,2054],{},"IChatClient client = new AzureOpenAIClient(\n",[597,2056,2057],{"class":599,"line":12},[597,2058,2059],{},"    endpoint,\n",[597,2061,2062],{"class":599,"line":109},[597,2063,2064],{},"    apiKey)\n",[597,2066,2067],{"class":599,"line":616},[597,2068,2069],{},".AsChatClient(deploymentName);\n",[153,2071,2072,2073],{},"Run the following command in the terminal:",[588,2074,2075],{"className":963,"code":1318,"language":965,"meta":11,"style":11},[594,2076,2077],{"__ignoreMap":11},[597,2078,2079,2081],{"class":599,"line":600},[597,2080,1325],{"class":972},[597,2082,1328],{"class":976},[153,2084,2085,2086],{},"You should see output similar to the following:",[588,2087,2089],{"className":963,"code":2088,"language":965,"meta":11,"style":11},"Artificial Intelligence (AI) refers to the simulation of human intelligence in machines that are programmed to think and learn like humans. AI encompasses a variety of technologies and approaches that enable computers and systems to perform tasks that typically require human intelligence. These tasks include:\n\n1. **Learning**: The ability to improve performance based on experience, often through algorithms that analyze data.\n\n...\n",[594,2090,2091,2103,2107,2161,2165],{"__ignoreMap":11},[597,2092,2093,2096,2099],{"class":599,"line":600},[597,2094,2095],{"class":972},"Artificial",[597,2097,2098],{"class":976}," Intelligence",[597,2100,2102],{"class":2101},"sVt8B"," (AI) refers to the simulation of human intelligence in machines that are programmed to think and learn like humans. AI encompasses a variety of technologies and approaches that enable computers and systems to perform tasks that typically require human intelligence. 
## Summary

In this lesson, you learned how to set up your development environment for the rest of the course. You created a GitHub Codespace and configured it to use Azure OpenAI. You also updated the sample code to use the newly deployed model in Azure AI Foundry.

### Additional Resources

- [Azure AI Foundry Documentation](https://learn.microsoft.com/azure/ai-services/)
- [Working with GitHub Codespaces](https://docs.github.com/en/codespaces/getting-started)
- [How to Deploy Models in Azure AI Foundry](https://learn.microsoft.com/azure/ai-services/deploy/)
- [Azure.AI.OpenAI NuGet Package](https://www.nuget.org/packages/Azure.AI.OpenAI)

## Next Steps

Next up: [Core Generative AI Techniques](../03-CoreGenerativeAITechniques/readme)
# Setting Up the Development Environment with Ollama

If you want to use Ollama to run local models for this course, follow the steps in this guide. See the [lesson overview](./readme) for the other setup options.

## Creating a GitHub Codespace

Let's create a GitHub Codespace to develop with for the rest of this course.

1. Open the [main page of this repository](https://github.com/microsoft/Generative-AI-for-beginners-dotnet) and fork it to your GitHub account.
2. Click the **Code** dropdown button, then select the **Codespaces** tab.
3. Select the **...** (three dots) option and choose **New with options...**

### Choosing your development container

Pick the development container that matches how you plan to run models:

- **.NET AI Basic Dev Container**: This is the option you should use if you plan to use GitHub Models or Azure OpenAI, and it is our recommended way to complete this course. It has all the core .NET development tools needed for the rest of the course and a fast startup time.
- **.NET AI Ollama Dev Container**: This is the one you want for running models locally with Ollama. It includes all the core .NET development tools in addition to Ollama, but has a slower startup time, five minutes on average.
",[356,2317,1241],{"href":833},[33,2319,1893,2320,1257],{},[391,2321,1256],{},[33,2323,2324],{},[145,2325],{"alt":1262,"src":1263},[135,2327,2329],{"id":2328},"verifying-your-codespace-is-running-correctly-with-ollama","Verifying your Codespace is running correctly with Ollama",[33,2331,1270],{},[1053,2333,2334,2340,2354,2366],{},[153,2335,1275,2336,1279,2338,1283],{},[391,2337,1278],{},[391,2339,1282],{},[153,2341,1286,2342],{},[588,2343,2345],{"className":963,"code":2344,"language":965,"meta":11,"style":11},"cd 02-SetupDevEnvironment/src/BasicChat-03Ollama/\n",[594,2346,2347],{"__ignoreMap":11},[597,2348,2349,2351],{"class":599,"line":600},[597,2350,1297],{"class":1296},[597,2352,2353],{"class":976}," 02-SetupDevEnvironment/src/BasicChat-03Ollama/\n",[153,2355,1315,2356],{},[588,2357,2358],{"className":963,"code":1318,"language":965,"meta":11,"style":11},[594,2359,2360],{"__ignoreMap":11},[597,2361,2362,2364],{"class":599,"line":600},[597,2363,1325],{"class":972},[597,2365,1328],{"class":976},[153,2367,1331,2368],{},[588,2369,2371],{"className":963,"code":2370,"language":965,"meta":11,"style":11},"AI, or Artificial Intelligence, refers to the development of computer systems that can perform tasks that typically require human intelligence, such as:\n\n1. Learning: AI systems can learn from data and improve their performance over time.\n2. Reasoning: AI systems can draw conclusions and make decisions based on the data they have been trained on.\n\n...\n",[594,2372,2373,2426,2430,2467,2518,2522],{"__ignoreMap":11},[597,2374,2375,2377,2379,2382,2385,2387,2389,2391,2394,2396,2398,2400,2402,2405,2408,2410,2412,2414,2416,2418,2420,2423],{"class":599,"line":600},[597,2376,1341],{"class":972},[597,2378,1344],{"class":976},[597,2380,2381],{"class":976}," Artificial",[597,2383,2384],{"class":976}," Intelligence,",[597,2386,1353],{"class":976},[597,2388,1356],{"class":976},[597,2390,1359],{"class":976},[597,2392,2393],{"class":976}," development",[597,2395,1365],{"class":976},[597,2397,1423],{"class":976},[597,2399,1440],{"class":976},[597,2401,1380],{"class":976},[597,2403,2404],{"class":976}," can",[597,2406,2407],{"class":976}," perform",[597,2409,1456],{"class":976},[597,2411,1380],{"class":976},[597,2413,1461],{"class":976},[597,2415,1464],{"class":976},[597,2417,1368],{"class":976},[597,2419,1350],{"class":976},[597,2421,2422],{"class":976}," such",[597,2424,2425],{"class":976}," as:\n",[597,2427,2428],{"class":599,"line":12},[597,2429,1485],{"emptyLinePlaceholder":17},[597,2431,2432,2434,2437,2440,2442,2444,2446,2449,2452,2454,2456,2459,2461,2464],{"class":599,"line":109},[597,2433,2111],{"class":972},[597,2435,2436],{"class":976}," Learning:",[597,2438,2439],{"class":976}," AI",[597,2441,1440],{"class":976},[597,2443,2404],{"class":976},[597,2445,1397],{"class":976},[597,2447,2448],{"class":976}," from",[597,2450,2451],{"class":976}," data",[597,2453,1394],{"class":976},[597,2455,2133],{"class":976},[597,2457,2458],{"class":976}," their",[597,2460,2136],{"class":976},[597,2462,2463],{"class":976}," over",[597,2465,2466],{"class":976}," time.\n",[597,2468,2469,2472,2475,2477,2479,2481,2484,2487,2489,2492,2495,2497,2499,2501,2503,2506,2509,2512,2515],{"class":599,"line":616},[597,2470,2471],{"class":972},"2.",[597,2473,2474],{"class":976}," Reasoning:",[597,2476,2439],{"class":976},[597,2478,1440],{"class":976},[597,2480,2404],{"class":976},[597,2482,2483],{"class":976}," draw",[597,2485,2486],{"class":976}," conclusions",[597,2488,1394],{"class":976},[597,2490,2491],{"class":976}," 
make",[597,2493,2494],{"class":976}," decisions",[597,2496,2139],{"class":976},[597,2498,1434],{"class":976},[597,2500,1359],{"class":976},[597,2502,2451],{"class":976},[597,2504,2505],{"class":976}," they",[597,2507,2508],{"class":976}," have",[597,2510,2511],{"class":976}," been",[597,2513,2514],{"class":976}," trained",[597,2516,2517],{"class":976}," on.\n",[597,2519,2520],{"class":599,"line":622},[597,2521,1485],{"emptyLinePlaceholder":17},[597,2523,2524],{"class":599,"line":628},[597,2525,1490],{"class":1296},[505,2527,2528],{},[33,2529,1495,2530,1499,2532,1505],{},[391,2531,1498],{},[356,2533,1504],{"href":1502,"rel":2534},[360],[135,2536,2538],{"id":2537},"swap-out-the-model-in-ollama","Swap out the model in Ollama",[33,2540,2541,2542,2544,2545,2547,2548,2551],{},"One of the cool things about Ollama is that it's easy to change models. The sample apps uses models like \"",[391,2543,953],{},"\" or \"",[391,2546,959],{},"\" model. Let’s switch it up and try the \"",[391,2549,2550],{},"phi3.5","\" model instead.",[1053,2553,2554,2585,2601,2614],{},[153,2555,2556,2557,2571,2573,2574,2579,2580,405],{},"Download the Phi3.5 model by running the command from the terminal:",[588,2558,2560],{"className":963,"code":2559,"language":965,"meta":11,"style":11},"ollama pull phi3.5\n",[594,2561,2562],{"__ignoreMap":11},[597,2563,2564,2566,2568],{"class":599,"line":600},[597,2565,973],{"class":972},[597,2567,977],{"class":976},[597,2569,2570],{"class":976}," phi3.5\n",[704,2572],{},"You can learn more about the ",[356,2575,2578],{"href":2576,"rel":2577},"https://ollama.com/library/phi3.5",[360],"Phi3.5"," and other available models in the ",[356,2581,2584],{"href":2582,"rel":2583},"https://ollama.com/library/",[360],"Ollama library",[153,2586,2587,2588,2591,2592],{},"Edit the initialization of the chat client in ",[594,2589,2590],{},"Program.cs"," to use the new model:",[588,2593,2595],{"className":590,"code":2594,"language":592,"meta":11,"style":11},"IChatClient client = new OllamaChatClient(new Uri(\"http://localhost:11434/\"), \"phi3.5\");\n",[594,2596,2597],{"__ignoreMap":11},[597,2598,2599],{"class":599,"line":600},[597,2600,2594],{},[153,2602,2603,2604],{},"Finally, run the app with the following command:",[588,2605,2606],{"className":963,"code":1318,"language":965,"meta":11,"style":11},[594,2607,2608],{"__ignoreMap":11},[597,2609,2610,2612],{"class":599,"line":600},[597,2611,1325],{"class":972},[597,2613,1328],{"class":976},[153,2615,2616,2617],{},"You’ve just switched to a new model. Notice how the response is longer and more detailed.",[588,2618,2620],{"className":963,"code":2619,"language":965,"meta":11,"style":11},"Artificial Intelligence (AI) refers to the simulation of human intelligence processes by machines, especially computer systems. These processes include learning (the acquisition of information and accumulation of knowledge), reasoning (using the acquired knowledge to make deductions or decisions), and self-correction. AI can manifest in various forms:\n\n1. **Narrow AI** – Designed for specific tasks, such as facial recognition software, voice assistants like Siri or Alexa, autonomous vehicles, etc., which operate under a limited preprogrammed set of behaviors and rules but excel within their domain when compared to humans in these specialized areas.\n\n2. **General AI** – Capable of understanding, learning, and applying intelligence broadly across various domains like human beings do (natural language processing, problem-solving at a high level). 
```text
Artificial Intelligence (AI) refers to the simulation of human intelligence processes by machines, especially computer systems. These processes include learning (the acquisition of information and accumulation of knowledge), reasoning (using the acquired knowledge to make deductions or decisions), and self-correction. AI can manifest in various forms:

1. **Narrow AI** – Designed for specific tasks, such as facial recognition software, voice assistants like Siri or Alexa, autonomous vehicles, etc., which operate under a limited preprogrammed set of behaviors and rules but excel within their domain when compared to humans in these specialized areas.

2. **General AI** – Capable of understanding, learning, and applying intelligence broadly across various domains like human beings do (natural language processing, problem-solving at a high level). General AIs are still largely theoretical as we haven't yet achieved this form to the extent necessary for practical applications beyond narrow tasks.
```
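If you find yourself swapping models often, one option is to make the model name configurable instead of hard-coding it. A minimal sketch, assuming a hypothetical `OLLAMA_MODEL` environment variable (not part of the sample code) with `phi3.5` as the fallback:

```csharp
using Microsoft.Extensions.AI;

// OLLAMA_MODEL is an illustrative variable for this sketch, not part of the sample.
var model = Environment.GetEnvironmentVariable("OLLAMA_MODEL") ?? "phi3.5";

IChatClient client = new OllamaChatClient(new Uri("http://localhost:11434/"), model);

var response = await client.GetResponseAsync("What is AI?");
Console.WriteLine(response.Message);
```

With this in place, trying a different model is a one-line change in the terminal rather than an edit-and-rebuild.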
> 🙋 **Need help?**: If you encounter any issues, [open an issue in the repository](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/issues/new).

## Summary

In this lesson, you learned how to set up your development environment for the rest of the course. You created a GitHub Codespace and configured it to use Ollama.
You also updated the sample code so it's easy to switch models.

### Additional Resources

- [Ollama Models](https://ollama.com/search)
- [Working with GitHub Codespaces](https://docs.github.com/en/codespaces/getting-started)
- [Microsoft Extensions for AI Documentation](https://learn.microsoft.com/dotnet/)

## Next Steps

Next up: [Core Generative AI Techniques](../03-CoreGenerativeAITechniques/readme)

# Core Generative AI Techniques

In this lesson, you'll learn practical skills for building AI-enabled .NET applications.
Concepts include large language model completions and chat, Retrieval-Augmented Generation (RAG), audio/video analysis, and even AI agents.

In this lesson you will learn:

- 🌟 LLM completions and chat flows
- 🔗 Functions & plugins with LLMs
- 🔎 Retrieval-Augmented Generation (RAG)
- 👀 Vision-based AI approaches
- 🔊 Audio creation and transcription
- 🧩 Agents & assistants

For this lesson, we will subdivide the content into the following sections:

- [Chat, LLM completions, and function calling](./lm-completions-functions)
- [Retrieval-Augmented Generation (RAG)](./retrieval-augmented-generation)
- [Vision and audio AI applications](./vision-audio)
- [Agents](./agents)

We'll start with language model completions, chat applications, and function calling with language models in .NET.

[Go to part 1 - completions, chat and functions](./lm-completions-functions)

# Chat App Basics

In this lesson, we will explore the basics of building chat applications using language model completions and functions in .NET. We will also explore how to use Semantic Kernel and Microsoft Extensions AI (MEAI) to create chatbots, and how to use Semantic Kernel to create plugins: functionality the chatbot calls based on the user's input.

## Text completions and chat

[![Text completions and chat video](content/generative-ai/images/LIM_GAN_03_thumb_w480.png)](https://aka.ms/genainnet/videos/lesson3-chat)

Text completions might be the most basic form of interaction with the language model in an AI application. A text completion is a single response generated by the model based on the input, or prompt, given to the model.

A text completion itself is not a chat application; it's a one-and-done interaction.
You might use text completions for tasks such as content summarization or sentiment analysis.

### Text completions

Let's see how you would use text completions using the **Microsoft.Extensions.AI** library in .NET.

> 🧑‍💻 **Sample code**: [Here is a working example of this application](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/tree/main/03-CoreGenerativeAITechniques/src/BasicChat-01MEAI) you can follow along with.

```csharp
// this example illustrates using a model hosted on GitHub Models
IChatClient client = new ChatCompletionsClient(
    endpoint: new Uri("https://models.inference.ai.azure.com"),
    new AzureKeyCredential(githubToken)) // githubToken is retrieved from the environment variables
    .AsChatClient("gpt-4o-mini");

// here we're building the prompt
StringBuilder prompt = new StringBuilder();
prompt.AppendLine("You will analyze the sentiment of the following product reviews. Each line is its own review. Output the sentiment of each review in a bulleted list and then provide a general sentiment of all reviews. ");
prompt.AppendLine("I bought this product and it's amazing. I love it!");
prompt.AppendLine("This product is terrible. I hate it.");
prompt.AppendLine("I'm not sure about this product. It's okay.");
prompt.AppendLine("I found this product based on the other reviews. It worked for a bit, and then it didn't.");

// send the prompt to the model and wait for the text completion
var response = await client.GetResponseAsync(prompt.ToString());

// display the response
Console.WriteLine(response.Message);
```
> 🗒️ **Note:** This example uses GitHub Models as the hosting service. If you want to use Ollama, [check out this example](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/tree/main/03-CoreGenerativeAITechniques/src/BasicChat-03Ollama) (it instantiates a different `IChatClient`).
>
> If you want to use Azure AI Foundry you can use the same code, but you will need to change the endpoint and the credentials.

> 🙋 **Need help?**: If you encounter any issues, [open an issue in the repository](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/issues/new).

### Chat applications

Building a chat application is a bit more complex. There will be a conversation with the model, where the user can send prompts and the model will respond. And like any conversation, you will need to keep the context, or history, of the conversation so everything makes sense.

#### Different types of chat roles

During a chat with the model, the messages sent to the model can be of different types. Here are some examples:

- **System**: The system message guides the behavior of the model's responses. It serves as the initial instruction or prompt that sets the context, tone, and boundaries of the conversation. The end-user of the chat usually doesn't see this message, but it's very important in shaping the conversation.
- **User**: The user message is the input or prompt from the end-user. It can be a question, a statement, or a command. The model uses this message to generate a response.
- **Assistant**: The assistant message is the response generated by the model. These messages are based on the system and user messages and are generated by the model. The end-user sees these messages.
#### Managing the chat history

During the chat with the model, you will need to keep track of the chat history. This is important because the model generates responses based on the system message plus all of the back and forth between the user and assistant messages. Each additional message adds more context that the model uses to generate the next response.

Let's take a look at how you would build a chat application using MEAI.

```csharp
// assume IChatClient is instantiated as before

List<ChatMessage> conversation =
[
    new (ChatRole.System, "You are a product review assistant. Your job is to help people write great product reviews. Keep asking questions on the person's experience with the product until you have enough information to write a review. Then write the review for them and ask if they are happy with it.")
];

Console.Write("Start typing a review (type 'q' to quit): ");

// Loop to read messages from the console
while (true)
{
    string message = Console.ReadLine();

    if (message.ToLower() == "q")
    {
        break;
    }

    conversation.Add(new ChatMessage(ChatRole.User, message));

    // Process the message with the chat client (example)
    var response = await client.GetResponseAsync(conversation);
    conversation.Add(response.Message);

    Console.WriteLine(response.Message.Text);
}
```
> 🗒️ **Note:** This can also be done with Semantic Kernel. [Check out the code here](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/blob/main/03-CoreGenerativeAITechniques/src/BasicChat-02SK).

> 🙋 **Need help?**: If you encounter any issues, [open an issue in the repository](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/issues/new).

## Function calling

[![function explainer video](content/generative-ai/images/LIM_GAN_04_thumb_w480.png)](https://aka.ms/genainnet/videos/lesson3-functioncall)

When building AI applications you are not limited to just text-based interactions. It is possible to extend the functionality of the chatbot by calling pre-defined functions in your code based on user input.
In other words, function calls serve as a bridge between the model and external systems.

> 🧑‍💻 **Sample code**: [Here is a working example of this application](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/blob/main/03-CoreGenerativeAITechniques/src/MEAIFunctions) you can follow along with.

### Function calling in chat applications

There are a couple of setup steps you need to take in order to call functions with MEAI.

1. First, of course, define the function that you want the chatbot to be able to call. In this example we're going to get the weather forecast.

   ```csharp
   [Description("Get the weather")]
   static string GetTheWeather()
   {
       var temperature = Random.Shared.Next(5, 20);

       // Next's upper bound is exclusive, so (0, 2) yields both 0 and 1
       var conditions = Random.Shared.Next(0, 2) == 0 ? "sunny" : "rainy";

       return $"The weather is {temperature} degrees C and {conditions}.";
   }
   ```
\"sunny\" : \"rainy\";\n",[597,3542,3543],{"class":599,"line":3147},[597,3544,1485],{"emptyLinePlaceholder":17},[597,3546,3547],{"class":599,"line":3153},[597,3548,3549],{},"    return $\"The weather is {temperature} degrees C and {conditions}.\";\n",[597,3551,3552],{"class":599,"line":3159},[597,3553,637],{},[153,3555,3556,3557,3560,3561],{},"Next we're going to create a ",[594,3558,3559],{},"ChatOptions"," object that will tell MEAI which functions are available to it.",[588,3562,3564],{"className":590,"code":3563,"language":592,"meta":11,"style":11},"\nvar chatOptions = new ChatOptions\n{\n    Tools = [AIFunctionFactory.Create(GetTheWeather)]\n};\n\n",[594,3565,3566,3570,3575,3579,3584],{"__ignoreMap":11},[597,3567,3568],{"class":599,"line":600},[597,3569,1485],{"emptyLinePlaceholder":17},[597,3571,3572],{"class":599,"line":12},[597,3573,3574],{},"var chatOptions = new ChatOptions\n",[597,3576,3577],{"class":599,"line":109},[597,3578,608],{},[597,3580,3581],{"class":599,"line":616},[597,3582,3583],{},"    Tools = [AIFunctionFactory.Create(GetTheWeather)]\n",[597,3585,3586],{"class":599,"line":622},[597,3587,3588],{},"};\n",[153,3590,3591,3592,3594,3595],{},"When we instantiate the ",[594,3593,2043],{}," object we'll want to specify that we'll be using function invocation.",[588,3596,3598],{"className":590,"code":3597,"language":592,"meta":11,"style":11},"IChatClient client = new ChatCompletionsClient(\n    endpoint: new Uri(\"https://models.inference.ai.azure.com\"),\n    new AzureKeyCredential(githubToken)) // githubToken is retrieved from the environment variables\n.AsChatClient(\"gpt-4o-mini\")\n.AsBuilder()\n.UseFunctionInvocation()  // here we're saying that we could be invoking functions!\n.Build();\n",[594,3599,3600,3604,3608,3612,3617,3622,3627],{"__ignoreMap":11},[597,3601,3602],{"class":599,"line":600},[597,3603,3125],{},[597,3605,3606],{"class":599,"line":12},[597,3607,3130],{},[597,3609,3610],{"class":599,"line":109},[597,3611,3135],{},[597,3613,3614],{"class":599,"line":616},[597,3615,3616],{},".AsChatClient(\"gpt-4o-mini\")\n",[597,3618,3619],{"class":599,"line":622},[597,3620,3621],{},".AsBuilder()\n",[597,3623,3624],{"class":599,"line":628},[597,3625,3626],{},".UseFunctionInvocation()  // here we're saying that we could be invoking functions!\n",[597,3628,3629],{"class":599,"line":634},[597,3630,3631],{},".Build();\n",[153,3633,3634,3635,3637,3638],{},"Then finally when we interact with the model, we'll send the ",[594,3636,3559],{}," object that specifies the function the model could call if it needs to get the weather info.",[588,3639,3641],{"className":590,"code":3640,"language":592,"meta":11,"style":11},"var responseOne = await client.GetResponseAsync(\"What is today's date\", chatOptions); // won't call the function\n\nvar responseTwo = await client.GetResponseAsync(\"Should I bring an umbrella with me today?\", chatOptions); // will call the function\n",[594,3642,3643,3648,3652],{"__ignoreMap":11},[597,3644,3645],{"class":599,"line":600},[597,3646,3647],{},"var responseOne = await client.GetResponseAsync(\"What is today's date\", chatOptions); // won't call the function\n",[597,3649,3650],{"class":599,"line":12},[597,3651,1485],{"emptyLinePlaceholder":17},[597,3653,3654],{"class":599,"line":109},[597,3655,3656],{},"var responseTwo = await client.GetResponseAsync(\"Should I bring an umbrella with me today?\", chatOptions); // will call the 
> 🙋 **Need help?**: If you encounter any issues, [open an issue in the repository](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/issues/new).

## Summary

In this lesson, we learned how to use text completions, start and manage a chat conversation, and call functions in chat applications.

In the next lesson you'll see how to start chatting with data and build what's known as a Retrieval-Augmented Generation (RAG) chatbot, and work with vision and audio in an AI application!

## Additional resources

- [Build an AI chat app with .NET](https://learn.microsoft.com/dotnet/ai/quickstarts/get-started-openai?tabs=azd&pivots=openai)
- [Execute a local .NET function](https://learn.microsoft.com/dotnet/ai/quickstarts/quickstart-azure-openai-tool?tabs=azd&pivots=openai)
- [Chat with a local AI model](https://learn.microsoft.com/dotnet/ai/quickstarts/quickstart-local-ai)

## Up next

[Let's build a RAG app!](./retrieval-augmented-generation)

# Retrieval-Augmented Generation (RAG)

In this lesson, learn how to use **Retrieval-Augmented Generation (RAG)** in your AI applications.
This technique can be used to augment the response of a language model with information retrieved from a data store, or in other words, to chat with your data!

[![RAG explainer video](content/generative-ai/images/LIM_GAN_07_thumb_w480.png)](https://aka.ms/genainnet/videos/lesson3-rag)

Retrieval-Augmented Generation (RAG) is a technique used to augment the response of a language model with information retrieved from a data store.

There are two main phases in a RAG architecture: **Retrieval** and **Generation**.

- **Retrieval**: When the user poses a prompt, the system employs a retrieval mechanism of some sort to gather information from an external knowledge store. The knowledge store could be a vector database or a document, amongst other things.
- **Generation**: The retrieved information is then used to augment the user's prompt. The AI model processes both the retrieved info and the user's prompt to produce an enriched response.

## Benefits of RAG

- **Improved accuracy**: By augmenting the prompt with relevant information, the model can generate more accurate responses and reduce hallucinations.
- **Up-to-date information**: The model can retrieve the most recent information from the knowledge store. Remember, the language model has a knowledge cutoff date, and augmenting the prompt with the most recent information can improve the response.
- **Domain-specific knowledge**: The model can be passed very specific domain information, making it more effective in niche situations.

## Embeddings!

We've held off as long as we could to introduce the concept of embeddings. In the retrieval phase of RAG, we don't want to pass the entire data store to the model to generate the response; we only want to grab the most relevant information.

So we need a way to compare the user's prompt with the data in the knowledge store, so we can pull out the minimum amount of information needed to augment the prompt.

Thus we need a way to represent the data in the knowledge store. This is where embeddings come in. Embeddings are a way to represent data in a vector space. This allows us to mathematically compare the similarity of the user's prompt with the data in the knowledge store, so we can retrieve the most relevant information.
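To make "mathematically compare" concrete, here is a minimal sketch of cosine similarity, the distance function the movie example below uses, over two plain float arrays. This is illustrative only; the vector store does this comparison for you:

```csharp
// Cosine similarity: 1.0 means the vectors point the same way (very similar),
// 0.0 means unrelated, -1.0 means opposite.
static double CosineSimilarity(float[] a, float[] b)
{
    double dot = 0, magA = 0, magB = 0;
    for (int i = 0; i < a.Length; i++)
    {
        dot += a[i] * b[i];
        magA += a[i] * a[i];
        magB += b[i] * b[i];
    }
    return dot / (Math.Sqrt(magA) * Math.Sqrt(magB));
}

// Tiny made-up vectors just to show the comparison.
Console.WriteLine(CosineSimilarity([1f, 0f], [1f, 0f])); // 1 (identical direction)
Console.WriteLine(CosineSimilarity([1f, 0f], [0f, 1f])); // 0 (unrelated)
```

Real embedding vectors have hundreds of dimensions (384 in the example below), but the comparison works the same way.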
You may have heard of vector databases. These are databases that store data in a vector space, which allows for very fast retrieval of information based on similarity. You don't need a vector database to use RAG, but it is a common choice.

## Implementing RAG

We'll use Microsoft.Extensions.AI along with the [Microsoft.Extensions.VectorData](https://www.nuget.org/packages/Microsoft.Extensions.VectorData.Abstractions/) and [Microsoft.SemanticKernel.Connectors.InMemory](https://www.nuget.org/packages/Microsoft.SemanticKernel.Connectors.InMemory) libraries to implement RAG below.

> 🧑‍💻 **Sample code:** You can follow along with the [sample code here](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/blob/main/03-CoreGenerativeAITechniques/src/RAGSimple-02MEAIVectorsMemory).
>
> You can also see how to implement a RAG app [using Semantic Kernel by itself in our sample source code here](https://github.com/microsoft/Generative-AI-for-beginners-dotnet/blob/main/03-CoreGenerativeAITechniques/src/RAGSimple-01SK).

### Populating the knowledge store

1. First we need some knowledge data to store. We'll use a POCO class that represents movies.

   ```csharp
   public class Movie
   {
       [VectorStoreRecordKey]
       public int Key { get; set; }

       [VectorStoreRecordData]
       public string Title { get; set; }

       [VectorStoreRecordData]
       public string Description { get; set; }

       [VectorStoreRecordVector(384, DistanceFunction.CosineSimilarity)]
       public ReadOnlyMemory<float> Vector { get; set; }
   }
   ```

   Using attributes like `[VectorStoreRecordKey]` makes it easier for the vector store implementations to map POCO objects to their underlying data models.
2. Of course we're going to need that knowledge data populated. Create a list of `Movie` objects, and create an `InMemoryVectorStore` that will have a collection of movies.

   ```csharp
   var movieData = new List<Movie>
   {
       new Movie { Key = 1, Title = "The Matrix", Description = "A computer hacker learns from mysterious rebels about the true nature of his reality and his role in the war against its controllers." },
       new Movie { Key = 2, Title = "Inception", Description = "A thief who steals corporate secrets through the use of dream-sharing technology is given the inverse task of planting an idea into the mind of a C.E.O." },
       new Movie { Key = 3, Title = "Interstellar", Description = "A team of explorers travel through a wormhole in space in an attempt to ensure humanity's survival." }
   };

   var vectorStore = new InMemoryVectorStore();
   var movies = vectorStore.GetCollection<int, Movie>("movies");
   await movies.CreateCollectionIfNotExistsAsync();
   ```

3. Our next task is to convert our knowledge store (the `movieData` object) into embeddings and store them in the in-memory vector store. When we create the embeddings we'll use a different model: an embeddings model instead of a language model.
   ```csharp
   var endpoint = new Uri("https://models.inference.ai.azure.com");
   var modelId = "text-embedding-3-small";
   var credential = new AzureKeyCredential(githubToken); // githubToken is retrieved from the environment variables

   IEmbeddingGenerator<string, Embedding<float>> generator =
           new EmbeddingsClient(endpoint, credential)
       .AsEmbeddingGenerator(modelId);

   foreach (var movie in movieData)
   {
       // generate the embedding vector for the movie description
       movie.Vector = await generator.GenerateEmbeddingVectorAsync(movie.Description);

       // add the overall movie to the in-memory vector store's movie collection
       await movies.UpsertAsync(movie);
   }
   ```

   Our generator object is of type `IEmbeddingGenerator<string, Embedding<float>>`. This means it expects inputs of `string` and produces outputs of `Embedding<float>`. We're again using GitHub Models, and that means the **Microsoft.Extensions.AI.AzureAIInference** package, but you could use another hosting option just as easily.

> 🗒️ **Note:** Generally you'll only create embeddings for your knowledge store once and then store them; it won't be done every single time you run the application. But since we're using an in-memory store, we need to, because the data gets wiped every time the application restarts.
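If you want to avoid regenerating embeddings on every run, one option is to persist them yourself. Here is a minimal sketch using System.Text.Json; the `movie-vectors.json` file name and the shape of the cache are illustrative assumptions, not part of the sample:

```csharp
using System.Text.Json;

// Cache: movie key -> embedding vector, saved next to the app (illustrative file name).
var cacheFile = "movie-vectors.json";

// Save after generating (convert ReadOnlyMemory<float> to a plain array for serialization).
var cache = movieData.ToDictionary(m => m.Key, m => m.Vector.ToArray());
await File.WriteAllTextAsync(cacheFile, JsonSerializer.Serialize(cache));

// On a later run, load the cached vectors instead of regenerating them.
if (File.Exists(cacheFile))
{
    var loaded = JsonSerializer.Deserialize<Dictionary<int, float[]>>(
        await File.ReadAllTextAsync(cacheFile))!;
    foreach (var movie in movieData)
        movie.Vector = loaded[movie.Key]; // float[] converts implicitly to ReadOnlyMemory<float>
}
```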
But since we're using an in-memory store, we need to regenerate them on every run because the data gets wiped when the application restarts.",[28,4126,4128],{"id":4127},"retrieving-the-knowledge","Retrieving the knowledge",[1053,4130,4131],{},[153,4132,4133,4134],{},"Now for the retrieval phase. We need to query the vectorized knowledge store to find the most relevant information based on the user's prompt. And to query the vectorized knowledge store, we'll first need to turn the user's prompt into an embedding vector.",[588,4135,4137],{"className":590,"code":4136,"language":592,"meta":11,"style":11},"// generate the embedding vector for the user's prompt\nvar query = \"I want to see a family friendly movie\";\nvar queryEmbedding = await generator.GenerateEmbeddingVectorAsync(query);\n\nvar searchOptions = new VectorSearchOptions\n{\n    Top = 1,\n    VectorPropertyName = \"Vector\"\n};\n\n// search the knowledge store based on the user's prompt\nvar searchResults = await movies.VectorizedSearchAsync(queryEmbedding, searchOptions);\n\n// let's see the results just so we know what they look like\nawait foreach (var result in searchResults.Results)\n{\n    Console.WriteLine($\"Title: {result.Record.Title}\");\n    Console.WriteLine($\"Description: {result.Record.Description}\");\n    Console.WriteLine($\"Score: {result.Score}\");\n    Console.WriteLine();\n}\n",[594,4138,4139,4144,4149,4154,4158,4163,4167,4172,4177,4181,4185,4190,4195,4199,4204,4209,4213,4218,4223,4228,4233],{"__ignoreMap":11},[597,4140,4141],{"class":599,"line":600},[597,4142,4143],{},"// generate the embedding vector for the user's prompt\n",[597,4145,4146],{"class":599,"line":12},[597,4147,4148],{},"var query = \"I want to see a family friendly movie\";\n",[597,4150,4151],{"class":599,"line":109},[597,4152,4153],{},"var queryEmbedding = await generator.GenerateEmbeddingVectorAsync(query);\n",[597,4155,4156],{"class":599,"line":616},[597,4157,1485],{"emptyLinePlaceholder":17},[597,4159,4160],{"class":599,"line":622},[597,4161,4162],{},"var searchOptions = new VectorSearchOptions\n",[597,4164,4165],{"class":599,"line":628},[597,4166,608],{},[597,4168,4169],{"class":599,"line":634},[597,4170,4171],{},"    Top = 1,\n",[597,4173,4174],{"class":599,"line":3147},[597,4175,4176],{},"    VectorPropertyName = \"Vector\"\n",[597,4178,4179],{"class":599,"line":3153},[597,4180,3588],{},[597,4182,4183],{"class":599,"line":3159},[597,4184,1485],{"emptyLinePlaceholder":17},[597,4186,4187],{"class":599,"line":3165},[597,4188,4189],{},"// search the knowledge store based on the user's prompt\n",[597,4191,4192],{"class":599,"line":3171},[597,4193,4194],{},"var searchResults = await movies.VectorizedSearchAsync(queryEmbedding, searchOptions);\n",[597,4196,4197],{"class":599,"line":3177},[597,4198,1485],{"emptyLinePlaceholder":17},[597,4200,4201],{"class":599,"line":3183},[597,4202,4203],{},"// let's see the results just so we know what they look like\n",[597,4205,4206],{"class":599,"line":3189},[597,4207,4208],{},"await foreach (var result in searchResults.Results)\n",[597,4210,4211],{"class":599,"line":3194},[597,4212,608],{},[597,4214,4215],{"class":599,"line":3200},[597,4216,4217],{},"    Console.WriteLine($\"Title: {result.Record.Title}\");\n",[597,4219,4220],{"class":599,"line":3206},[597,4221,4222],{},"    Console.WriteLine($\"Description: {result.Record.Description}\");\n",[597,4224,4225],{"class":599,"line":3211},[597,4226,4227],{},"    Console.WriteLine($\"Score: {result.Score}\");\n",[597,4229,4230],{"class":599,"line":3217},[597,4231,4232],{},"    
Console.WriteLine();\n",[597,4234,4235],{"class":599,"line":3397},[597,4236,637],{},[28,4238,4240],{"id":4239},"generating-the-response","Generating the response",[33,4242,4243],{},"Now we're on to the generation portion of RAG. This is where we provide the language model with the additional context that the retrieval portion just found so it can better formulate a response. This will be a lot like the chat completions we've seen before - except now we're providing the model with the user's prompt and the retrieved information.",[33,4245,4246,4247,4250,4251,529,4253,533,4255,4257,4258,4260],{},"If you remember from before, we use ",[594,4248,4249],{},"ChatMessage"," objects when carrying on a conversation with the model, which have roles of ",[391,4252,3273],{},[391,4254,3279],{},[391,4256,3285],{},". Most of the time we'll probably be setting the search results as a ",[391,4259,3279],{}," message.",[33,4262,4263],{},"So we could do something like the following while looping through the results of the vector search:",[588,4265,4267],{"className":590,"code":4266,"language":592,"meta":11,"style":11},"\n// assuming chatClient is instantiated as before to a language model\n// assuming the vector search is done as above\n// assuming List\u003CChatMessage> conversation object is already instantiated and has a system prompt\n\nconversation.Add(new ChatMessage(ChatRole.User, query)); // this is the user prompt\n\n// ... do the vector search\n\n// add the search results to the conversation\nawait foreach (var result in searchResults.Results)\n{\n    conversation.Add(new ChatMessage(ChatRole.User, $\"This movie is playing nearby: {result.Record.Title} and it's about {result.Record.Description}\"));\n}\n\n// send the conversation to the model\nvar response = await chatClient.GetResponseAsync(conversation);\n\n// add the assistant message to the conversation\nconversation.Add(new ChatMessage(ChatRole.Assistant, response.Message));\n\n// display the conversation\nConsole.WriteLine($\"Bot:> {response.Message.Text}\");\n",[594,4268,4269,4273,4278,4283,4288,4292,4297,4301,4306,4310,4315,4319,4323,4328,4332,4336,4341,4346,4350,4355,4360,4364,4369],{"__ignoreMap":11},[597,4270,4271],{"class":599,"line":600},[597,4272,1485],{"emptyLinePlaceholder":17},[597,4274,4275],{"class":599,"line":12},[597,4276,4277],{},"// assuming chatClient is instantiated as before to a language model\n",[597,4279,4280],{"class":599,"line":109},[597,4281,4282],{},"// assuming the vector search is done as above\n",[597,4284,4285],{"class":599,"line":616},[597,4286,4287],{},"// assuming List\u003CChatMessage> conversation object is already instantiated and has a system prompt\n",[597,4289,4290],{"class":599,"line":622},[597,4291,1485],{"emptyLinePlaceholder":17},[597,4293,4294],{"class":599,"line":628},[597,4295,4296],{},"conversation.Add(new ChatMessage(ChatRole.User, query)); // this is the user prompt\n",[597,4298,4299],{"class":599,"line":634},[597,4300,1485],{"emptyLinePlaceholder":17},[597,4302,4303],{"class":599,"line":3147},[597,4304,4305],{},"// ... 
do the vector search\n",[597,4307,4308],{"class":599,"line":3153},[597,4309,1485],{"emptyLinePlaceholder":17},[597,4311,4312],{"class":599,"line":3159},[597,4313,4314],{},"// add the search results to the conversation\n",[597,4316,4317],{"class":599,"line":3165},[597,4318,4208],{},[597,4320,4321],{"class":599,"line":3171},[597,4322,608],{},[597,4324,4325],{"class":599,"line":3177},[597,4326,4327],{},"    conversation.Add(new ChatMessage(ChatRole.User, $\"This movie is playing nearby: {result.Record.Title} and it's about {result.Record.Description}\"));\n",[597,4329,4330],{"class":599,"line":3183},[597,4331,637],{},[597,4333,4334],{"class":599,"line":3189},[597,4335,1485],{"emptyLinePlaceholder":17},[597,4337,4338],{"class":599,"line":3194},[597,4339,4340],{},"// send the conversation to the model\n",[597,4342,4343],{"class":599,"line":3200},[597,4344,4345],{},"var response = await chatClient.GetResponseAsync(conversation);\n",[597,4347,4348],{"class":599,"line":3206},[597,4349,1485],{"emptyLinePlaceholder":17},[597,4351,4352],{"class":599,"line":3211},[597,4353,4354],{},"// add the assistant message to the conversation\n",[597,4356,4357],{"class":599,"line":3217},[597,4358,4359],{},"conversation.Add(new ChatMessage(ChatRole.Assistant, response.Message));\n",[597,4361,4362],{"class":599,"line":3397},[597,4363,1485],{"emptyLinePlaceholder":17},[597,4365,4366],{"class":599,"line":3403},[597,4367,4368],{},"// display the conversation\n",[597,4370,4371],{"class":599,"line":3408},[597,4372,4373],{},"Console.WriteLine($\"Bot:> {response.Message.Text}\");\n",[505,4375,4376],{},[33,4377,1495,4378,3247,4380,405],{},[391,4379,1498],{},[356,4381,3252],{"href":3250,"rel":4382},[360],[135,4384,3676],{"id":1515},[150,4386,4387,4394],{},[153,4388,4389],{},[356,4390,4393],{"href":4391,"rel":4392},"https://github.com/microsoft/generative-ai-for-beginners/blob/main/15-rag-and-vector-databases/README.md",[360],"GenAI for Beginners: RAG and Vector Databases",[153,4395,4396],{},[356,4397,4400],{"href":4398,"rel":4399},"https://learn.microsoft.com/dotnet/ai/quickstarts/quickstart-ai-chat-with-data?tabs=azd&pivots=openai",[360],"Build a .NET Vector AI Search App",[28,4402,4404],{"id":4403},"community-resources","Community resources",[150,4406,4407,4414],{},[153,4408,4409],{},[356,4410,4413],{"href":4411,"rel":4412},"https://github.com/AsterixBG/my-first-ai-ragbot",[360],"AI Chatbot with Retrieval-Augmented Generation (RAG) for .NET",[153,4415,4416],{},[356,4417,4420],{"href":4418,"rel":4419},"https://www.youtube.com/watch?v=O7Ce3YljyIY",[360],"StructRAG, the groundbreaking framework for transforming raw data into structured knowledge to boost Retrieval-Augmented Generation (RAG) performance",[135,4422,4424],{"id":4423},"next-up","Next up",[33,4426,4427],{},"Now that you've seen what it takes to implement RAG, you can see how it can be a powerful tool in your AI applications. 
It can provide more accurate responses, up-to-date information, and domain-specific knowledge to your users.",[33,4429,783,4430,405],{},[356,4431,4432],{"href":3022},"Next up let's learn about adding Vision and Audio to your AI applications",[789,4434,791],{},{"title":11,"searchDepth":12,"depth":12,"links":4436},[4437,4438,4439,4444,4447],{"id":3783,"depth":12,"text":3784},{"id":532,"depth":12,"text":3807},{"id":3822,"depth":12,"text":3823,"children":4440},[4441,4442,4443],{"id":3861,"depth":109,"text":3862},{"id":4127,"depth":109,"text":4128},{"id":4239,"depth":109,"text":4240},{"id":1515,"depth":12,"text":3676,"children":4445},[4446],{"id":4403,"depth":109,"text":4404},{"id":4423,"depth":12,"text":4424},"In this lesson learn how to use Retrieval-Augmented Generation (RAG) in your AI applications. This technique can be used to augment the response of a language model with information retrieved from a data store - or chat with your data!",{},"/programming-language/platform/dotnet/generative-ai-for-beginners-dotnet/core-generative-ai-techniques/retrieval-augmented-generation",{"title":3017,"description":4448},"Programming Language/Platform/DotNet/Generative-AI-for-beginners-dotnet/3.Core-Generative-AI-Techniques/3.retrieval-augmented-generation","cYxiqobUTLazpGRQ6IRoOWC6nwFermTd4wEPmwELG4Q",{"id":4455,"title":4456,"body":4457,"description":4464,"extension":14,"meta":5032,"navigation":17,"path":5033,"seo":5034,"stem":5035,"__hash__":5036},"content/Programming Language/Platform/DotNet/Generative-AI-for-beginners-dotnet/3.Core-Generative-AI-Techniques/4.vision-audio.md","Vision and Audio AI apps",{"type":8,"value":4458,"toc":5022},[4459,4462,4465,4467,4471,4481,4485,4488,4492,4498,4501,4507,4519,4720,4729,4738,4742,4752,4756,4759,4762,4773,4781,4785,4792,4802,4978,4981,4990,4999,5001,5010,5012,5015,5020],[343,4460,4456],{"id":4461},"vision-and-audio-ai-apps",[33,4463,4464],{},"In this lesson learn how vision AI allows your apps to generate and interpret images. Audio AI enables your apps to generate audio and even transcribe it in real time.",[351,4466],{},[135,4468,4470],{"id":4469},"vision","Vision",[33,4472,4473],{},[356,4474,4477],{"href":4475,"rel":4476},"https://aka.ms/genainnet/videos/lesson3-vision",[360],[145,4478],{"alt":4479,"src":4480},"Vision AI explainer","content/generative-ai/images/LIM_GAN_06_thumb_w480.png",[33,4482,4483],{},[368,4484,370],{},[33,4486,4487],{},"Vision-based AI approaches are used to generate and interpret images. This can be useful for a wide range of applications, such as image recognition, image generation, and image manipulation. Current models are multimodal, meaning they can accept a variety of inputs, such as text, images, and audio, and generate a variety of outputs. In this case, we are going to focus on image recognition.",[28,4489,4491],{"id":4490},"image-recognition-with-meai","Image recognition with MEAI",[33,4493,4494,4495],{},"Image recognition is more than having the AI model tell you what it thinks is present in an image. You can also ask questions about the image, for example: ",[368,4496,4497],{},"How many people are present and is it raining?",[33,4499,4500],{},"Ok - so we're going to put the model through its paces and ask it if it can tell us how many red shoes are in the first photo and then have it analyze a receipt that's in German so we know how much to tip.",[33,4502,4503],{},[145,4504],{"alt":4505,"src":4506},"A composite showing both images the example will use. The first is several runners but only showing their legs. 
The second is a German restaurant receipt","content/generative-ai/images/example-visual-image.png",[505,4508,4509],{},[33,4510,3096,4511,4513,4514,405],{},[391,4512,3099],{},": You can follow ",[356,4515,4518],{"href":4516,"rel":4517},"https://github.com/microsoft/Generative-AI-for-beginners-dotnet/blob/main/03-CoreGenerativeAITechniques/src/Vision-01MEAI-GitHubModels",[360],"along with sample code here",[1053,4520,4521,4578,4649],{},[153,4522,4523,4524,4526,4527],{},"We're using MEAI and GitHub Models, so instantiate the ",[594,4525,2043],{}," as we have been. Also start to create a chat history.",[588,4528,4530],{"className":590,"code":4529,"language":592,"meta":11,"style":11},"IChatClient chatClient = new ChatCompletionsClient(\n    endpoint: new Uri(\"https://models.inference.ai.azure.com\"),\n    new AzureKeyCredential(githubToken)) // make sure to grab githubToken from the secrets or environment\n.AsChatClient(\"gpt-4o-mini\");\n\nList\u003CChatMessage> messages =\n[\n    new ChatMessage(ChatRole.System, \"You are a useful assistant that describes images using a direct style.\"),\n    new ChatMessage(ChatRole.User, \"How many red shoes are in the photo?\") // we'll start with the running photo\n];\n",[594,4531,4532,4537,4541,4546,4551,4555,4560,4564,4569,4574],{"__ignoreMap":11},[597,4533,4534],{"class":599,"line":600},[597,4535,4536],{},"IChatClient chatClient = new ChatCompletionsClient(\n",[597,4538,4539],{"class":599,"line":12},[597,4540,3130],{},[597,4542,4543],{"class":599,"line":109},[597,4544,4545],{},"    new AzureKeyCredential(githubToken)) // make sure to grab githubToken from the secrets or environment\n",[597,4547,4548],{"class":599,"line":616},[597,4549,4550],{},".AsChatClient(\"gpt-4o-mini\");\n",[597,4552,4553],{"class":599,"line":622},[597,4554,1485],{"emptyLinePlaceholder":17},[597,4556,4557],{"class":599,"line":628},[597,4558,4559],{},"List\u003CChatMessage> messages =\n",[597,4561,4562],{"class":599,"line":634},[597,4563,3324],{},[597,4565,4566],{"class":599,"line":3147},[597,4567,4568],{},"    new ChatMessage(ChatRole.System, \"You are a useful assistant that describes images using a direct style.\"),\n",[597,4570,4571],{"class":599,"line":3153},[597,4572,4573],{},"    new ChatMessage(ChatRole.User, \"How many red shoes are in the photo?\") // we'll start with the running photo\n",[597,4575,4576],{"class":599,"line":3159},[597,4577,3334],{},[153,4579,4580,4581,4584,4585],{},"The next part is to load the image into an ",[594,4582,4583],{},"AIContent"," object and set that as part of our conversation and then send that off to the model to describe for us.",[588,4586,4588],{"className":590,"code":4587,"language":592,"meta":11,"style":11},"var imagePath = \"FULL PATH TO THE IMAGE ON DISK\";\n\nAIContent imageContent = new DataContent(File.ReadAllBytes(imagePath), \"image/jpeg\"); // the important part here is that we're loading it in bytes. 
The image could come from anywhere.\n\nvar imageMessage = new ChatMessage(ChatRole.User, [imageContent]);\n\nmessages.Add(imageMessage);\n\nvar response = await chatClient.GetResponseAsync(messages);\n\nmessages.Add(response.Message);\n\nConsole.WriteLine(response.Message.Text);\n",[594,4589,4590,4595,4599,4604,4608,4613,4617,4622,4626,4631,4635,4640,4644],{"__ignoreMap":11},[597,4591,4592],{"class":599,"line":600},[597,4593,4594],{},"var imagePath = \"FULL PATH TO THE IMAGE ON DISK\";\n",[597,4596,4597],{"class":599,"line":12},[597,4598,1485],{"emptyLinePlaceholder":17},[597,4600,4601],{"class":599,"line":109},[597,4602,4603],{},"AIContent imageContent = new DataContent(File.ReadAllBytes(imagePath), \"image/jpeg\"); // the important part here is that we're loading it in bytes. The image could come from anywhere.\n",[597,4605,4606],{"class":599,"line":616},[597,4607,1485],{"emptyLinePlaceholder":17},[597,4609,4610],{"class":599,"line":622},[597,4611,4612],{},"var imageMessage = new ChatMessage(ChatRole.User, [imageContent]);\n",[597,4614,4615],{"class":599,"line":628},[597,4616,1485],{"emptyLinePlaceholder":17},[597,4618,4619],{"class":599,"line":634},[597,4620,4621],{},"messages.Add(imageMessage);\n",[597,4623,4624],{"class":599,"line":3147},[597,4625,1485],{"emptyLinePlaceholder":17},[597,4627,4628],{"class":599,"line":3153},[597,4629,4630],{},"var response = await chatClient.GetResponseAsync(messages);\n",[597,4632,4633],{"class":599,"line":3159},[597,4634,1485],{"emptyLinePlaceholder":17},[597,4636,4637],{"class":599,"line":3165},[597,4638,4639],{},"messages.Add(response.Message);\n",[597,4641,4642],{"class":599,"line":3171},[597,4643,1485],{"emptyLinePlaceholder":17},[597,4645,4646],{"class":599,"line":3177},[597,4647,4648],{},"Console.WriteLine(response.Message.Text);\n",[153,4650,4651,4652],{},"Then to get the model to work on the restaurant receipt - which is in German - to find out how much of a tip we should leave:",[588,4653,4655],{"className":590,"code":4654,"language":592,"meta":11,"style":11},"// this will go after the previous code block\nmessages.Add(new ChatMessage(ChatRole.User, \"This is a receipt from a lunch. I had the sausage. How much of a tip should I leave?\"));\n\nvar receiptPath = \"FULL PATH TO THE RECEIPT IMAGE ON DISK\";\n\nAIContent receiptContent = new DataContent(File.ReadAllBytes(receiptPath), \"image/jpeg\");\nvar receiptMessage = new ChatMessage(ChatRole.User, [receiptContent]);\n\nmessages.Add(receiptMessage);\n\nresponse = await chatClient.GetResponseAsync(messages);\nmessages.Add(response.Message);\n\nConsole.WriteLine(response.Message.Text);\n",[594,4656,4657,4662,4667,4671,4676,4680,4685,4690,4694,4699,4703,4708,4712,4716],{"__ignoreMap":11},[597,4658,4659],{"class":599,"line":600},[597,4660,4661],{},"// this will go after the previous code block\n",[597,4663,4664],{"class":599,"line":12},[597,4665,4666],{},"messages.Add(new ChatMessage(ChatRole.User, \"This is a receipt from a lunch. I had the sausage. 
How much of a tip should I leave?\"));\n",[597,4668,4669],{"class":599,"line":109},[597,4670,1485],{"emptyLinePlaceholder":17},[597,4672,4673],{"class":599,"line":616},[597,4674,4675],{},"var receiptPath = \"FULL PATH TO THE RECEIPT IMAGE ON DISK\";\n",[597,4677,4678],{"class":599,"line":622},[597,4679,1485],{"emptyLinePlaceholder":17},[597,4681,4682],{"class":599,"line":628},[597,4683,4684],{},"AIContent receiptContent = new DataContent(File.ReadAllBytes(receiptPath), \"image/jpeg\");\n",[597,4686,4687],{"class":599,"line":634},[597,4688,4689],{},"var receiptMessage = new ChatMessage(ChatRole.User, [receiptContent]);\n",[597,4691,4692],{"class":599,"line":3147},[597,4693,1485],{"emptyLinePlaceholder":17},[597,4695,4696],{"class":599,"line":3153},[597,4697,4698],{},"messages.Add(receiptMessage);\n",[597,4700,4701],{"class":599,"line":3159},[597,4702,1485],{"emptyLinePlaceholder":17},[597,4704,4705],{"class":599,"line":3165},[597,4706,4707],{},"response = await chatClient.GetResponseAsync(messages);\n",[597,4709,4710],{"class":599,"line":3171},[597,4711,4639],{},[597,4713,4714],{"class":599,"line":3177},[597,4715,1485],{"emptyLinePlaceholder":17},[597,4717,4718],{"class":599,"line":3183},[597,4719,4648],{},[33,4721,4722,4723,4725,4726,4728],{},"Here's a point I want to drive home. We're conversing with a language model, or more appropriately a multi-modal model that can handle text as well as image (and audio) interactions. And we're carrying on the conversation with the model as normal. Sure it's a different type of object we're sending to the model, ",[594,4724,4583],{}," instead of a ",[594,4727,4103],{},", but the workflow is the same.",[505,4730,4731],{},[33,4732,1495,4733,3247,4735,405],{},[391,4734,1498],{},[356,4736,3252],{"href":3250,"rel":4737},[360],[135,4739,4741],{"id":4740},"audio-ai","Audio AI",[33,4743,4744],{},[356,4745,4748],{"href":4746,"rel":4747},"https://aka.ms/genainnet/videos/lesson3-realtimeaudio",[360],[145,4749],{"alt":4750,"src":4751},"Audio AI explainer video","content/generative-ai/images/LIM_GAN_05_thumb_w480.png",[33,4753,4754],{},[368,4755,370],{},[33,4757,4758],{},"Real-time audio techniques allow your apps to generate audio and transcribe it in real-time. This can be useful for a wide range of applications, such as voice recognition, speech synthesis, and audio manipulation.",[33,4760,4761],{},"But we're going to have to transition away from MEAI and from the model we were using to Azure AI Speech Services.",[33,4763,4764,4765,4769,4770,405],{},"To set up an Azure AI Speech Service model, ",[356,4766,4768],{"href":4767},"../setup-dev-environment/getting-started-azure-openai","follow these directions"," but instead of choosing an OpenAI model, choose ",[391,4771,4772],{},"Azure-AI-Speech",[505,4774,4775],{},[33,4776,4777,4780],{},[391,4778,4779],{},"🗒️Note:"," Audio is coming to MEAI, but as of this writing isn't available yet. 
When it is available we'll update this course.",[28,4782,4784],{"id":4783},"implementing-speech-to-text-with-cognitive-services","Implementing speech-to-text with Cognitive Services",[33,4786,4787,4788,4791],{},"You'll need the ",[391,4789,4790],{},"Microsoft.CognitiveServices.Speech"," NuGet package for this example.",[505,4793,4794],{},[33,4795,3096,4796,4513,4798,405],{},[391,4797,3099],{},[356,4799,4518],{"href":4800,"rel":4801},"https://github.com/microsoft/Generative-AI-for-beginners-dotnet/blob/main/03-CoreGenerativeAITechniques/src/Audio-01-SpeechMic",[360],[1053,4803,4804,4845,4867],{},[153,4805,4806,4807,4810,4811],{},"The first thing we'll do (after grabbing the key and region of the model's deployment) is instantiate a ",[594,4808,4809],{},"SpeechTranslationConfig"," object. This tells the model that we'll be taking in spoken English and translating it to written Spanish.",[588,4812,4814],{"className":590,"code":4813,"language":592,"meta":11,"style":11},"var speechKey = \"\u003CFROM YOUR MODEL DEPLOYMENT>\";\nvar speechRegion = \"\u003CFROM YOUR MODEL DEPLOYMENT>\";\n\nvar speechTranslationConfig = SpeechTranslationConfig.FromSubscription(speechKey, speechRegion);\nspeechTranslationConfig.SpeechRecognitionLanguage = \"en-US\";\nspeechTranslationConfig.AddTargetLanguage(\"es-ES\");\n",[594,4815,4816,4821,4826,4830,4835,4840],{"__ignoreMap":11},[597,4817,4818],{"class":599,"line":600},[597,4819,4820],{},"var speechKey = \"\u003CFROM YOUR MODEL DEPLOYMENT>\";\n",[597,4822,4823],{"class":599,"line":12},[597,4824,4825],{},"var speechRegion = \"\u003CFROM YOUR MODEL DEPLOYMENT>\";\n",[597,4827,4828],{"class":599,"line":109},[597,4829,1485],{"emptyLinePlaceholder":17},[597,4831,4832],{"class":599,"line":616},[597,4833,4834],{},"var speechTranslationConfig = SpeechTranslationConfig.FromSubscription(speechKey, speechRegion);\n",[597,4836,4837],{"class":599,"line":622},[597,4838,4839],{},"speechTranslationConfig.SpeechRecognitionLanguage = \"en-US\";\n",[597,4841,4842],{"class":599,"line":628},[597,4843,4844],{},"speechTranslationConfig.AddTargetLanguage(\"es-ES\");\n",[153,4846,4847,4848,4851,4852],{},"Next up we need to get access to the microphone and then new up a ",[594,4849,4850],{},"TranslationRecognizer"," object which will do the communication with the model.",[588,4853,4855],{"className":590,"code":4854,"language":592,"meta":11,"style":11},"using var audioConfig = AudioConfig.FromDefaultMicrophoneInput();\nusing var translationRecognizer = new TranslationRecognizer(speechTranslationConfig, audioConfig);\n",[594,4856,4857,4862],{"__ignoreMap":11},[597,4858,4859],{"class":599,"line":600},[597,4860,4861],{},"using var audioConfig = AudioConfig.FromDefaultMicrophoneInput();\n",[597,4863,4864],{"class":599,"line":12},[597,4865,4866],{},"using var translationRecognizer = new TranslationRecognizer(speechTranslationConfig, audioConfig);\n",[153,4868,4869,4870],{},"Finally, we'll call the model and set up a function to handle its return.",[588,4871,4873],{"className":590,"code":4872,"language":592,"meta":11,"style":11},"var translationRecognitionResult = await translationRecognizer.RecognizeOnceAsync();\nOutputSpeechRecognitionResult(translationRecognitionResult);\n\nvoid OutputSpeechRecognitionResult(TranslationRecognitionResult translationRecognitionResult)\n{\n    switch (translationRecognitionResult.Reason)\n    {\n        case ResultReason.TranslatedSpeech:\n            Console.WriteLine($\"RECOGNIZED: Text={translationRecognitionResult.Text}\");\n            foreach 
(var element in translationRecognitionResult.Translations)\n            {\n                Console.WriteLine($\"TRANSLATED into '{element.Key}': {element.Value}\");\n            }\n            break;\n        case ResultReason.NoMatch:\n            // handle when speech could not be recognized\n            break;\n        case ResultReason.Canceled:\n            // handle an error condition\n            break;\n    }\n}\n",[594,4874,4875,4880,4885,4889,4894,4898,4903,4907,4912,4917,4922,4927,4932,4937,4942,4947,4952,4956,4961,4966,4970,4974],{"__ignoreMap":11},[597,4876,4877],{"class":599,"line":600},[597,4878,4879],{},"var translationRecognitionResult = await translationRecognizer.RecognizeOnceAsync();\n",[597,4881,4882],{"class":599,"line":12},[597,4883,4884],{},"OutputSpeechRecognitionResult(translationRecognitionResult);\n",[597,4886,4887],{"class":599,"line":109},[597,4888,1485],{"emptyLinePlaceholder":17},[597,4890,4891],{"class":599,"line":616},[597,4892,4893],{},"void OutputSpeechRecognitionResult(TranslationRecognitionResult translationRecognitionResult)\n",[597,4895,4896],{"class":599,"line":622},[597,4897,608],{},[597,4899,4900],{"class":599,"line":628},[597,4901,4902],{},"    switch (translationRecognitionResult.Reason)\n",[597,4904,4905],{"class":599,"line":634},[597,4906,3380],{},[597,4908,4909],{"class":599,"line":3147},[597,4910,4911],{},"        case ResultReason.TranslatedSpeech:\n",[597,4913,4914],{"class":599,"line":3153},[597,4915,4916],{},"            Console.WriteLine($\"RECOGNIZED: Text={translationRecognitionResult.Text}\");\n",[597,4918,4919],{"class":599,"line":3159},[597,4920,4921],{},"            foreach (var element in translationRecognitionResult.Translations)\n",[597,4923,4924],{"class":599,"line":3165},[597,4925,4926],{},"            {\n",[597,4928,4929],{"class":599,"line":3171},[597,4930,4931],{},"                Console.WriteLine($\"TRANSLATED into '{element.Key}': {element.Value}\");\n",[597,4933,4934],{"class":599,"line":3177},[597,4935,4936],{},"            }\n",[597,4938,4939],{"class":599,"line":3183},[597,4940,4941],{},"            break;\n",[597,4943,4944],{"class":599,"line":3189},[597,4945,4946],{},"        case ResultReason.NoMatch:\n",[597,4948,4949],{"class":599,"line":3194},[597,4950,4951],{},"            // handle when speech could not be recognized\n",[597,4953,4954],{"class":599,"line":3200},[597,4955,4941],{},[597,4957,4958],{"class":599,"line":3206},[597,4959,4960],{},"        case ResultReason.Canceled:\n",[597,4962,4963],{"class":599,"line":3211},[597,4964,4965],{},"            // handle an error condition\n",[597,4967,4968],{"class":599,"line":3217},[597,4969,4941],{},[597,4971,4972],{"class":599,"line":3397},[597,4973,3390],{},[597,4975,4976],{"class":599,"line":3403},[597,4977,637],{},[33,4979,4980],{},"Using AI to process audio is a bit different than what we have been doing because we are using Azure AI Speech services to do so, but the results of translating spoken audio to text are pretty powerful.",[505,4982,4983],{},[33,4984,1495,4985,3247,4987,405],{},[391,4986,1498],{},[356,4988,3252],{"href":3250,"rel":4989},[360],[33,4991,4992,4993,4998],{},"We have another example that ",[356,4994,4997],{"href":4995,"rel":4996},"https://github.com/microsoft/Generative-AI-for-beginners-dotnet/blob/main/03-CoreGenerativeAITechniques/src/Audio-02-RealTimeAudio",[360],"demonstrates how to perform real-time audio conversation with Azure Open AI"," - check it 
out!",[135,5000,3676],{"id":1515},[150,5002,5003],{},[153,5004,5005],{},[356,5006,5009],{"href":5007,"rel":5008},"https://learn.microsoft.com/dotnet/ai/quickstarts/quickstart-openai-generate-images?tabs=azd&pivots=openai",[360],"Generate images with AI and .NET",[135,5011,3703],{"id":3702},[33,5013,5014],{},"You've learned how to add vision and audio capabilities to your .NET applications, in the next lesson find out how to create AI that has some ability to act autonomously.",[33,5016,783,5017,405],{},[356,5018,5019],{"href":3028},"Check out AI Agents",[789,5021,791],{},{"title":11,"searchDepth":12,"depth":12,"links":5023},[5024,5027,5030,5031],{"id":4469,"depth":12,"text":4470,"children":5025},[5026],{"id":4490,"depth":109,"text":4491},{"id":4740,"depth":12,"text":4741,"children":5028},[5029],{"id":4783,"depth":109,"text":4784},{"id":1515,"depth":12,"text":3676},{"id":3702,"depth":12,"text":3703},{},"/programming-language/platform/dotnet/generative-ai-for-beginners-dotnet/core-generative-ai-techniques/vision-audio",{"title":4456,"description":4464},"Programming Language/Platform/DotNet/Generative-AI-for-beginners-dotnet/3.Core-Generative-AI-Techniques/4.vision-audio","hyaqY82sYsE4mAieqsNcf1eiGU3pR5HNZd-e3jfrn2c",{"id":5038,"title":5039,"body":5040,"description":5047,"extension":14,"meta":5704,"navigation":17,"path":5705,"seo":5706,"stem":5707,"__hash__":5708},"content/Programming Language/Platform/DotNet/Generative-AI-for-beginners-dotnet/3.Core-Generative-AI-Techniques/5.agents.md","AI Agents",{"type":8,"value":5041,"toc":5694},[5042,5045,5048,5050,5060,5064,5079,5082,5095,5099,5102,5138,5142,5149,5160,5164,5184,5191,5194,5223,5226,5230,5233,5604,5613,5616,5618,5621,5635,5646,5649,5651,5680,5682,5685,5692],[343,5043,5039],{"id":5044},"ai-agents",[33,5046,5047],{},"In this lesson, you will learn to create an AI entity that... makes decisions and executes actions without continuous human interaction? That's right, AI agents are able to perform specific tasks independently.",[351,5049],{},[33,5051,5052],{},[356,5053,5056],{"href":5054,"rel":5055},"https://aka.ms/genainnet/videos/lesson3-agents",[360],[145,5057],{"alt":5058,"src":5059},"Agents explainer video","content/generative-ai/images/LIM_GAN_08_thumb_w480.png",[33,5061,5062],{},[368,5063,370],{},[33,5065,5066,5067,5070,5071,5074,5075,5078],{},"AI agents allow LLMs to evolve from assistants into entities capable of taking actions on behalf of users. Agents are even able to interact with other agents to perform tasks. Some of the key attributes of an agent include a level of ",[391,5068,5069],{},"autonomy"," allowing the agent to initiate actions based on their programming which leads to the ability for ",[391,5072,5073],{},"decision-making"," based on pre-defined objectives. They are also ",[391,5076,5077],{},"adaptable"," in that they learn and adjust to improve performance over time.",[33,5080,5081],{},"One key thing to keep in mind when building agents is that they are focused on doing only thing. You want to narrow down their purpose as much as possible.",[505,5083,5084],{},[33,5085,5086,5087,5089,5090,405],{},"🧑‍🏫",[391,5088,512],{},": Learn more about the fundamentals of AI Agents ",[356,5091,5094],{"href":5092,"rel":5093},"https://github.com/microsoft/generative-ai-for-beginners/tree/main/17-ai-agents",[360],"Generative AI for Beginners: AI Agents",[135,5096,5098],{"id":5097},"creating-an-ai-agent","Creating an AI Agent",[33,5100,5101],{},"We'll be working with a couple of new concepts in order to build an AI agent in .NET. 
We'll be using a new SDK and will have to do some additional setup in Azure AI Foundry to get things started.",[505,5103,5104,5115],{},[33,5105,3096,5106,5108,5109,5114],{},[391,5107,3099],{},": We'll be working from the ",[356,5110,5113],{"href":5111,"rel":5112},"https://github.com/microsoft/Generative-AI-for-beginners-dotnet/blob/main/03-CoreGenerativeAITechniques/src/AgentLabs-01-Simple",[360],"AgentLabs-01-Simple sample"," for this lesson.",[33,5116,5117,5118,5121,5122,954,5127,954,5132,5137],{},"We did include some more advanced samples in the ",[594,5119,5120],{},"/src/"," folder as well. You can view the READMEs of ",[356,5123,5126],{"href":5124,"rel":5125},"https://github.com/microsoft/Generative-AI-for-beginners-dotnet/blob/main/03-CoreGenerativeAITechniques/src/AgentLabs-02-Functions",[360],"AgentLabs-02-Functions",[356,5128,5131],{"href":5129,"rel":5130},"https://github.com/microsoft/Generative-AI-for-beginners-dotnet/blob/main/03-CoreGenerativeAITechniques/src/AgentLabs-03-OpenAPIs",[360],"AgentLabs-03-OpenAPIs",[356,5133,5136],{"href":5134,"rel":5135},"https://github.com/microsoft/Generative-AI-for-beginners-dotnet/blob/main/03-CoreGenerativeAITechniques/src/AgentLabs-03-PythonParksInformationServer",[360],"AgentLabs-03-PythonParksInformationServer"," for more info on them.",[28,5139,5141],{"id":5140},"azure-ai-agent-service","Azure AI Agent Service",[33,5143,5144,5145,405],{},"We're going to introduce a new Azure Service that will help us build agents, the appropriately named ",[356,5146,5141],{"href":5147,"rel":5148},"https://learn.microsoft.com/azure/ai-services/agents/overview",[360],[33,5150,5151,5152,405],{},"To run the code samples included in this lesson, you'll need to perform some additional setup in Azure AI Foundry. You can follow ",[356,5153,5156,5157],{"href":5154,"rel":5155},"https://learn.microsoft.com/azure/ai-services/agents/quickstart?pivots=programming-language-csharp",[360],"these instructions to set up a ",[391,5158,5159],{},"Basic Agent",[28,5161,5163],{"id":5162},"azure-ai-projects-library","Azure AI Projects library",[33,5165,5166,5167,5170,5171,5174,5175,5178,5179,5183],{},"Agents are composed of three parts. The ",[391,5168,5169],{},"LLM"," or the model. ",[391,5172,5173],{},"State"," or the context (much like a conversation) that helps guide decisions based on past results. And ",[391,5176,5177],{},"Tools"," which are like ",[356,5180,5182],{"href":5181},"./lm-completions-functions#function-calling","functions we learned about before"," that provide a bridge between the model and external systems.",[33,5185,5186,5187,5190],{},"So, in theory, you could build AI Agents with what you've learned already. But the ",[391,5188,5189],{},"Azure AI Projects for .NET"," library makes developing agents easier by providing an API that streamlines a lot of the typical tasks for you.",[33,5192,5193],{},"There are a couple of concepts (which map to classes) to understand when working with the Azure AI Projects library.",[150,5195,5196,5202,5208,5217],{},[153,5197,5198,5201],{},[594,5199,5200],{},"AgentClient",": The overall client that creates and hosts the agents, manages threads in which they run, and handles the connection to the cloud.",[153,5203,5204,5207],{},[594,5205,5206],{},"Agent",": The agent that holds instructions on what it's to do as well as definitions for tools it has access to.",[153,5209,5210,5213,5214,5216],{},[594,5211,5212],{},"ThreadMessage",": These are messages - almost like prompts we learned about before - that get passed to the agent. 
Agents also create ",[594,5215,5212],{}," objects to communicate.",[153,5218,5219,5222],{},[594,5220,5221],{},"ThreadRun",": A thread on which messages are passed to the agent. The thread is started, can be given additional instructions, and is then polled as to its status.",[33,5224,5225],{},"Let's see a simple example of this in action!",[28,5227,5229],{"id":5228},"build-a-math-agent","Build a math agent",[33,5231,5232],{},"We'll be building a single-purpose agent that acts as a tutor to math students. Its sole purpose in life is to solve and then explain math problems the user asks about.",[1053,5234,5235,5266,5352,5416,5474,5519],{},[153,5236,5237,5238,5241,5242,5257,5259,5260,5262],{},"To start with, we need to create an ",[594,5239,5240],{},"AgentsClient"," object that is responsible for managing the connection to Azure, the agent itself, the threads, the messages, and so on.",[588,5243,5245],{"className":590,"code":5244,"language":592,"meta":11,"style":11},"string projectConnectionString = \"\u003C YOU GET THIS FROM THE PROJECT IN AI FOUNDRY >\";\nAgentsClient client = new(projectConnectionString, new DefaultAzureCredential());\n",[594,5246,5247,5252],{"__ignoreMap":11},[597,5248,5249],{"class":599,"line":600},[597,5250,5251],{},"string projectConnectionString = \"\u003C YOU GET THIS FROM THE PROJECT IN AI FOUNDRY >\";\n",[597,5253,5254],{"class":599,"line":12},[597,5255,5256],{},"AgentsClient client = new(projectConnectionString, new DefaultAzureCredential());\n",[704,5258],{},"You can find the project connection string in AI Foundry by opening up the Hub you created, then the project. It will be on the right-hand side.",[704,5261],{},[145,5263],{"alt":5264,"src":5265},"Screenshot of the project homepage in AI Foundry with the project connection string highlighted in red","content/generative-ai/images/project-connection-string.png",[153,5267,5268,5269,5299,5301,5302,5305,5306,5309,5310,5313,5314,5327,5329,5330,5333,5334,5337,5338,5341,5342,5345,5346,5348,5349,5351],{},"Next we want to create the tutor agent. Remember, it should be focused only on one thing.",[588,5270,5272],{"className":590,"code":5271,"language":592,"meta":11,"style":11},"Agent tutorAgent = (await client.CreateAgentAsync(\nmodel: \"gpt-4o\",\nname: \"Math Tutor\",\ninstructions: \"You are a personal math tutor. Write and run code to answer math questions.\",\ntools: [new CodeInterpreterToolDefinition()])).Value;\n",[594,5273,5274,5279,5284,5289,5294],{"__ignoreMap":11},[597,5275,5276],{"class":599,"line":600},[597,5277,5278],{},"Agent tutorAgent = (await client.CreateAgentAsync(\n",[597,5280,5281],{"class":599,"line":12},[597,5282,5283],{},"model: \"gpt-4o\",\n",[597,5285,5286],{"class":599,"line":109},[597,5287,5288],{},"name: \"Math Tutor\",\n",[597,5290,5291],{"class":599,"line":616},[597,5292,5293],{},"instructions: \"You are a personal math tutor. Write and run code to answer math questions.\",\n",[597,5295,5296],{"class":599,"line":622},[597,5297,5298],{},"tools: [new CodeInterpreterToolDefinition()])).Value;\n",[704,5300],{},"A couple of things to note here. The first is the ",[594,5303,5304],{},"tools"," parameter. We're creating a ",[594,5307,5308],{},"CodeInterpreterToolDefinition"," object (that is a part of the ",[391,5311,5312],{},"Azure.AI.Projects"," SDK) that will allow the agent to create and execute code.",[505,5315,5316],{},[33,5317,3225,5318,5321,5322,5326],{},[391,5319,5320],{},"Note",": You can create your own tools too. 
See the ",[356,5323,5325],{"href":5124,"rel":5324},[360],"Functions"," to learn more.",[704,5328],{},"Second note the ",[594,5331,5332],{},"instructions"," that are being sent along. It's a prompt and we're limiting it to answering math questions. Then last creating the agent is an async operation. That's because it's creating an object within Azure AI Foundry Agents service. So we both ",[594,5335,5336],{},"await"," the ",[594,5339,5340],{},"CreateAgentAsync"," function and then grab the ",[594,5343,5344],{},"Value"," of its return to get at the actual ",[594,5347,5206],{}," object. You'll see this pattern occur over and over again when creating objects with the ",[391,5350,5312],{}," SDK.",[153,5353,5354,5355,5358,5359,5361,5362,5406,5408,5409,5411,5412,5415],{},"An ",[594,5356,5357],{},"AgentThread"," is an object that handles the communication between individual agents and the user and so on. We'll need to create that so we can add a ",[594,5360,5212],{}," on to it. And in this case it's the user's first question.",[588,5363,5365],{"className":590,"code":5364,"language":592,"meta":11,"style":11},"AgentThread thread = (await client.CreateThreadAsync()).Value;\n\n// Creating the first user message to AN agent - notice how we're putting it on a thread\nThreadMessage userMessage = (await client.CreateMessageAsync(\n    thread.Id,\n    MessageRole.User,\n    \"Hello, I need to solve the equation `3x + 11 = 14`. Can you help me?\")\n).Value;\n",[594,5366,5367,5372,5376,5381,5386,5391,5396,5401],{"__ignoreMap":11},[597,5368,5369],{"class":599,"line":600},[597,5370,5371],{},"AgentThread thread = (await client.CreateThreadAsync()).Value;\n",[597,5373,5374],{"class":599,"line":12},[597,5375,1485],{"emptyLinePlaceholder":17},[597,5377,5378],{"class":599,"line":109},[597,5379,5380],{},"// Creating the first user message to AN agent - notice how we're putting it on a thread\n",[597,5382,5383],{"class":599,"line":616},[597,5384,5385],{},"ThreadMessage userMessage = (await client.CreateMessageAsync(\n",[597,5387,5388],{"class":599,"line":622},[597,5389,5390],{},"    thread.Id,\n",[597,5392,5393],{"class":599,"line":628},[597,5394,5395],{},"    MessageRole.User,\n",[597,5397,5398],{"class":599,"line":634},[597,5399,5400],{},"    \"Hello, I need to solve the equation `3x + 11 = 14`. Can you help me?\")\n",[597,5402,5403],{"class":599,"line":3147},[597,5404,5405],{},").Value;\n",[704,5407],{},"Note the ",[594,5410,5212],{}," has a type of ",[594,5413,5414],{},"MessageRole.User",". And notice we're not sending the message to a specific agent, rather we're just putting it onto a thread.",[153,5417,5418,5419],{},"Next up, we're going to get the agent to provide an initial response and put that on the thread and then kick the thread off. When we start the thread we're going to provide the initial agent's id to run and any additional instructions.",[588,5420,5422],{"className":590,"code":5421,"language":592,"meta":11,"style":11},"ThreadMessage agentMessage =  await client.CreateMessageAsync(\n    thread.Id,\n    MessageRole.Agent,\n    \"Please address the user as their name. The user has a basic account, so just share the answer to the question.\")\n).Value;\n\nThreadRun run = (await client.CreateRunAsync(\n    thread.Id,\n    assistantId: agentMathTutor.Id,\n    additionalInstructions: \"You are working in FREE TIER EXPERIENCE mode`, every user has premium account for a short period of time. 
Explain in detail the steps to answer the user questions\")\n).Value;\n",[594,5423,5424,5429,5433,5438,5443,5447,5451,5456,5460,5465,5470],{"__ignoreMap":11},[597,5425,5426],{"class":599,"line":600},[597,5427,5428],{},"ThreadMessage agentMessage = (await client.CreateMessageAsync(\n",[597,5430,5431],{"class":599,"line":12},[597,5432,5390],{},[597,5434,5435],{"class":599,"line":109},[597,5436,5437],{},"    MessageRole.Agent,\n",[597,5439,5440],{"class":599,"line":616},[597,5441,5442],{},"    \"Please address the user by their name. The user has a basic account, so just share the answer to the question.\")\n",[597,5444,5445],{"class":599,"line":622},[597,5446,5405],{},[597,5448,5449],{"class":599,"line":628},[597,5450,1485],{"emptyLinePlaceholder":17},[597,5452,5453],{"class":599,"line":634},[597,5454,5455],{},"ThreadRun run = (await client.CreateRunAsync(\n",[597,5457,5458],{"class":599,"line":3147},[597,5459,5390],{},[597,5461,5462],{"class":599,"line":3153},[597,5463,5464],{},"    assistantId: tutorAgent.Id,\n",[597,5466,5467],{"class":599,"line":3159},[597,5468,5469],{},"    additionalInstructions: \"You are working in FREE TIER EXPERIENCE mode, every user has a premium account for a short period of time. Explain in detail the steps to answer the user questions\")\n",[597,5471,5472],{"class":599,"line":3165},[597,5473,5405],{},[153,5475,5476,5477],{},"All that's left then is to check the status of the run:",[588,5478,5480],{"className":590,"code":5479,"language":592,"meta":11,"style":11},"do\n{\n    await Task.Delay(TimeSpan.FromMilliseconds(100));\n    run = (await client.GetRunAsync(thread.Id, run.Id)).Value;\n\n    Console.WriteLine($\"Run Status: {run.Status}\");\n}\nwhile (run.Status == RunStatus.Queued || run.Status == RunStatus.InProgress);\n",[594,5481,5482,5487,5491,5496,5501,5505,5510,5514],{"__ignoreMap":11},[597,5483,5484],{"class":599,"line":600},[597,5485,5486],{},"do\n",[597,5488,5489],{"class":599,"line":12},[597,5490,608],{},[597,5492,5493],{"class":599,"line":109},[597,5494,5495],{},"    await Task.Delay(TimeSpan.FromMilliseconds(100));\n",[597,5497,5498],{"class":599,"line":616},[597,5499,5500],{},"    run = (await client.GetRunAsync(thread.Id, run.Id)).Value;\n",[597,5502,5503],{"class":599,"line":622},[597,5504,1485],{"emptyLinePlaceholder":17},[597,5506,5507],{"class":599,"line":628},[597,5508,5509],{},"    Console.WriteLine($\"Run Status: {run.Status}\");\n",[597,5511,5512],{"class":599,"line":634},[597,5513,637],{},[597,5515,5516],{"class":599,"line":3147},[597,5517,5518],{},"while (run.Status == RunStatus.Queued || run.Status == RunStatus.InProgress);\n",[153,5520,5521,5522],{},"And then display the messages from the results:",[588,5523,5525],{"className":590,"code":5524,"language":592,"meta":11,"style":11},"Response\u003CPageableList\u003CThreadMessage>> afterRunMessagesResponse = await client.GetMessagesAsync(thread.Id);\nIReadOnlyList\u003CThreadMessage> messages = afterRunMessagesResponse.Value.Data;\n\n// sort by creation date\nmessages = messages.OrderBy(m => m.CreatedAt).ToList();\n\nforeach (ThreadMessage msg in messages)\n{\n    Console.Write($\"{msg.CreatedAt:yyyy-MM-dd HH:mm:ss} - {msg.Role,10}: \");\n\n    foreach (MessageContent contentItem in msg.ContentItems)\n    {\n        if (contentItem is MessageTextContent textItem)\n            Console.Write(textItem.Text);\n    }\n    
Console.WriteLine();\n}\n",[594,5526,5527,5532,5537,5541,5546,5551,5555,5560,5564,5569,5573,5578,5582,5587,5592,5596,5600],{"__ignoreMap":11},[597,5528,5529],{"class":599,"line":600},[597,5530,5531],{},"Response\u003CPageableList\u003CThreadMessage>> afterRunMessagesResponse = await client.GetMessagesAsync(thread.Id);\n",[597,5533,5534],{"class":599,"line":12},[597,5535,5536],{},"IReadOnlyList\u003CThreadMessage> messages = afterRunMessagesResponse.Value.Data;\n",[597,5538,5539],{"class":599,"line":109},[597,5540,1485],{"emptyLinePlaceholder":17},[597,5542,5543],{"class":599,"line":616},[597,5544,5545],{},"// sort by creation date\n",[597,5547,5548],{"class":599,"line":622},[597,5549,5550],{},"messages = messages.OrderBy(m => m.CreatedAt).ToList();\n",[597,5552,5553],{"class":599,"line":628},[597,5554,1485],{"emptyLinePlaceholder":17},[597,5556,5557],{"class":599,"line":634},[597,5558,5559],{},"foreach (ThreadMessage msg in messages)\n",[597,5561,5562],{"class":599,"line":3147},[597,5563,608],{},[597,5565,5566],{"class":599,"line":3153},[597,5567,5568],{},"    Console.Write($\"{msg.CreatedAt:yyyy-MM-dd HH:mm:ss} - {msg.Role,10}: \");\n",[597,5570,5571],{"class":599,"line":3159},[597,5572,1485],{"emptyLinePlaceholder":17},[597,5574,5575],{"class":599,"line":3165},[597,5576,5577],{},"    foreach (MessageContent contentItem in msg.ContentItems)\n",[597,5579,5580],{"class":599,"line":3171},[597,5581,3380],{},[597,5583,5584],{"class":599,"line":3177},[597,5585,5586],{},"        if (contentItem is MessageTextContent textItem)\n",[597,5588,5589],{"class":599,"line":3183},[597,5590,5591],{},"            Console.Write(textItem.Text);\n",[597,5593,5594],{"class":599,"line":3189},[597,5595,3390],{},[597,5597,5598],{"class":599,"line":3194},[597,5599,4232],{},[597,5601,5602],{"class":599,"line":3200},[597,5603,637],{},[505,5605,5606],{},[33,5607,1495,5608,3247,5610,405],{},[391,5609,1498],{},[356,5611,3252],{"href":3250,"rel":5612},[360],[33,5614,5615],{},"The logical next step is to start to use multiple agents to create an autonomous system. 
For example, you could add a second agent that checks whether the user has a premium account or not.",[135,5617,1509],{"id":1508},[33,5619,5620],{},"AI Agents are autonomous AI entities that go beyond simple chat interactions - they can:",[150,5622,5623,5626,5629,5632],{},[153,5624,5625],{},"Make Independent Decisions: Execute tasks without constant human input",[153,5627,5628],{},"Maintain Context: Hold state and remember previous interactions",[153,5630,5631],{},"Use Tools: Access external systems and APIs to accomplish tasks",[153,5633,5634],{},"Collaborate: Work with other agents to solve complex problems",[33,5636,5637,5638,5641,5642,5645],{},"And you learned how to use the ",[391,5639,5640],{},"Azure AI Agents"," service with the ",[391,5643,5644],{},"Azure AI Project"," SDK to create a rudimentary agent.",[33,5647,5648],{},"Think of agents as AI assistants with agency - they don't just respond, they act based on their programming and objectives.",[135,5650,3676],{"id":1515},[150,5652,5653,5660,5667,5674],{},[153,5654,5655],{},[356,5656,5659],{"href":5657,"rel":5658},"https://learn.microsoft.com/dotnet/ai/quickstarts/quickstart-assistants?pivots=openai",[360],"Build a minimal agent with .NET",[153,5661,5662],{},[356,5663,5666],{"href":5664,"rel":5665},"https://techcommunity.microsoft.com/blog/educatordeveloperblog/using-azure-ai-agent-service-with-autogen--semantic-kernel-to-build-a-multi-agen/4363121",[360],"Multi-agent orchestration",[153,5668,5669],{},[356,5670,5673],{"href":5671,"rel":5672},"https://learn.microsoft.com/semantic-kernel/frameworks/agent/?pivots=programming-language-csharp",[360],"Semantic Kernel Agent Framework",[153,5675,5676],{},[356,5677,5679],{"href":5092,"rel":5678},[360],"AI Agents - Beginners Series to GenAI",[135,5681,774],{"id":773},[33,5683,5684],{},"You've come a long way! 
From learning about simple one and done text completions to building agents!",[33,5686,783,5687,5691],{},[356,5688,5690],{"href":5689},"../practical-samples","In the next lesson see some real-life practical examples"," of using everything together.",[789,5693,791],{},{"title":11,"searchDepth":12,"depth":12,"links":5695},[5696,5701,5702,5703],{"id":5097,"depth":12,"text":5098,"children":5697},[5698,5699,5700],{"id":5140,"depth":109,"text":5141},{"id":5162,"depth":109,"text":5163},{"id":5228,"depth":109,"text":5229},{"id":1508,"depth":12,"text":1509},{"id":1515,"depth":12,"text":3676},{"id":773,"depth":12,"text":774},{},"/programming-language/platform/dotnet/generative-ai-for-beginners-dotnet/core-generative-ai-techniques/agents",{"title":5039,"description":5047},"Programming Language/Platform/DotNet/Generative-AI-for-beginners-dotnet/3.Core-Generative-AI-Techniques/5.agents","E5t-Uu4xRj8cMPjfpawccFZ-R9WnMC-Qg_62xBtefxs",{"id":5710,"title":5711,"body":5712,"description":5719,"extension":14,"meta":7774,"navigation":17,"path":7775,"seo":7776,"stem":7777,"__hash__":7778},"content/Programming Language/Platform/DotNet/Generative-AI-for-beginners-dotnet/4.Practical-Samples/index.md","Canonical Generative AI Samples",{"type":8,"value":5713,"toc":7760},[5714,5717,5720,5722,5726,5734,5738,5807,5817,5821,5824,5834,5846,5849,5853,5863,5867,5870,5873,5886,5892,5901,5907,5910,6036,6039,6042,6122,6129,6192,6199,6304,6307,6310,6317,6320,6323,6603,6606,6609,6612,6618,6621,6627,6630,6633,6636,6647,6651,6658,6661,6670,6673,6683,6687,6690,6700,6706,6713,6787,6790,6852,6855,6953,6960,6963,6972,6975,6985,6989,6992,7003,7009,7012,7026,7029,7035,7038,7113,7134,7224,7241,7499,7509,7696,7699,7704,7710,7719,7721,7724,7731,7733,7747,7749,7752,7758],[343,5715,5711],{"id":5716},"canonical-generative-ai-samples",[33,5718,5719],{},"See all the concepts you've learned in action in these real-world and practical samples.",[351,5721],{},[135,5723,5725],{"id":5724},"what-youll-achieve","What you'll achieve",[150,5727,5728,5731],{},[153,5729,5730],{},"See how GenAI fits into existing applications.",[153,5732,5733],{},"Understand how Agents work in complex scenarios.",[135,5735,5737],{"id":5736},"included-samples","Included samples",[150,5739,5740],{},[153,5741,5742,5745],{},[356,5743,5711],{"href":5744},"#canonical-generative-ai-samples",[150,5746,5747,5752,5757,5783,5789],{},[153,5748,5749],{},[356,5750,5725],{"href":5751},"#what-youll-achieve",[153,5753,5754],{},[356,5755,5737],{"href":5756},"#included-samples",[153,5758,5759,5763],{},[356,5760,5762],{"href":5761},"#eshoplite-demos","eShopLite Demos",[150,5764,5765,5771,5777],{},[153,5766,5767],{},[356,5768,5770],{"href":5769},"#eshoplite-with-semantic-search","eShopLite with semantic search - In Memory",[153,5772,5773],{},[356,5774,5776],{"href":5775},"#eshoplite-with-semantic-search-using-azure-ai-search","eShopLite with semantic search - Azure AI Search",[153,5778,5779],{},[356,5780,5782],{"href":5781},"#eshoplite-with-realtime-audio","eShopLite with realtime audio",[153,5784,5785],{},[356,5786,5788],{"href":5787},"#creative-writer-agent","Creative Writer Agent",[153,5790,5791,5794],{},[356,5792,1509],{"href":5793},"#summary",[150,5795,5796,5801],{},[153,5797,5798],{},[356,5799,3676],{"href":5800},"#additional-resources",[153,5802,5803],{},[356,5804,5806],{"href":5805},"#next-steps","Next steps",[33,5808,5809],{},[356,5810,5813],{"href":5811,"rel":5812},"https://aka.ms/genainnet/videos/lesson4-overview",[360],[145,5814],{"alt":5815,"src":5816},"overall explainer 
video","content/generative-ai/images/LIM_GAN_09_thumb_w480.png",[33,5818,5819],{},[368,5820,370],{},[135,5822,5762],{"id":5823},"eshoplite-demos",[33,5825,5826,5827,5830,5831,5833],{},"For our first demos, we'll explore the ",[391,5828,5829],{},"eShopLite"," projects. ",[391,5832,5829],{}," is a simple e-commerce application for outdoor gear and camping enthusiasts that is augmented with Generative AI capabilities, such as search features optimization, customer support, and real-time audio analysis.",[33,5835,5836,5837,830,5841,5845],{},"These demos use ",[356,5838,829],{"href":5839,"rel":5840},"https://azure.microsoft.com/products/ai-services/openai-service",[360],[356,5842,5844],{"href":1625,"rel":5843},[360],"Azure AI Foundry Models"," to do their inferences (or the generative AI portion) for the applications.",[33,5847,5848],{},"In the first demo, we show how to use the Semantic Kernel to enhance the search capabilities, which can understand the context of the user's queries and provide accurate results.",[28,5850,5852],{"id":5851},"eshoplite-with-semantic-search","eShopLite with semantic search",[33,5854,5855],{},[356,5856,5859],{"href":5857,"rel":5858},"https://aka.ms/genainnet/videos/lesson4-eshoplite-semanticsearch",[360],[145,5860],{"alt":5861,"src":5862},"eShop Semantic Search explainer video","content/generative-ai/images/LIM_GAN_10_thumb_w480.png",[33,5864,5865],{},[368,5866,370],{},[33,5868,5869],{},"In eShopLite with semantic search, we use Semantic Kernel to enhance the search capabilities of the e-commerce application. Semantic Kernel helps us create a more robust search engine that can understand the context of the user's queries and provide more accurate results.",[33,5871,5872],{},"For example, if a user searches for \"do you have something for cooking\", the search engine can understand that the user is looking for kitchenware and show the most relevant products, in context of our sample, it returns Camping Cookware.",[505,5874,5875],{},[33,5876,3096,5877,5880,5881],{},[391,5878,5879],{},"Sample",": Check out the ",[356,5882,5885],{"href":5883,"rel":5884},"https://aka.ms/netaieshoplitesemanticsearch",[360],"eShopLite semantic search sample",[33,5887,5888],{},[145,5889],{"alt":5890,"src":5891},"Image demonstrating the search capabilities in eShopLite","content/generative-ai/images/search-eshoplite.png",[33,5893,5894,5895,5900],{},"Semantic search can help users find the products they need more easily, leading to a better shopping experience and increased sales, to implement this feature, we need to have a vector store with the products, a search index, and a language model. The ",[356,5896,5899],{"href":5897,"rel":5898},"https://learn.microsoft.com/dotnet/aspire/get-started/aspire-overview",[360],".NET Aspire"," tooling helps to coordinate all the processes in the backend.",[33,5902,5903],{},[145,5904],{"alt":5905,"src":5906},"Image demonstrating the .NET Aspire Dashboard","content/generative-ai/images/aspire-dashboard.png",[33,5908,5909],{},"In the .NET Aspire dashboard, we can see the products, SQL, and store containers, which can interact with the language model. 
Looking deeper into the Aspire App Host, we have the following:",[588,5911,5913],{"className":590,"code":5912,"language":592,"meta":11,"style":11},"if (builder.ExecutionContext.IsPublishMode)\n{\n    // Add the Azure Application Insights for monitoring\n    var appInsights = builder.AddAzureApplicationInsights(\"appInsights\");\n    // Add the Azure OpenAI for the chat and embeddings deployments, the embedding is used for the vector entities\n    var chatDeploymentName = \"gpt-4o-mini\";\n    var embeddingsDeploymentName = \"text-embedding-ada-002\";\n    var aoai = builder.AddAzureOpenAI(\"openai\")\n        .AddDeployment(new AzureOpenAIDeployment(chatDeploymentName,\n        \"gpt-4o-mini\",\n        \"2024-07-18\",\n        \"GlobalStandard\",\n        10))\n        .AddDeployment(new AzureOpenAIDeployment(embeddingsDeploymentName,\n        \"text-embedding-ada-002\",\n        \"2\"));\n\n    products.WithReference(appInsights)\n        .WithReference(aoai)\n        .WithEnvironment(\"AI_ChatDeploymentName\", chatDeploymentName)\n        .WithEnvironment(\"AI_embeddingsDeploymentName\", embeddingsDeploymentName);\n\n    store.WithReference(appInsights)\n        .WithExternalHttpEndpoints();\n}\n",[594,5914,5915,5920,5924,5929,5934,5939,5944,5949,5954,5959,5964,5969,5974,5979,5984,5989,5994,5998,6003,6008,6013,6018,6022,6027,6032],{"__ignoreMap":11},[597,5916,5917],{"class":599,"line":600},[597,5918,5919],{},"if (builder.ExecutionContext.IsPublishMode)\n",[597,5921,5922],{"class":599,"line":12},[597,5923,608],{},[597,5925,5926],{"class":599,"line":109},[597,5927,5928],{},"    // Add the Azure Application Insights for monitoring\n",[597,5930,5931],{"class":599,"line":616},[597,5932,5933],{},"    var appInsights = builder.AddAzureApplicationInsights(\"appInsights\");\n",[597,5935,5936],{"class":599,"line":622},[597,5937,5938],{},"    // Add the Azure OpenAI for the chat and embeddings deployments, the embedding is used for the vector entities\n",[597,5940,5941],{"class":599,"line":628},[597,5942,5943],{},"    var chatDeploymentName = \"gpt-4o-mini\";\n",[597,5945,5946],{"class":599,"line":634},[597,5947,5948],{},"    var embeddingsDeploymentName = \"text-embedding-ada-002\";\n",[597,5950,5951],{"class":599,"line":3147},[597,5952,5953],{},"    var aoai = builder.AddAzureOpenAI(\"openai\")\n",[597,5955,5956],{"class":599,"line":3153},[597,5957,5958],{},"        .AddDeployment(new AzureOpenAIDeployment(chatDeploymentName,\n",[597,5960,5961],{"class":599,"line":3159},[597,5962,5963],{},"        \"gpt-4o-mini\",\n",[597,5965,5966],{"class":599,"line":3165},[597,5967,5968],{},"        \"2024-07-18\",\n",[597,5970,5971],{"class":599,"line":3171},[597,5972,5973],{},"        \"GlobalStandard\",\n",[597,5975,5976],{"class":599,"line":3177},[597,5977,5978],{},"        10))\n",[597,5980,5981],{"class":599,"line":3183},[597,5982,5983],{},"        .AddDeployment(new AzureOpenAIDeployment(embeddingsDeploymentName,\n",[597,5985,5986],{"class":599,"line":3189},[597,5987,5988],{},"        \"text-embedding-ada-002\",\n",[597,5990,5991],{"class":599,"line":3194},[597,5992,5993],{},"        \"2\"));\n",[597,5995,5996],{"class":599,"line":3200},[597,5997,1485],{"emptyLinePlaceholder":17},[597,5999,6000],{"class":599,"line":3206},[597,6001,6002],{},"    products.WithReference(appInsights)\n",[597,6004,6005],{"class":599,"line":3211},[597,6006,6007],{},"        .WithReference(aoai)\n",[597,6009,6010],{"class":599,"line":3217},[597,6011,6012],{},"        .WithEnvironment(\"AI_ChatDeploymentName\", 
chatDeploymentName)\n",[597,6014,6015],{"class":599,"line":3397},[597,6016,6017],{},"        .WithEnvironment(\"AI_embeddingsDeploymentName\", embeddingsDeploymentName);\n",[597,6019,6020],{"class":599,"line":3403},[597,6021,1485],{"emptyLinePlaceholder":17},[597,6023,6024],{"class":599,"line":3408},[597,6025,6026],{},"    store.WithReference(appInsights)\n",[597,6028,6029],{"class":599,"line":3414},[597,6030,6031],{},"        .WithExternalHttpEndpoints();\n",[597,6033,6034],{"class":599,"line":3420},[597,6035,637],{},[33,6037,6038],{},"The code above demonstrates how to add the Azure Application Insights for monitoring, the Azure OpenAI for the chat and embeddings deployments, and the embedding used for the vector entities.",[33,6040,6041],{},"For embedding and AOAI creation, it can be found at the product container, as follows:",[588,6043,6045],{"className":590,"code":6044,"language":592,"meta":11,"style":11},"var azureOpenAiClientName = \"openai\";\nbuilder.AddAzureOpenAIClient(azureOpenAiClientName);\n\n// get azure openai client and create Chat client from aspire hosting configuration\nbuilder.Services.AddSingleton\u003CChatClient>(serviceProvider =>\n{\n    var chatDeploymentName = \"gpt-4o-mini\";\n    var logger = serviceProvider.GetService\u003CILogger\u003CProgram>>()!;\n    logger.LogInformation($\"Chat client configuration, modelId: {chatDeploymentName}\");\n    ChatClient chatClient = null;\n    try\n    {\n        OpenAIClient client = serviceProvider.GetRequiredService\u003COpenAIClient>();\n        chatClient = client.GetChatClient(chatDeploymentName);\n    }...\n}\n",[594,6046,6047,6052,6057,6061,6066,6071,6075,6079,6084,6089,6094,6099,6103,6108,6113,6118],{"__ignoreMap":11},[597,6048,6049],{"class":599,"line":600},[597,6050,6051],{},"var azureOpenAiClientName = \"openai\";\n",[597,6053,6054],{"class":599,"line":12},[597,6055,6056],{},"builder.AddAzureOpenAIClient(azureOpenAiClientName);\n",[597,6058,6059],{"class":599,"line":109},[597,6060,1485],{"emptyLinePlaceholder":17},[597,6062,6063],{"class":599,"line":616},[597,6064,6065],{},"// get azure openai client and create Chat client from aspire hosting configuration\n",[597,6067,6068],{"class":599,"line":622},[597,6069,6070],{},"builder.Services.AddSingleton\u003CChatClient>(serviceProvider =>\n",[597,6072,6073],{"class":599,"line":628},[597,6074,608],{},[597,6076,6077],{"class":599,"line":634},[597,6078,5943],{},[597,6080,6081],{"class":599,"line":3147},[597,6082,6083],{},"    var logger = serviceProvider.GetService\u003CILogger\u003CProgram>>()!;\n",[597,6085,6086],{"class":599,"line":3153},[597,6087,6088],{},"    logger.LogInformation($\"Chat client configuration, modelId: {chatDeploymentName}\");\n",[597,6090,6091],{"class":599,"line":3159},[597,6092,6093],{},"    ChatClient chatClient = null;\n",[597,6095,6096],{"class":599,"line":3165},[597,6097,6098],{},"    try\n",[597,6100,6101],{"class":599,"line":3171},[597,6102,3380],{},[597,6104,6105],{"class":599,"line":3177},[597,6106,6107],{},"        OpenAIClient client = serviceProvider.GetRequiredService\u003COpenAIClient>();\n",[597,6109,6110],{"class":599,"line":3183},[597,6111,6112],{},"        chatClient = client.GetChatClient(chatDeploymentName);\n",[597,6114,6115],{"class":599,"line":3189},[597,6116,6117],{},"    }...\n",[597,6119,6120],{"class":599,"line":3194},[597,6121,637],{},[33,6123,6124,6125,6128],{},"The code above demonstrates how to get the Azure OpenAI client and create the Chat client from the Aspire hosting configuration. 
The ",[594,6126,6127],{},"chatDeploymentName"," is the name of the deployment used in the application. The same process is used to create the Embedding client, as follows:",[588,6130,6132],{"className":590,"code":6131,"language":592,"meta":11,"style":11},"// get azure openai client and create embedding client from aspire hosting configuration\nbuilder.Services.AddSingleton\u003CEmbeddingClient>(serviceProvider =>\n{\n    var embeddingsDeploymentName = \"text-embedding-ada-002\";\n    var logger = serviceProvider.GetService\u003CILogger\u003CProgram>>()!;\n    logger.LogInformation($\"Embeddings client configuration, modelId: {embeddingsDeploymentName}\");\n    EmbeddingClient embeddingsClient = null;\n    try\n    {\n        OpenAIClient client = serviceProvider.GetRequiredService\u003COpenAIClient>();\n        embeddingsClient = client.GetEmbeddingClient(embeddingsDeploymentName);\n    }...\n});\n",[594,6133,6134,6139,6144,6148,6152,6156,6161,6166,6170,6174,6178,6183,6187],{"__ignoreMap":11},[597,6135,6136],{"class":599,"line":600},[597,6137,6138],{},"// get azure openai client and create embedding client from aspire hosting configuration\n",[597,6140,6141],{"class":599,"line":12},[597,6142,6143],{},"builder.Services.AddSingleton\u003CEmbeddingClient>(serviceProvider =>\n",[597,6145,6146],{"class":599,"line":109},[597,6147,608],{},[597,6149,6150],{"class":599,"line":616},[597,6151,5948],{},[597,6153,6154],{"class":599,"line":622},[597,6155,6083],{},[597,6157,6158],{"class":599,"line":628},[597,6159,6160],{},"    logger.LogInformation($\"Embeddings client configuration, modelId: {embeddingsDeploymentName}\");\n",[597,6162,6163],{"class":599,"line":634},[597,6164,6165],{},"    EmbeddingClient embeddingsClient = null;\n",[597,6167,6168],{"class":599,"line":3147},[597,6169,6098],{},[597,6171,6172],{"class":599,"line":3153},[597,6173,3380],{},[597,6175,6176],{"class":599,"line":3159},[597,6177,6107],{},[597,6179,6180],{"class":599,"line":3165},[597,6181,6182],{},"        embeddingsClient = client.GetEmbeddingClient(embeddingsDeploymentName);\n",[597,6184,6185],{"class":599,"line":3171},[597,6186,6117],{},[597,6188,6189],{"class":599,"line":3177},[597,6190,6191],{},"});\n",[33,6193,6194,6195,6198],{},"With it we can create the ",[594,6196,6197],{},"MemoryContext",", as our vector store to compare to the user's query, and return the most relevant products, as follows:",[588,6200,6202],{"className":590,"code":6201,"language":592,"meta":11,"style":11},"// Iterate over the products and add them to the memory\n_logger.LogInformation(\"Adding product to memory: {Product}\", product.Name);\nvar productInfo = $\"[{product.Name}] is a product that costs [{product.Price}] and is described as [{product.Description}]\";\n\n// Create a new product vector\nvar productVector = new ProductVector\n{\n    Id = product.Id,\n    Name = product.Name,\n    Description = product.Description,\n    Price = product.Price,\n    ImageUrl = product.ImageUrl\n};\n\n// Generate the embedding for the product information\nvar result = await _embeddingClient.GenerateEmbeddingAsync(productInfo);\n\n// Convert the embedding result to a float array and assign it to the product vector\nproductVector.Vector = result.Value.ToFloats();\nvar recordId = await _productsCollection.UpsertAsync(productVector);\n_logger.LogInformation(\"Product added to memory: {Product} with recordId: {RecordId}\", product.Name, 
recordId);\n",[594,6203,6204,6209,6214,6219,6223,6228,6233,6237,6242,6247,6252,6257,6262,6266,6270,6275,6280,6284,6289,6294,6299],{"__ignoreMap":11},[597,6205,6206],{"class":599,"line":600},[597,6207,6208],{},"// Iterate over the products and add them to the memory\n",[597,6210,6211],{"class":599,"line":12},[597,6212,6213],{},"_logger.LogInformation(\"Adding product to memory: {Product}\", product.Name);\n",[597,6215,6216],{"class":599,"line":109},[597,6217,6218],{},"var productInfo = $\"[{product.Name}] is a product that costs [{product.Price}] and is described as [{product.Description}]\";\n",[597,6220,6221],{"class":599,"line":616},[597,6222,1485],{"emptyLinePlaceholder":17},[597,6224,6225],{"class":599,"line":622},[597,6226,6227],{},"// Create a new product vector\n",[597,6229,6230],{"class":599,"line":628},[597,6231,6232],{},"var productVector = new ProductVector\n",[597,6234,6235],{"class":599,"line":634},[597,6236,608],{},[597,6238,6239],{"class":599,"line":3147},[597,6240,6241],{},"    Id = product.Id,\n",[597,6243,6244],{"class":599,"line":3153},[597,6245,6246],{},"    Name = product.Name,\n",[597,6248,6249],{"class":599,"line":3159},[597,6250,6251],{},"    Description = product.Description,\n",[597,6253,6254],{"class":599,"line":3165},[597,6255,6256],{},"    Price = product.Price,\n",[597,6258,6259],{"class":599,"line":3171},[597,6260,6261],{},"    ImageUrl = product.ImageUrl\n",[597,6263,6264],{"class":599,"line":3177},[597,6265,3588],{},[597,6267,6268],{"class":599,"line":3183},[597,6269,1485],{"emptyLinePlaceholder":17},[597,6271,6272],{"class":599,"line":3189},[597,6273,6274],{},"// Generate the embedding for the product information\n",[597,6276,6277],{"class":599,"line":3194},[597,6278,6279],{},"var result = await _embeddingClient.GenerateEmbeddingAsync(productInfo);\n",[597,6281,6282],{"class":599,"line":3200},[597,6283,1485],{"emptyLinePlaceholder":17},[597,6285,6286],{"class":599,"line":3206},[597,6287,6288],{},"// Convert the embedding result to a float array and assign it to the product vector\n",[597,6290,6291],{"class":599,"line":3211},[597,6292,6293],{},"productVector.Vector = result.Value.ToFloats();\n",[597,6295,6296],{"class":599,"line":3217},[597,6297,6298],{},"var recordId = await _productsCollection.UpsertAsync(productVector);\n",[597,6300,6301],{"class":599,"line":3397},[597,6302,6303],{},"_logger.LogInformation(\"Product added to memory: {Product} with recordId: {RecordId}\", product.Name, recordId);\n",[33,6305,6306],{},"The code above demonstrates how to iterate over the products and add them to the memory.",[33,6308,6309],{},"After we create a new product vector, we use it to generate embedding for the product information, convert the embedding result to a float array, and assign it to the product vector.",[33,6311,6312,6313,6316],{},"Look at ",[594,6314,6315],{},"_productsCollection",", it is a reference to the container where the products are stored, using a CosmosDB call to get the response for the recordId. 
In this case, the recordId is used for logging.",[33,6318,6319],{},"The product is then added to the memory, repeating the process for each product in the collection.",[33,6321,6322],{},"After that, when the user searches for a product, we can compare the user's query with the product vectors and return the most relevant products.",[588,6324,6326],{"className":590,"code":6325,"language":592,"meta":11,"style":11},"try\n{\n    // Generate embedding for the search query\n    var result = await _embeddingClient.GenerateEmbeddingAsync(search);\n    var vectorSearchQuery = result.Value.ToFloats();\n\n    var searchOptions = new VectorSearchOptions()\n    {\n        Top = 1, // Retrieve the top 1 result\n        VectorPropertyName = \"Vector\"\n    };\n\n    // Search the vector database for the most similar product\n    var searchResults = await _productsCollection.VectorizedSearchAsync(vectorSearchQuery, searchOptions);\n    double searchScore = 0.0;\n    await foreach (var searchItem in searchResults.Results)\n    {\n        if (searchItem.Score > 0.5)\n        {\n            // Product found, retrieve the product details\n            firstProduct = new Product\n            {\n                Id = searchItem.Record.Id,\n                Name = searchItem.Record.Name,\n                Description = searchItem.Record.Description,\n                Price = searchItem.Record.Price,\n                ImageUrl = searchItem.Record.ImageUrl\n            };\n\n            searchScore = searchItem.Score.Value;\n            responseText = $\"The product [{firstProduct.Name}] fits with the search criteria [{search}][{searchItem.Score.Value.ToString(\"0.00\")}]\";\n            _logger.LogInformation($\"Search Response: {responseText}\");\n        }\n    }\n\n    // Generate a friendly response message using the found product information\n    var prompt = @$\"You are an intelligent assistant helping clients with their search about outdoor products. 
Generate a catchy and friendly message using the following information:\n    - User Question: {search}\n    - Found Product Name: {firstProduct.Name}\n    - Found Product Description: {firstProduct.Description}\n    - Found Product Price: {firstProduct.Price}\n    Include the found product information in the response to the user question.\";\n\n    var messages = new List\u003CChatMessage>\n    {\n        new SystemChatMessage(_systemPrompt),\n        new UserChatMessage(prompt)\n    };\n\n    _logger.LogInformation(\"{ChatHistory}\", JsonConvert.SerializeObject(messages));\n\n    var resultPrompt = await _chatClient.CompleteChatAsync(messages);\n}\n",[594,6327,6328,6333,6337,6342,6347,6352,6356,6361,6365,6370,6375,6380,6384,6389,6394,6399,6404,6408,6413,6418,6423,6428,6432,6437,6442,6447,6452,6457,6462,6467,6473,6479,6485,6491,6496,6501,6507,6513,6519,6525,6531,6537,6543,6548,6554,6559,6565,6571,6576,6581,6587,6592,6598],{"__ignoreMap":11},[597,6329,6330],{"class":599,"line":600},[597,6331,6332],{},"try\n",[597,6334,6335],{"class":599,"line":12},[597,6336,608],{},[597,6338,6339],{"class":599,"line":109},[597,6340,6341],{},"    // Generate embedding for the search query\n",[597,6343,6344],{"class":599,"line":616},[597,6345,6346],{},"    var result = await _embeddingClient.GenerateEmbeddingAsync(search);\n",[597,6348,6349],{"class":599,"line":622},[597,6350,6351],{},"    var vectorSearchQuery = result.Value.ToFloats();\n",[597,6353,6354],{"class":599,"line":628},[597,6355,1485],{"emptyLinePlaceholder":17},[597,6357,6358],{"class":599,"line":634},[597,6359,6360],{},"    var searchOptions = new VectorSearchOptions()\n",[597,6362,6363],{"class":599,"line":3147},[597,6364,3380],{},[597,6366,6367],{"class":599,"line":3153},[597,6368,6369],{},"        Top = 1, // Retrieve the top 1 result\n",[597,6371,6372],{"class":599,"line":3159},[597,6373,6374],{},"        VectorPropertyName = \"Vector\"\n",[597,6376,6377],{"class":599,"line":3165},[597,6378,6379],{},"    };\n",[597,6381,6382],{"class":599,"line":3171},[597,6383,1485],{"emptyLinePlaceholder":17},[597,6385,6386],{"class":599,"line":3177},[597,6387,6388],{},"    // Search the vector database for the most similar product\n",[597,6390,6391],{"class":599,"line":3183},[597,6392,6393],{},"    var searchResults = await _productsCollection.VectorizedSearchAsync(vectorSearchQuery, searchOptions);\n",[597,6395,6396],{"class":599,"line":3189},[597,6397,6398],{},"    double searchScore = 0.0;\n",[597,6400,6401],{"class":599,"line":3194},[597,6402,6403],{},"    await foreach (var searchItem in searchResults.Results)\n",[597,6405,6406],{"class":599,"line":3200},[597,6407,3380],{},[597,6409,6410],{"class":599,"line":3206},[597,6411,6412],{},"        if (searchItem.Score > 0.5)\n",[597,6414,6415],{"class":599,"line":3211},[597,6416,6417],{},"        {\n",[597,6419,6420],{"class":599,"line":3217},[597,6421,6422],{},"            // Product found, retrieve the product details\n",[597,6424,6425],{"class":599,"line":3397},[597,6426,6427],{},"            firstProduct = new Product\n",[597,6429,6430],{"class":599,"line":3403},[597,6431,4926],{},[597,6433,6434],{"class":599,"line":3408},[597,6435,6436],{},"                Id = searchItem.Record.Id,\n",[597,6438,6439],{"class":599,"line":3414},[597,6440,6441],{},"                Name = searchItem.Record.Name,\n",[597,6443,6444],{"class":599,"line":3420},[597,6445,6446],{},"                Description = searchItem.Record.Description,\n",[597,6448,6449],{"class":599,"line":3426},[597,6450,6451],{},"                Price 
= searchItem.Record.Price,\n",[597,6453,6454],{"class":599,"line":3431},[597,6455,6456],{},"                ImageUrl = searchItem.Record.ImageUrl\n",[597,6458,6459],{"class":599,"line":3437},[597,6460,6461],{},"            };\n",[597,6463,6465],{"class":599,"line":6464},29,[597,6466,1485],{"emptyLinePlaceholder":17},[597,6468,6470],{"class":599,"line":6469},30,[597,6471,6472],{},"            searchScore = searchItem.Score.Value;\n",[597,6474,6476],{"class":599,"line":6475},31,[597,6477,6478],{},"            responseText = $\"The product [{firstProduct.Name}] fits with the search criteria [{search}][{searchItem.Score.Value.ToString(\"0.00\")}]\";\n",[597,6480,6482],{"class":599,"line":6481},32,[597,6483,6484],{},"            _logger.LogInformation($\"Search Response: {responseText}\");\n",[597,6486,6488],{"class":599,"line":6487},33,[597,6489,6490],{},"        }\n",[597,6492,6494],{"class":599,"line":6493},34,[597,6495,3390],{},[597,6497,6499],{"class":599,"line":6498},35,[597,6500,1485],{"emptyLinePlaceholder":17},[597,6502,6504],{"class":599,"line":6503},36,[597,6505,6506],{},"    // Generate a friendly response message using the found product information\n",[597,6508,6510],{"class":599,"line":6509},37,[597,6511,6512],{},"    var prompt = @$\"You are an intelligent assistant helping clients with their search about outdoor products. Generate a catchy and friendly message using the following information:\n",[597,6514,6516],{"class":599,"line":6515},38,[597,6517,6518],{},"    - User Question: {search}\n",[597,6520,6522],{"class":599,"line":6521},39,[597,6523,6524],{},"    - Found Product Name: {firstProduct.Name}\n",[597,6526,6528],{"class":599,"line":6527},40,[597,6529,6530],{},"    - Found Product Description: {firstProduct.Description}\n",[597,6532,6534],{"class":599,"line":6533},41,[597,6535,6536],{},"    - Found Product Price: {firstProduct.Price}\n",[597,6538,6540],{"class":599,"line":6539},42,[597,6541,6542],{},"    Include the found product information in the response to the user question.\";\n",[597,6544,6546],{"class":599,"line":6545},43,[597,6547,1485],{"emptyLinePlaceholder":17},[597,6549,6551],{"class":599,"line":6550},44,[597,6552,6553],{},"    var messages = new List\u003CChatMessage>\n",[597,6555,6557],{"class":599,"line":6556},45,[597,6558,3380],{},[597,6560,6562],{"class":599,"line":6561},46,[597,6563,6564],{},"        new SystemChatMessage(_systemPrompt),\n",[597,6566,6568],{"class":599,"line":6567},47,[597,6569,6570],{},"        new UserChatMessage(prompt)\n",[597,6572,6574],{"class":599,"line":6573},48,[597,6575,6379],{},[597,6577,6579],{"class":599,"line":6578},49,[597,6580,1485],{"emptyLinePlaceholder":17},[597,6582,6584],{"class":599,"line":6583},50,[597,6585,6586],{},"    _logger.LogInformation(\"{ChatHistory}\", JsonConvert.SerializeObject(messages));\n",[597,6588,6590],{"class":599,"line":6589},51,[597,6591,1485],{"emptyLinePlaceholder":17},[597,6593,6595],{"class":599,"line":6594},52,[597,6596,6597],{},"    var resultPrompt = await _chatClient.CompleteChatAsync(messages);\n",[597,6599,6601],{"class":599,"line":6600},53,[597,6602,637],{},[33,6604,6605],{},"With the code above, we generate the embedding for the search query, search the vector database for the most similar product, and get a response message using the found product information.",[33,6607,6608],{},"Helping the user find the products they need more easily, leading to a better shopping experience and increased sales.",[33,6610,6611],{},"Moreover, as generative AI evolves, we need some telemetry and 
monitoring to understand the user's behavior and improve the search engine; this is where Azure Application Insights and .NET Aspire come in.",[33,6613,6614],{},[145,6615],{"alt":6616,"src":6617},"Image demonstrating the .NET Aspire tracing capabilities","content/generative-ai/images/aspire-tracing-eshoplite.png",[33,6619,6620],{},".NET Aspire provides a powerful set of tools to monitor and trace the application's behavior, including the user's interactions with the search engine, backend services, and the AI models. The tracing capabilities can help us understand possible bottlenecks, errors, and performance issues, allowing us to optimize the application and provide a better user experience.",[33,6622,6623],{},[145,6624],{"alt":6625,"src":6626},"Image demonstrating the Azure Application Insights in eShopLite","content/generative-ai/images/app-insights-eshoplite.png",[33,6628,6629],{},"As telemetry is essential to understand the user's behavior and improve services, we introduce Azure Application Insights to monitor the application's performance and user interactions.",[33,6631,6632],{},"Application Insights provides a comprehensive set of telemetry data, helping us understand how our services are performing, how users are interacting with the application, and how cloud resources are being used.",[33,6634,6635],{},"In the image, we can see the Application Insights dashboard, showing how the services are performing, for example, the calls to our database, the number of requests, and the response time.",[505,6637,6638],{},[33,6639,1155,6640,6643,6644],{},[391,6641,6642],{},"Pro Tip",": For more information on eShopLite with Semantic Search, take a look at the repository: ",[356,6645,5883],{"href":5883,"rel":6646},[360],[28,6648,6650],{"id":6649},"eshoplite-with-semantic-search-using-azure-ai-search","eShopLite with semantic search using Azure AI Search",[33,6652,6653,6654,6657],{},"In the eShopLite end-to-end demo, we use ",[391,6655,6656],{},"Azure AI Search"," to enhance the search capabilities of the e-commerce application. Azure AI Search helps us create a more robust search engine that can understand the context of the user's queries and provide more accurate results.",[33,6659,6660],{},"It also provides a more scalable and reliable search engine that can handle large amounts of data and user queries. Azure AI Search allows the solution to persist the search indexes, so the information will be available even if the application is restarted.",[150,6662,6663],{},[153,6664,6665],{},[356,6666,6669],{"href":6667,"rel":6668},"https://aka.ms/netaieshoplitesemanticsearchazureaisearch",[360],"eShopLite with Azure AI Search",[28,6671,5782],{"id":6672},"eshoplite-with-realtime-audio",[33,6674,6675],{},[356,6676,6679],{"href":6677,"rel":6678},"https://aka.ms/genainnet/videos/lesson4-eshoplite-realtimeaudio",[360],[145,6680],{"alt":6681,"src":6682},"eShop with real-time audio explainer video","content/generative-ai/images/LIM_GAN_13_thumb_w480.png",[33,6684,6685],{},[368,6686,370],{},[33,6688,6689],{},"In eShopLite with real-time audio, we use the real-time audio capabilities of GPT-4o to analyze the conversations between the customer and the chatbot, providing a more personalized and engaging experience. 
For example, if a customer asks for a product recommendation, the chatbot can analyze the customer's request in real-time and provide a more accurate and relevant response.",[505,6691,6692],{},[33,6693,3096,6694,5880,6696],{},[391,6695,5879],{},[356,6697,5885],{"href":6698,"rel":6699},"https://aka.ms/netaieshopliterealtimechat",[360],[33,6701,6702],{},[145,6703],{"alt":6704,"src":6705},"Image demonstrating the Realtime Analysis in eShopLite","content/generative-ai/images/realtime-analysis-eshoplite.gif",[33,6707,6708,6709,6712],{},"To implement this feature, we need to create new endpoints for the Realtime Analysis; the implementation can be found in the ",[594,6710,6711],{},"StoreRealtime\\ConversationManager.cs"," file.",[588,6714,6716],{"className":590,"code":6715,"language":592,"meta":11,"style":11},"public async Task RunAsync(\n    Stream audioInput,\n    Speaker audioOutput,\n    Func\u003Cstring, Task> addMessageAsync,\n    Func\u003Cstring, bool, Task> addChatMessageAsync,\n    CancellationToken cancellationToken)\n{\n    // Define the initial prompt for the assistant\n    var prompt = $\"\"\"\n        You are a useful assistant.\n        Respond as succinctly as possible, in just a few words.\n        Check the product database and external sources for information.\n        The current date is {DateTime.Now.ToLongDateString()}\n        \"\"\";\n",[594,6717,6718,6723,6728,6733,6738,6743,6748,6752,6757,6762,6767,6772,6777,6782],{"__ignoreMap":11},[597,6719,6720],{"class":599,"line":600},[597,6721,6722],{},"public async Task RunAsync(\n",[597,6724,6725],{"class":599,"line":12},[597,6726,6727],{},"    Stream audioInput,\n",[597,6729,6730],{"class":599,"line":109},[597,6731,6732],{},"    Speaker audioOutput,\n",[597,6734,6735],{"class":599,"line":616},[597,6736,6737],{},"    Func\u003Cstring, Task> addMessageAsync,\n",[597,6739,6740],{"class":599,"line":622},[597,6741,6742],{},"    Func\u003Cstring, bool, Task> addChatMessageAsync,\n",[597,6744,6745],{"class":599,"line":628},[597,6746,6747],{},"    CancellationToken cancellationToken)\n",[597,6749,6750],{"class":599,"line":634},[597,6751,608],{},[597,6753,6754],{"class":599,"line":3147},[597,6755,6756],{},"    // Define the initial prompt for the assistant\n",[597,6758,6759],{"class":599,"line":3153},[597,6760,6761],{},"    var prompt = $\"\"\"\n",[597,6763,6764],{"class":599,"line":3159},[597,6765,6766],{},"        You are a useful assistant.\n",[597,6768,6769],{"class":599,"line":3165},[597,6770,6771],{},"        Respond as succinctly as possible, in just a few words.\n",[597,6773,6774],{"class":599,"line":3171},[597,6775,6776],{},"        Check the product database and external sources for information.\n",[597,6778,6779],{"class":599,"line":3177},[597,6780,6781],{},"        The current date is {DateTime.Now.ToLongDateString()}\n",[597,6783,6784],{"class":599,"line":3183},[597,6785,6786],{},"        \"\"\";\n",[33,6788,6789],{},"First, we define the initial prompt for the assistant, giving it instructions on how to respond to the user. 
Remember to use prompts that are clear and concise; prompt engineering is essential to get accurate results from the AI models.",[588,6791,6793],{"className":590,"code":6792,"language":592,"meta":11,"style":11},"// Notify the user that the connection is being established\nawait addMessageAsync(\"Connecting...\");\n\n// Send an initial greeting message\nawait addChatMessageAsync(\"Hello, how can I help?\", false);\n\n// Create AI functions for semantic search and product name search\nvar contosoSemanticSearchTool = AIFunctionFactory.Create(_contosoProductContext.SemanticSearchOutdoorProductsAsync);\nvar contosoSearchByProductNameTool = AIFunctionFactory.Create(_contosoProductContext.SearchOutdoorProductsByNameAsync);\n\n// Add the AI functions to a list of tools\nList\u003CAIFunction> tools = new List\u003CAIFunction> { contosoSemanticSearchTool, contosoSearchByProductNameTool };\n",[594,6794,6795,6800,6805,6809,6814,6819,6823,6828,6833,6838,6842,6847],{"__ignoreMap":11},[597,6796,6797],{"class":599,"line":600},[597,6798,6799],{},"// Notify the user that the connection is being established\n",[597,6801,6802],{"class":599,"line":12},[597,6803,6804],{},"await addMessageAsync(\"Connecting...\");\n",[597,6806,6807],{"class":599,"line":109},[597,6808,1485],{"emptyLinePlaceholder":17},[597,6810,6811],{"class":599,"line":616},[597,6812,6813],{},"// Send an initial greeting message\n",[597,6815,6816],{"class":599,"line":622},[597,6817,6818],{},"await addChatMessageAsync(\"Hello, how can I help?\", false);\n",[597,6820,6821],{"class":599,"line":628},[597,6822,1485],{"emptyLinePlaceholder":17},[597,6824,6825],{"class":599,"line":634},[597,6826,6827],{},"// Create AI functions for semantic search and product name search\n",[597,6829,6830],{"class":599,"line":3147},[597,6831,6832],{},"var contosoSemanticSearchTool = AIFunctionFactory.Create(_contosoProductContext.SemanticSearchOutdoorProductsAsync);\n",[597,6834,6835],{"class":599,"line":3153},[597,6836,6837],{},"var contosoSearchByProductNameTool = AIFunctionFactory.Create(_contosoProductContext.SearchOutdoorProductsByNameAsync);\n",[597,6839,6840],{"class":599,"line":3159},[597,6841,1485],{"emptyLinePlaceholder":17},[597,6843,6844],{"class":599,"line":3165},[597,6845,6846],{},"// Add the AI functions to a list of tools\n",[597,6848,6849],{"class":599,"line":3171},[597,6850,6851],{},"List\u003CAIFunction> tools = new List\u003CAIFunction> { contosoSemanticSearchTool, contosoSearchByProductNameTool };\n",[33,6853,6854],{},"We then notify the user that the chat is ready to start and send an initial greeting message. Then, AI functions to search products, semantic search, and search by product name are created and added to a list of tools. 
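Note that `AIFunctionFactory.Create` wraps ordinary C# methods: the method signature and its `[Description]` attributes become the tool schema the model sees. A hedged sketch of what such a method might look like (the real implementation lives in the sample's product context and will differ):

```csharp
using System;
using System.ComponentModel;
using System.Linq;
using System.Threading.Tasks;

// Hypothetical sketch of a method exposed to the model as a tool.
public class ContosoProductContext
{
    private static readonly (string Name, decimal Price)[] Products =
    [
        ("Camping Cookware", 29.99m),
        ("Trailblazer Tent", 199.00m),
    ];

    [Description("Searches the catalog for outdoor products matching a name")]
    public Task<string> SearchOutdoorProductsByNameAsync(
        [Description("The product name to search for")] string name) =>
        Task.FromResult(string.Join("; ",
            Products
                .Where(p => p.Name.Contains(name, StringComparison.OrdinalIgnoreCase))
                .Select(p => $"{p.Name}: {p.Price}")));
}
```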
These tools can be used to provide the user with relevant information on their queries.",[588,6856,6858],{"className":590,"code":6857,"language":592,"meta":11,"style":11},"// Configure the conversation session options\nvar sessionOptions = new ConversationSessionOptions()\n{\n    Instructions = prompt,\n    Voice = ConversationVoice.Shimmer,\n    InputTranscriptionOptions = new() { Model = \"whisper-1\" },\n};\n\n// Add each tool to the session options\nforeach (var tool in tools)\n{\n    sessionOptions.Tools.Add(tool.ToConversationFunctionTool());\n}\n\n// Start the conversation session with the configured options\nsession = await client.StartConversationSessionAsync(cancellationToken);\nawait session.ConfigureSessionAsync(sessionOptions);\n\n// Initialize a StringBuilder to store the output transcription\nvar outputTranscription = new StringBuilder();\n",[594,6859,6860,6865,6870,6874,6879,6884,6889,6893,6897,6902,6907,6911,6916,6920,6924,6929,6934,6939,6943,6948],{"__ignoreMap":11},[597,6861,6862],{"class":599,"line":600},[597,6863,6864],{},"// Configure the conversation session options\n",[597,6866,6867],{"class":599,"line":12},[597,6868,6869],{},"var sessionOptions = new ConversationSessionOptions()\n",[597,6871,6872],{"class":599,"line":109},[597,6873,608],{},[597,6875,6876],{"class":599,"line":616},[597,6877,6878],{},"    Instructions = prompt,\n",[597,6880,6881],{"class":599,"line":622},[597,6882,6883],{},"    Voice = ConversationVoice.Shimmer,\n",[597,6885,6886],{"class":599,"line":628},[597,6887,6888],{},"    InputTranscriptionOptions = new() { Model = \"whisper-1\" },\n",[597,6890,6891],{"class":599,"line":634},[597,6892,3588],{},[597,6894,6895],{"class":599,"line":3147},[597,6896,1485],{"emptyLinePlaceholder":17},[597,6898,6899],{"class":599,"line":3153},[597,6900,6901],{},"// Add each tool to the session options\n",[597,6903,6904],{"class":599,"line":3159},[597,6905,6906],{},"foreach (var tool in tools)\n",[597,6908,6909],{"class":599,"line":3165},[597,6910,608],{},[597,6912,6913],{"class":599,"line":3171},[597,6914,6915],{},"    sessionOptions.Tools.Add(tool.ToConversationFunctionTool());\n",[597,6917,6918],{"class":599,"line":3177},[597,6919,637],{},[597,6921,6922],{"class":599,"line":3183},[597,6923,1485],{"emptyLinePlaceholder":17},[597,6925,6926],{"class":599,"line":3189},[597,6927,6928],{},"// Start the conversation session with the configured options\n",[597,6930,6931],{"class":599,"line":3194},[597,6932,6933],{},"session = await client.StartConversationSessionAsync(cancellationToken);\n",[597,6935,6936],{"class":599,"line":3200},[597,6937,6938],{},"await session.ConfigureSessionAsync(sessionOptions);\n",[597,6940,6941],{"class":599,"line":3206},[597,6942,1485],{"emptyLinePlaceholder":17},[597,6944,6945],{"class":599,"line":3211},[597,6946,6947],{},"// Initialize a StringBuilder to store the output transcription\n",[597,6949,6950],{"class":599,"line":3217},[597,6951,6952],{},"var outputTranscription = new StringBuilder();\n",[33,6954,6955,6956,6959],{},"The conversation session options are configured, including the instructions, voice, and input transcription options, using the ",[594,6957,6958],{},"Whisper-1"," model for the input transcription.",[33,6961,6962],{},"Each tool is added to the session options, and the conversation session is started with the configured options. 
Those can be changed to fit the user's needs.",[505,6964,6965],{},[33,6966,3096,6967,5880,6969],{},[391,6968,5879],{},[356,6970,5885],{"href":6698,"rel":6971},[360],[135,6973,5788],{"id":6974},"creative-writer-agent",[33,6976,6977],{},[356,6978,6981],{"href":6979,"rel":6980},"https://aka.ms/genainnet/videos/lesson4-creative-writer-agents",[360],[145,6982],{"alt":6983,"src":6984},"Creative Writing Agent explainer video","content/generative-ai/images/LIM_GAN_12_thumb_w480.png",[33,6986,6987],{},[368,6988,370],{},[33,6990,6991],{},"Agents are a big topic in the current AI landscape, and to demonstrate their capabilities, we'll use the Creative Writer Agent, a tool that can generate creative and engaging text based on the user's input, helping to write researched, specific, and engaging content.",[505,6993,6994],{},[33,6995,3096,6996,5880,6998],{},[391,6997,5879],{},[356,6999,7002],{"href":7000,"rel":7001},"https://aka.ms/netaicreativewriter",[360],"Creative Writing Agent sample code",[33,7004,7005],{},[145,7006],{"alt":7007,"src":7008},"Image demonstrating the Creative Writer Agent","content/generative-ai/images/creative-writer-agent.png",[33,7010,7011],{},"This solution centers on four dedicated modules that combine to generate high-quality content:",[150,7013,7014,7017,7020,7023],{},[153,7015,7016],{},"Researcher: Leverages Bing search to gather context, topics, and data, then concisely summarizes it.",[153,7018,7019],{},"Marketing: Interprets user intent, constructs relevant questions, and taps into the Vector DB for precise results.",[153,7021,7022],{},"Writer: Synthesizes findings from Researcher and Marketing, producing a cohesive writing of the article.",[153,7024,7025],{},"Editor: Assesses the draft, offers corrections, and decides whether it’s publication-ready.",[33,7027,7028],{},"The workflow integrates relevant data, effective messaging, and review, being orchestrated by Semantic Kernel, Microsoft AI Extension, and .NET Aspire.",[33,7030,7031],{},[145,7032],{"alt":7033,"src":7034},"Image demonstrating the Creative Writer Agent architecture","content/generative-ai/images/creative-writer-agent-architecture.png",[33,7036,7037],{},"Understanding how the components interact with each other can be a reference for creating your own Agentic applications, take a look at the code below to understand how the components interact with each other, first look at the ChatController.cs call to the Creative Writer:",[588,7039,7041],{"className":590,"code":7040,"language":592,"meta":11,"style":11},"var userInput = request.Messages.Last();\n\n// Deserialize the user input content into a CreateWriterRequest object\nCreateWriterRequest createWriterRequest = _yamlDeserializer.Deserialize\u003CCreateWriterRequest>(userInput.Content);\n\n// Create a new session for the Creative Writer application\nvar session = await _creativeWriterApp.CreateSessionAsync(Response);\n\n// Process the streaming request and write the response in real-time\nawait foreach (var delta in session.ProcessStreamingRequest(createWriterRequest))\n{\n    // Serialize the delta and write it to the response stream and flush\n    await response.WriteAsync($\"{JsonSerializer.Serialize(delta)}\\r\\n\");\n    await response.Body.FlushAsync();\n}\n",[594,7042,7043,7048,7052,7057,7062,7066,7071,7076,7080,7085,7090,7094,7099,7104,7109],{"__ignoreMap":11},[597,7044,7045],{"class":599,"line":600},[597,7046,7047],{},"var userInput = 
request.Messages.Last();\n",[597,7049,7050],{"class":599,"line":12},[597,7051,1485],{"emptyLinePlaceholder":17},[597,7053,7054],{"class":599,"line":109},[597,7055,7056],{},"// Deserialize the user input content into a CreateWriterRequest object\n",[597,7058,7059],{"class":599,"line":616},[597,7060,7061],{},"CreateWriterRequest createWriterRequest = _yamlDeserializer.Deserialize\u003CCreateWriterRequest>(userInput.Content);\n",[597,7063,7064],{"class":599,"line":622},[597,7065,1485],{"emptyLinePlaceholder":17},[597,7067,7068],{"class":599,"line":628},[597,7069,7070],{},"// Create a new session for the Creative Writer application\n",[597,7072,7073],{"class":599,"line":634},[597,7074,7075],{},"var session = await _creativeWriterApp.CreateSessionAsync(Response);\n",[597,7077,7078],{"class":599,"line":3147},[597,7079,1485],{"emptyLinePlaceholder":17},[597,7081,7082],{"class":599,"line":3153},[597,7083,7084],{},"// Process the streaming request and write the response in real-time\n",[597,7086,7087],{"class":599,"line":3159},[597,7088,7089],{},"await foreach (var delta in session.ProcessStreamingRequest(createWriterRequest))\n",[597,7091,7092],{"class":599,"line":3165},[597,7093,608],{},[597,7095,7096],{"class":599,"line":3171},[597,7097,7098],{},"    // Serialize the delta and write it to the response stream and flush\n",[597,7100,7101],{"class":599,"line":3177},[597,7102,7103],{},"    await response.WriteAsync($\"{JsonSerializer.Serialize(delta)}\\r\\n\");\n",[597,7105,7106],{"class":599,"line":3183},[597,7107,7108],{},"    await response.Body.FlushAsync();\n",[597,7110,7111],{"class":599,"line":3189},[597,7112,637],{},[33,7114,7115,7116,7119,7120,529,7123,533,7126,7129,7130,7133],{},"The type ",[594,7117,7118],{},"CreateWriterRequest"," needs to have three properties: ",[594,7121,7122],{},"Research",[594,7124,7125],{},"Products",[594,7127,7128],{},"Writing",". 
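A hedged sketch of what that request type might look like (the actual definition lives in the sample repository and may name things differently):

```csharp
// Hypothetical sketch of the request type deserialized from the user's YAML input.
public record CreateWriterRequest
{
    public string Research { get; init; } = ""; // context for the Researcher agent
    public string Products { get; init; } = ""; // context for the Marketing agent
    public string Writing { get; init; } = "";  // the writing assignment for the Writer agent
}
```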
After these are set from the request, the controller calls the ",[594,7131,7132],{},"CreateSessionAsync"," method, which looks like this:",[588,7135,7137],{"className":590,"code":7136,"language":592,"meta":11,"style":11},"internal async Task\u003CCreativeWriterSession> CreateSessionAsync(HttpResponse response)\n{\n    // Add custom function invocation filters to handle response modifications\n    defaultKernel.FunctionInvocationFilters.Add(new FunctionInvocationFilter(response));\n\n    // Create a separate kernel for Bing search integration and initialize the Bing service, and create a plugin for Bing search\n    Kernel bingKernel = defaultKernel.Clone();\n    BingTextSearch textSearch = new(apiKey: configuration[\"BingAPIKey\"]!);\n    KernelPlugin searchPlugin = textSearch.CreateWithSearch(\"BingSearchPlugin\");\n    bingKernel.Plugins.Add(searchPlugin);\n\n    // Clone the default kernel to set up the vector search capabilities, and create the vector search kernel\n    Kernel vectorSearchKernel = defaultKernel.Clone();\n    await ConfigureVectorSearchKernel(vectorSearchKernel);\n\n    // Return a new session encapsulating all configured kernels for comprehensive AI functionalities\n    return new CreativeWriterSession(defaultKernel, bingKernel, vectorSearchKernel);\n}\n",[594,7138,7139,7144,7148,7153,7158,7162,7167,7172,7177,7182,7187,7191,7196,7201,7206,7210,7215,7220],{"__ignoreMap":11},[597,7140,7141],{"class":599,"line":600},[597,7142,7143],{},"internal async Task\u003CCreativeWriterSession> CreateSessionAsync(HttpResponse response)\n",[597,7145,7146],{"class":599,"line":12},[597,7147,608],{},[597,7149,7150],{"class":599,"line":109},[597,7151,7152],{},"    // Add custom function invocation filters to handle response modifications\n",[597,7154,7155],{"class":599,"line":616},[597,7156,7157],{},"    defaultKernel.FunctionInvocationFilters.Add(new FunctionInvocationFilter(response));\n",[597,7159,7160],{"class":599,"line":622},[597,7161,1485],{"emptyLinePlaceholder":17},[597,7163,7164],{"class":599,"line":628},[597,7165,7166],{},"    // Create a separate kernel for Bing search integration and initialize the Bing service, and create a plugin for Bing search\n",[597,7168,7169],{"class":599,"line":634},[597,7170,7171],{},"    Kernel bingKernel = defaultKernel.Clone();\n",[597,7173,7174],{"class":599,"line":3147},[597,7175,7176],{},"    BingTextSearch textSearch = new(apiKey: configuration[\"BingAPIKey\"]!);\n",[597,7178,7179],{"class":599,"line":3153},[597,7180,7181],{},"    KernelPlugin searchPlugin = textSearch.CreateWithSearch(\"BingSearchPlugin\");\n",[597,7183,7184],{"class":599,"line":3159},[597,7185,7186],{},"    bingKernel.Plugins.Add(searchPlugin);\n",[597,7188,7189],{"class":599,"line":3165},[597,7190,1485],{"emptyLinePlaceholder":17},[597,7192,7193],{"class":599,"line":3171},[597,7194,7195],{},"    // Clone the default kernel to set up the vector search capabilities, and create the vector search kernel\n",[597,7197,7198],{"class":599,"line":3177},[597,7199,7200],{},"    Kernel vectorSearchKernel = defaultKernel.Clone();\n",[597,7202,7203],{"class":599,"line":3183},[597,7204,7205],{},"    await ConfigureVectorSearchKernel(vectorSearchKernel);\n",[597,7207,7208],{"class":599,"line":3189},[597,7209,1485],{"emptyLinePlaceholder":17},[597,7211,7212],{"class":599,"line":3194},[597,7213,7214],{},"    // Return a new session encapsulating all configured kernels for comprehensive AI functionalities\n",[597,7216,7217],{"class":599,"line":3200},[597,7218,7219],{},"    return new 
CreativeWriterSession(defaultKernel, bingKernel, vectorSearchKernel);\n",[597,7221,7222],{"class":599,"line":3206},[597,7223,637],{},[33,7225,7226,7227,7230,7231,7234,7235,830,7237,7240],{},"Now, we can see the ",[594,7228,7229],{},"CreativeWriterSession"," class for the ",[594,7232,7233],{},"ProcessStreamingRequest"," function, to understand how the components interact with each other, first look at the ",[594,7236,7122],{},[594,7238,7239],{},"Marketing"," components:",[588,7242,7244],{"className":590,"code":7243,"language":592,"meta":11,"style":11},"// Initialize the Researcher Agent with a specific prompt template.\n// This agent leverages the Bing Kernel for enhanced semantic search capabilities.\nChatCompletionAgent researcherAgent = new(ReadFileForPromptTemplateConfig(\"./Agents/Prompts/researcher.yaml\"))\n{\n    Name = ResearcherName,\n    Kernel = bingKernel,\n    Arguments = CreateFunctionChoiceAutoBehavior(),\n    LoggerFactory = bingKernel.LoggerFactory\n};\n\n// Initialize the Marketing Agent with its own prompt template.\n// This agent utilizes the Vector Search Kernel to handle product-related queries efficiently.\nChatCompletionAgent marketingAgent = new(ReadFileForPromptTemplateConfig(\"./Agents/Prompts/marketing.yaml\"))\n{\n    Name = MarketingName,\n    Kernel = vectorSearchKernel,\n    Arguments = CreateFunctionChoiceAutoBehavior(),\n    LoggerFactory = vectorSearchKernel.LoggerFactory\n};\n\n// ...\n\n// Invoke the Researcher Agent asynchronously with the provided research context.\nawait foreach (ChatMessageContent response in researcherAgent.InvokeAsync(\n    new object[] { },\n    new Dictionary\u003Cstring, string> { { \"research_context\", createWriterRequest.Research } }))\n{\n    // Aggregate the research results for further processing or display.\n    sbResearchResults.AppendLine(response.Content);\n\n    yield return new AIChatCompletionDelta(Delta: new AIChatMessageDelta\n    {\n        Role = AIChatRole.Assistant,\n        Context = new AIChatAgentInfo(ResearcherName),\n        Content = response.Content,\n    });\n}\n\n// ...\n\n// Invoke the Marketing Agent with the provided product context.\nawait foreach (ChatMessageContent response in marketingAgent.InvokeAsync(\n    new object[] { },\n    new Dictionary\u003Cstring, string> { { \"product_context\", createWriterRequest.Products } }))\n{\n    // Consolidate the product-related results for use in marketing strategies or user feedback.\n    sbProductResults.AppendLine(response.Content);\n\n    yield return new AIChatCompletionDelta(Delta: new AIChatMessageDelta\n    {\n        Role = AIChatRole.Assistant,\n        Context = new AIChatAgentInfo(MarketingName),\n        Content = response.Content,\n    });\n}\n",[594,7245,7246,7251,7256,7261,7265,7270,7275,7280,7285,7289,7293,7298,7303,7308,7312,7317,7322,7326,7331,7335,7339,7344,7348,7353,7358,7363,7368,7372,7377,7382,7386,7391,7395,7400,7405,7410,7415,7419,7423,7427,7431,7436,7441,7445,7450,7454,7459,7464,7468,7472,7476,7480,7485,7489,7494],{"__ignoreMap":11},[597,7247,7248],{"class":599,"line":600},[597,7249,7250],{},"// Initialize the Researcher Agent with a specific prompt template.\n",[597,7252,7253],{"class":599,"line":12},[597,7254,7255],{},"// This agent leverages the Bing Kernel for enhanced semantic search capabilities.\n",[597,7257,7258],{"class":599,"line":109},[597,7259,7260],{},"ChatCompletionAgent researcherAgent = 
new(ReadFileForPromptTemplateConfig(\"./Agents/Prompts/researcher.yaml\"))\n",[597,7262,7263],{"class":599,"line":616},[597,7264,608],{},[597,7266,7267],{"class":599,"line":622},[597,7268,7269],{},"    Name = ResearcherName,\n",[597,7271,7272],{"class":599,"line":628},[597,7273,7274],{},"    Kernel = bingKernel,\n",[597,7276,7277],{"class":599,"line":634},[597,7278,7279],{},"    Arguments = CreateFunctionChoiceAutoBehavior(),\n",[597,7281,7282],{"class":599,"line":3147},[597,7283,7284],{},"    LoggerFactory = bingKernel.LoggerFactory\n",[597,7286,7287],{"class":599,"line":3153},[597,7288,3588],{},[597,7290,7291],{"class":599,"line":3159},[597,7292,1485],{"emptyLinePlaceholder":17},[597,7294,7295],{"class":599,"line":3165},[597,7296,7297],{},"// Initialize the Marketing Agent with its own prompt template.\n",[597,7299,7300],{"class":599,"line":3171},[597,7301,7302],{},"// This agent utilizes the Vector Search Kernel to handle product-related queries efficiently.\n",[597,7304,7305],{"class":599,"line":3177},[597,7306,7307],{},"ChatCompletionAgent marketingAgent = new(ReadFileForPromptTemplateConfig(\"./Agents/Prompts/marketing.yaml\"))\n",[597,7309,7310],{"class":599,"line":3183},[597,7311,608],{},[597,7313,7314],{"class":599,"line":3189},[597,7315,7316],{},"    Name = MarketingName,\n",[597,7318,7319],{"class":599,"line":3194},[597,7320,7321],{},"    Kernel = vectorSearchKernel,\n",[597,7323,7324],{"class":599,"line":3200},[597,7325,7279],{},[597,7327,7328],{"class":599,"line":3206},[597,7329,7330],{},"    LoggerFactory = vectorSearchKernel.LoggerFactory\n",[597,7332,7333],{"class":599,"line":3211},[597,7334,3588],{},[597,7336,7337],{"class":599,"line":3217},[597,7338,1485],{"emptyLinePlaceholder":17},[597,7340,7341],{"class":599,"line":3397},[597,7342,7343],{},"// ...\n",[597,7345,7346],{"class":599,"line":3403},[597,7347,1485],{"emptyLinePlaceholder":17},[597,7349,7350],{"class":599,"line":3408},[597,7351,7352],{},"// Invoke the Researcher Agent asynchronously with the provided research context.\n",[597,7354,7355],{"class":599,"line":3414},[597,7356,7357],{},"await foreach (ChatMessageContent response in researcherAgent.InvokeAsync(\n",[597,7359,7360],{"class":599,"line":3420},[597,7361,7362],{},"    new object[] { },\n",[597,7364,7365],{"class":599,"line":3426},[597,7366,7367],{},"    new Dictionary\u003Cstring, string> { { \"research_context\", createWriterRequest.Research } }))\n",[597,7369,7370],{"class":599,"line":3431},[597,7371,608],{},[597,7373,7374],{"class":599,"line":3437},[597,7375,7376],{},"    // Aggregate the research results for further processing or display.\n",[597,7378,7379],{"class":599,"line":6464},[597,7380,7381],{},"    sbResearchResults.AppendLine(response.Content);\n",[597,7383,7384],{"class":599,"line":6469},[597,7385,1485],{"emptyLinePlaceholder":17},[597,7387,7388],{"class":599,"line":6475},[597,7389,7390],{},"    yield return new AIChatCompletionDelta(Delta: new AIChatMessageDelta\n",[597,7392,7393],{"class":599,"line":6481},[597,7394,3380],{},[597,7396,7397],{"class":599,"line":6487},[597,7398,7399],{},"        Role = AIChatRole.Assistant,\n",[597,7401,7402],{"class":599,"line":6493},[597,7403,7404],{},"        Context = new AIChatAgentInfo(ResearcherName),\n",[597,7406,7407],{"class":599,"line":6498},[597,7408,7409],{},"        Content = response.Content,\n",[597,7411,7412],{"class":599,"line":6503},[597,7413,7414],{},"    
});\n",[597,7416,7417],{"class":599,"line":6509},[597,7418,637],{},[597,7420,7421],{"class":599,"line":6515},[597,7422,1485],{"emptyLinePlaceholder":17},[597,7424,7425],{"class":599,"line":6521},[597,7426,7343],{},[597,7428,7429],{"class":599,"line":6527},[597,7430,1485],{"emptyLinePlaceholder":17},[597,7432,7433],{"class":599,"line":6533},[597,7434,7435],{},"// Invoke the Marketing Agent with the provided product context.\n",[597,7437,7438],{"class":599,"line":6539},[597,7439,7440],{},"await foreach (ChatMessageContent response in marketingAgent.InvokeAsync(\n",[597,7442,7443],{"class":599,"line":6545},[597,7444,7362],{},[597,7446,7447],{"class":599,"line":6550},[597,7448,7449],{},"    new Dictionary\u003Cstring, string> { { \"product_context\", createWriterRequest.Products } }))\n",[597,7451,7452],{"class":599,"line":6556},[597,7453,608],{},[597,7455,7456],{"class":599,"line":6561},[597,7457,7458],{},"    // Consolidate the product-related results for use in marketing strategies or user feedback.\n",[597,7460,7461],{"class":599,"line":6567},[597,7462,7463],{},"    sbProductResults.AppendLine(response.Content);\n",[597,7465,7466],{"class":599,"line":6573},[597,7467,1485],{"emptyLinePlaceholder":17},[597,7469,7470],{"class":599,"line":6578},[597,7471,7390],{},[597,7473,7474],{"class":599,"line":6583},[597,7475,3380],{},[597,7477,7478],{"class":599,"line":6589},[597,7479,7399],{},[597,7481,7482],{"class":599,"line":6594},[597,7483,7484],{},"        Context = new AIChatAgentInfo(MarketingName),\n",[597,7486,7487],{"class":599,"line":6600},[597,7488,7409],{},[597,7490,7492],{"class":599,"line":7491},54,[597,7493,7414],{},[597,7495,7497],{"class":599,"line":7496},55,[597,7498,637],{},[33,7500,7501,7502,830,7505,7508],{},"Now, we initialize and configure the ",[594,7503,7504],{},"Writer",[594,7506,7507],{},"Editor"," agents. 
Look at the code:",[588,7510,7512],{"className":590,"code":7511,"language":592,"meta":11,"style":11},"// Initialize the Writer Agent with its specific prompt configuration\nChatCompletionAgent writerAgent = new(ReadFileForPromptTemplateConfig(\"./Agents/Prompts/writer.yaml\"))\n{\n    Name = WriterName,\n    Kernel = kernel,\n    Arguments = new Dictionary\u003Cstring, string>(),\n    LoggerFactory = kernel.LoggerFactory\n};\n\n// Initialize the Editor Agent with its specific prompt configuration\nChatCompletionAgent editorAgent = new(ReadFileForPromptTemplateConfig(\"./Agents/Prompts/editor.yaml\"))\n{\n    Name = EditorName,\n    Kernel = kernel,\n    LoggerFactory = kernel.LoggerFactory\n};\n\n// Populate the Writer Agent with contextual data required for generating content, gathered from the User, Researcher and Marketing Agents\nwriterAgent.Arguments[\"research_context\"] = createWriterRequest.Research;\nwriterAgent.Arguments[\"research_results\"] = sbResearchResults.ToString();\nwriterAgent.Arguments[\"product_context\"] = createWriterRequest.Products;\nwriterAgent.Arguments[\"product_results\"] = sbProductResults.ToString();\nwriterAgent.Arguments[\"assignment\"] = createWriterRequest.Writing;\n\n// Configure the Agent Group Chat to manage interactions between Writer and Editor\nAgentGroupChat chat = new(writerAgent, editorAgent)\n{\n    LoggerFactory = kernel.LoggerFactory,\n    ExecutionSettings = new AgentGroupChatSettings\n    {\n        // Define the strategy for selecting which agent interacts next\n        SelectionStrategy = new SequentialSelectionStrategy()\n        {\n            InitialAgent = writerAgent // Start the conversation with the Writer Agent\n        },\n        // Define the termination condition for the agent interactions, in this case, the Editor Agent will terminate the conversation\n        TerminationStrategy = new NoFeedbackLeftTerminationStrategy()\n    }\n};\n",[594,7513,7514,7519,7524,7528,7533,7538,7543,7548,7552,7556,7561,7566,7570,7575,7580,7584,7588,7592,7597,7602,7607,7612,7617,7622,7626,7631,7636,7640,7645,7650,7654,7659,7664,7668,7673,7678,7683,7688,7692],{"__ignoreMap":11},[597,7515,7516],{"class":599,"line":600},[597,7517,7518],{},"// Initialize the Writer Agent with its specific prompt configuration\n",[597,7520,7521],{"class":599,"line":12},[597,7522,7523],{},"ChatCompletionAgent writerAgent = new(ReadFileForPromptTemplateConfig(\"./Agents/Prompts/writer.yaml\"))\n",[597,7525,7526],{"class":599,"line":109},[597,7527,608],{},[597,7529,7530],{"class":599,"line":616},[597,7531,7532],{},"    Name = WriterName,\n",[597,7534,7535],{"class":599,"line":622},[597,7536,7537],{},"    Kernel = kernel,\n",[597,7539,7540],{"class":599,"line":628},[597,7541,7542],{},"    Arguments = new Dictionary\u003Cstring, string>(),\n",[597,7544,7545],{"class":599,"line":634},[597,7546,7547],{},"    LoggerFactory = kernel.LoggerFactory\n",[597,7549,7550],{"class":599,"line":3147},[597,7551,3588],{},[597,7553,7554],{"class":599,"line":3153},[597,7555,1485],{"emptyLinePlaceholder":17},[597,7557,7558],{"class":599,"line":3159},[597,7559,7560],{},"// Initialize the Editor Agent with its specific prompt configuration\n",[597,7562,7563],{"class":599,"line":3165},[597,7564,7565],{},"ChatCompletionAgent editorAgent = new(ReadFileForPromptTemplateConfig(\"./Agents/Prompts/editor.yaml\"))\n",[597,7567,7568],{"class":599,"line":3171},[597,7569,608],{},[597,7571,7572],{"class":599,"line":3177},[597,7573,7574],{},"    Name = 
EditorName,\n",[597,7576,7577],{"class":599,"line":3183},[597,7578,7579],{},"    Kernel = kernel,\n",[597,7581,7582],{"class":599,"line":3189},[597,7583,7547],{},[597,7585,7586],{"class":599,"line":3194},[597,7587,3588],{},[597,7589,7590],{"class":599,"line":3200},[597,7591,1485],{"emptyLinePlaceholder":17},[597,7593,7594],{"class":599,"line":3206},[597,7595,7596],{},"// Populate the Writer Agent with contextual data required for generating content, gathered from the User, Researcher and Marketing Agents\n",[597,7598,7599],{"class":599,"line":3211},[597,7600,7601],{},"writerAgent.Arguments[\"research_context\"] = createWriterRequest.Research;\n",[597,7603,7604],{"class":599,"line":3217},[597,7605,7606],{},"writerAgent.Arguments[\"research_results\"] = sbResearchResults.ToString();\n",[597,7608,7609],{"class":599,"line":3397},[597,7610,7611],{},"writerAgent.Arguments[\"product_context\"] = createWriterRequest.Products;\n",[597,7613,7614],{"class":599,"line":3403},[597,7615,7616],{},"writerAgent.Arguments[\"product_results\"] = sbProductResults.ToString();\n",[597,7618,7619],{"class":599,"line":3408},[597,7620,7621],{},"writerAgent.Arguments[\"assignment\"] = createWriterRequest.Writing;\n",[597,7623,7624],{"class":599,"line":3414},[597,7625,1485],{"emptyLinePlaceholder":17},[597,7627,7628],{"class":599,"line":3420},[597,7629,7630],{},"// Configure the Agent Group Chat to manage interactions between Writer and Editor\n",[597,7632,7633],{"class":599,"line":3426},[597,7634,7635],{},"AgentGroupChat chat = new(writerAgent, editorAgent)\n",[597,7637,7638],{"class":599,"line":3431},[597,7639,608],{},[597,7641,7642],{"class":599,"line":3437},[597,7643,7644],{},"    LoggerFactory = kernel.LoggerFactory,\n",[597,7646,7647],{"class":599,"line":6464},[597,7648,7649],{},"    ExecutionSettings = new AgentGroupChatSettings\n",[597,7651,7652],{"class":599,"line":6469},[597,7653,3380],{},[597,7655,7656],{"class":599,"line":6475},[597,7657,7658],{},"        // Define the strategy for selecting which agent interacts next\n",[597,7660,7661],{"class":599,"line":6481},[597,7662,7663],{},"        SelectionStrategy = new SequentialSelectionStrategy()\n",[597,7665,7666],{"class":599,"line":6487},[597,7667,6417],{},[597,7669,7670],{"class":599,"line":6493},[597,7671,7672],{},"            InitialAgent = writerAgent // Start the conversation with the Writer Agent\n",[597,7674,7675],{"class":599,"line":6498},[597,7676,7677],{},"        },\n",[597,7679,7680],{"class":599,"line":6503},[597,7681,7682],{},"        // Define the termination condition for the agent interactions, in this case, the Editor Agent will terminate the conversation\n",[597,7684,7685],{"class":599,"line":6509},[597,7686,7687],{},"        TerminationStrategy = new NoFeedbackLeftTerminationStrategy()\n",[597,7689,7690],{"class":599,"line":6515},[597,7691,3390],{},[597,7693,7694],{"class":599,"line":6521},[597,7695,3588],{},[33,7697,7698],{},"In .NET Aspire, we notice how the components are orchestrated to create a seamless experience for the user. 
The Aspire dashboard's tracing feature allows us to monitor the interactions between the agents, and its telemetry feature provides insights into the user's behavior and the performance of the AI models.",[33,7700,7701],{},[145,7702],{"alt":6616,"src":7703},"content/generative-ai/images/aspire-tracing-creative-writer.png",[33,7705,7706],{},[145,7707],{"alt":7708,"src":7709},"Image demonstrating the .NET Aspire telemetry capabilities","content/generative-ai/images/aspire-telemetry-creative-writer.png",[505,7711,7712],{},[33,7713,3096,7714,5880,7716],{},[391,7715,5879],{},[356,7717,7002],{"href":7000,"rel":7718},[360],[135,7720,1509],{"id":1508},[33,7722,7723],{},"Those are just a few examples of how you can use GenAI in your applications. The possibilities are endless, and the technology is evolving rapidly. Take a look at some of our resources to learn more about GenAI and how you can use it in your projects.",[505,7725,7726],{},[33,7727,1495,7728,7730],{},[391,7729,1498],{},": If you encounter any issues, open an issue in the repository.",[28,7732,3676],{"id":1515},[150,7734,7735,7742],{},[153,7736,7737],{},[356,7738,7741],{"href":7739,"rel":7740},"https://aka.ms/netaieshoplitedeepseekr1",[360],"eShopLite with DeepSeek",[153,7743,7744],{},[356,7745,6669],{"href":6667,"rel":7746},[360],[28,7748,5806],{"id":773},[33,7750,7751],{},"Learn about responsible AI practices and how to ensure that your AI models are ethical and have a positive impact!",[33,7753,783,7754],{},[356,7755,7757],{"href":7756},"./responsible-gen-ai","Responsible AI",[789,7759,791],{},{"title":11,"searchDepth":12,"depth":12,"links":7761},[7762,7763,7764,7769,7770],{"id":5724,"depth":12,"text":5725},{"id":5736,"depth":12,"text":5737},{"id":5823,"depth":12,"text":5762,"children":7765},[7766,7767,7768],{"id":5851,"depth":109,"text":5852},{"id":6649,"depth":109,"text":6650},{"id":6672,"depth":109,"text":5782},{"id":6974,"depth":12,"text":5788},{"id":1508,"depth":12,"text":1509,"children":7771},[7772,7773],{"id":1515,"depth":109,"text":3676},{"id":773,"depth":109,"text":5806},{},"/programming-language/platform/dotnet/generative-ai-for-beginners-dotnet/practical-samples",{"title":5711,"description":5719},"Programming Language/Platform/DotNet/Generative-AI-for-beginners-dotnet/4.Practical-Samples/index","h-jJPbHakB-DOf-IDA1wH24wWeCmKtGd00u1KmCaPkI",{"id":7780,"title":7781,"body":7782,"description":7789,"extension":14,"meta":8072,"navigation":17,"path":8073,"seo":8074,"stem":8075,"__hash__":8076},"content/Programming Language/Platform/DotNet/Generative-AI-for-beginners-dotnet/5.Responsible-Gen-AI/index.md","Responsible use of GenAI",{"type":8,"value":7783,"toc":8057},[7784,7787,7790,7792,7796,7799,7831,7839,7843,7846,7896,7900,7903,7907,7919,7923,7928,7932,7941,7945,7955,7959,7964,7968,7979,7983,7994,7998,8001,8007,8009,8012],[343,7785,7781],{"id":7786},"responsible-use-of-genai",[33,7788,7789],{},"Generative AI offers powerful capabilities, but it is crucial to ensure these implementations are ethical, unbiased, and secure. 
This lesson explores how to incorporate responsible AI principles into .NET applications effectively.",[351,7791],{},[135,7793,7795],{"id":7794},"responsible-ai-principles","Responsible AI principles",[33,7797,7798],{},"When developing generative AI solutions, adhere to the following principles:",[1053,7800,7801,7807,7813,7819,7825],{},[153,7802,7803,7806],{},[391,7804,7805],{},"Fairness",": Ensure AI models treat all users equally and avoid biases.",[153,7808,7809,7812],{},[391,7810,7811],{},"Inclusivity",": Design AI systems to accommodate diverse user groups and scenarios.",[153,7814,7815,7818],{},[391,7816,7817],{},"Transparency",": Clearly communicate when users are interacting with AI and how their data is utilized.",[153,7820,7821,7824],{},[391,7822,7823],{},"Accountability",": Take responsibility for the outcomes of your AI systems and continuously monitor them.",[153,7826,7827,7830],{},[391,7828,7829],{},"Security and Privacy",": Protect user data through robust security measures and compliance.",[33,7832,7833,7834,405],{},"For a more detailed look at each of these principles, check out this ",[356,7835,7838],{"href":7836,"rel":7837},"https://github.com/microsoft/generative-ai-for-beginners/tree/main/03-using-generative-ai-responsibly",[360],"Using Generative AI Responsibly lesson",[135,7840,7842],{"id":7841},"why-should-you-prioritize-responsible-ai","Why should you prioritize responsible AI?",[33,7844,7845],{},"Prioritizing responsible AI practices ensures trust, compliance, and better outcomes. Here are key reasons:",[150,7847,7848,7854,7866,7878,7884,7890],{},[153,7849,7850,7853],{},[391,7851,7852],{},"Hallucinations",": Generative AI systems can produce outputs that are factually incorrect or contextually irrelevant, known as hallucinations. These inaccuracies can undermine user trust and application reliability. Developers should use validation techniques, knowledge-grounding methods, and content constraints to address this challenge.",[153,7855,7856,7859,7860,7865],{},[391,7857,7858],{},"Harmful Content",": AI models may unintentionally generate offensive, biased, or inappropriate outputs. Without proper moderation, such content can harm users and tarnish reputations. Tools like ",[356,7861,7864],{"href":7862,"rel":7863},"https://azure.microsoft.com/products/ai-services/ai-content-safety/",[360],"Azure AI Content Safety"," are essential for filtering and mitigating harmful outputs effectively.",[153,7867,7868,7871,7872,7877],{},[391,7869,7870],{},"Lack of Fairness",": Generative AI can amplify biases present in training data, leading to unequal treatment of individuals or groups. 
Addressing this requires careful auditing of data, fairness evaluations with tools like ",[356,7873,7876],{"href":7874,"rel":7875},"https://fairlearn.org/",[360],"Fairlearn",", and ongoing monitoring to ensure equitable outcomes.",[153,7879,7880,7883],{},[391,7881,7882],{},"Legal Compliance",": Meet regulatory requirements such as GDPR and mitigate legal risks.",[153,7885,7886,7889],{},[391,7887,7888],{},"Reputation Management",": Maintain trust by avoiding ethical pitfalls and ensuring fair use.",[153,7891,7892,7895],{},[391,7893,7894],{},"Business Benefits",": Ethical AI fosters user trust, enhancing user retention and adoption.",[135,7897,7899],{"id":7898},"how-to-use-generative-ai-responsibly","How to use generative AI responsibly",[33,7901,7902],{},"Follow these steps to ensure your generative AI solutions in .NET are responsibly implemented:",[28,7904,7906],{"id":7905},"audit-your-data-sources","Audit Your Data Sources",[150,7908,7909,7912],{},[153,7910,7911],{},"Review and refine training data to avoid biases and inaccuracies.",[153,7913,7914,7915,7918],{},"Example: Use tools like ",[356,7916,7876],{"href":7874,"rel":7917},[360]," to assess fairness.",[28,7920,7922],{"id":7921},"implement-feedback-mechanisms","Implement Feedback Mechanisms",[150,7924,7925],{},[153,7926,7927],{},"Allow users to flag issues or provide corrections for model outputs.",[28,7929,7931],{"id":7930},"integrate-content-moderation","Integrate Content Moderation",[150,7933,7934],{},[153,7935,7936,7937,7940],{},"Utilize tools like ",[356,7938,7864],{"href":7862,"rel":7939},[360]," to filter inappropriate content.",[28,7942,7944],{"id":7943},"secure-your-models","Secure Your Models",[150,7946,7947],{},[153,7948,7949,7950,405],{},"Encrypt sensitive data and enforce authentication using libraries like ",[356,7951,7954],{"href":7952,"rel":7953},"https://github.com/AzureAD/microsoft-identity-web",[360],"Microsoft.Identity.Web",[28,7956,7958],{"id":7957},"test-for-edge-cases","Test for Edge Cases",[150,7960,7961],{},[153,7962,7963],{},"Simulate diverse scenarios, including adversarial and unusual inputs, to ensure robustness.",[28,7965,7967],{"id":7966},"ethical-considerations","Ethical Considerations",[150,7969,7970,7973,7976],{},[153,7971,7972],{},"Ensure transparency by informing users when they are interacting with AI.",[153,7974,7975],{},"Regularly update models to reflect ethical standards and societal norms.",[153,7977,7978],{},"Engage with diverse stakeholders to understand the broader impact of AI systems.",[28,7980,7982],{"id":7981},"continuous-monitoring","Continuous Monitoring",[150,7984,7985,7988,7991],{},[153,7986,7987],{},"Implement ongoing monitoring to detect and mitigate biases and inaccuracies.",[153,7989,7990],{},"Use automated tools to continuously evaluate the performance and fairness of AI models.",[153,7992,7993],{},"Regularly review user feedback and make necessary adjustments to improve the system.",[135,7995,7997],{"id":7996},"conclusions-and-resources","Conclusions and resources",[33,7999,8000],{},"Responsibly implementing generative AI in .NET applications is essential for ensuring ethical, secure, and unbiased outcomes. 
By adhering to fairness, inclusivity, transparency, accountability, and security principles, developers can build trustworthy AI systems that benefit users and society.",[505,8002,8003],{},[33,8004,1495,8005,7730],{},[391,8006,1498],{},[135,8008,1516],{"id":1515},[33,8010,8011],{},"Leverage the following tools to implement responsible AI practices:",[150,8013,8014,8020,8027,8033,8041,8049],{},[153,8015,8016,8019],{},[356,8017,7876],{"href":7874,"rel":8018},[360],": Evaluate and address fairness issues.",[153,8021,8022],{},[356,8023,8026],{"href":8024,"rel":8025},"https://techcommunity.microsoft.com/blog/educatordeveloperblog/fairlearn---a-python-package-to-assess-ai-systems-fairness/1402950",[360],"Fairlearn - A Python package to assess AI system's fairness",[153,8028,8029,8032],{},[356,8030,7864],{"href":7862,"rel":8031},[360],": Moderate content effectively.",[153,8034,8035,8040],{},[356,8036,8039],{"href":8037,"rel":8038},"https://azure.microsoft.com/products/cognitive-services/",[360],"Azure AI Services",": Build ethical AI solutions.",[153,8042,8043,8048],{},[356,8044,8047],{"href":8045,"rel":8046},"https://learn.microsoft.com/training/modules/embrace-responsible-ai-principles-practices/",[360],"Microsoft Learn - Responsible AI",": Explore responsible AI practices.",[153,8050,8051,8056],{},[356,8052,8055],{"href":8053,"rel":8054},"https://www.microsoft.com/ai/responsible-ai",[360],"Microsoft Responsible AI",": Learn how Microsoft does responsible AI practices.",{"title":11,"searchDepth":12,"depth":12,"links":8058},[8059,8060,8061,8070,8071],{"id":7794,"depth":12,"text":7795},{"id":7841,"depth":12,"text":7842},{"id":7898,"depth":12,"text":7899,"children":8062},[8063,8064,8065,8066,8067,8068,8069],{"id":7905,"depth":109,"text":7906},{"id":7921,"depth":109,"text":7922},{"id":7930,"depth":109,"text":7931},{"id":7943,"depth":109,"text":7944},{"id":7957,"depth":109,"text":7958},{"id":7966,"depth":109,"text":7967},{"id":7981,"depth":109,"text":7982},{"id":7996,"depth":12,"text":7997},{"id":1515,"depth":12,"text":1516},{},"/programming-language/platform/dotnet/generative-ai-for-beginners-dotnet/responsible-gen-ai",{"title":7781,"description":7789},"Programming Language/Platform/DotNet/Generative-AI-for-beginners-dotnet/5.Responsible-Gen-AI/index","KQ8odxiDmFBFPfJJrrRr4lxJcudgu1deaiK7Dev6OIU",{"id":8078,"title":8079,"body":8080,"description":11,"extension":14,"meta":8084,"navigation":17,"path":8085,"seo":8086,"stem":8087,"__hash__":8088},"content/Programming Language/languages like (C, C++ , Python, ...)/C/index.md","Programming Languages",{"type":8,"value":8081,"toc":8082},[],{"title":11,"searchDepth":12,"depth":12,"links":8083},[],{"author":16},"/programming-language/languages-like-(c-c++-python-...)/c",{"title":8079,"description":11},"Programming Language/languages like (C, C++ , Python, ...)/C/index","ELeUm8vqD_1pNOjdZYTzxUFgQ1MqtZ8UoNP7T0_SoAs",{"id":8090,"title":8091,"body":8092,"description":11,"extension":14,"meta":8096,"navigation":17,"path":8097,"seo":8098,"stem":8099,"__hash__":8100},"content/Software Engineering/CI-CD/index.md","CI/CD",{"type":8,"value":8093,"toc":8094},[],{"title":11,"searchDepth":12,"depth":12,"links":8095},[],{"author":16},"/software-engineering/ci-cd",{"title":8091,"description":11},"Software Engineering/CI-CD/index","9Mniv1c28qlq6I8d8BeRH6WswT0L241ByQa552jjJU8",{"id":8102,"title":8103,"body":8104,"description":11,"extension":14,"meta":8108,"navigation":17,"path":8109,"seo":8110,"stem":8111,"__hash__":8112},"content/Software Engineering/Design 
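The moderation step above names Azure AI Content Safety but the lesson stops short of code. As a rough sketch (not from the course), assuming the `Azure.AI.ContentSafety` NuGet package plus an `endpoint` and `key` for a provisioned Content Safety resource, screening generated text before it reaches the user could look like:

```csharp
using System;
using System.Linq;
using Azure;
using Azure.AI.ContentSafety;

// Hedged sketch: analyze model output across the service's harm categories
// and withhold it above a chosen severity. The threshold of 2 is an
// illustrative choice, not a recommendation from the lesson; `endpoint`,
// `key`, and `generatedText` are assumed to be defined by the caller.
var client = new ContentSafetyClient(new Uri(endpoint), new AzureKeyCredential(key));

AnalyzeTextResult analysis =
    (await client.AnalyzeTextAsync(new AnalyzeTextOptions(generatedText))).Value;

bool isSafe = analysis.CategoriesAnalysis.All(c => (c.Severity ?? 0) < 2);
string output = isSafe
    ? generatedText
    : "This response was withheld by the content filter.";
```

Pairing a gate like this with the feedback mechanism described earlier gives users a path to contest false positives.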
Patterns/index.md","Design Pattern",{"type":8,"value":8105,"toc":8106},[],{"title":11,"searchDepth":12,"depth":12,"links":8107},[],{"author":16},"/software-engineering/design-patterns",{"title":8103,"description":11},"Software Engineering/Design Patterns/index","RNhiaU4YYwX4W-Mkjnd1J4rQz_lYO9GWZ-TO7RxtpV4",{"id":8114,"title":8115,"body":8116,"description":11,"extension":14,"meta":8120,"navigation":17,"path":8121,"seo":8122,"stem":8123,"__hash__":8124},"content/Software Engineering/VSC/index.md","VSC",{"type":8,"value":8117,"toc":8118},[],{"title":11,"searchDepth":12,"depth":12,"links":8119},[],{"author":16},"/software-engineering/vsc",{"title":8115,"description":11},"Software Engineering/VSC/index","7Za5T7vMZ5hLBnb16HVnKjUXx0rbtvKkNgdHcwtZp-U",{"id":8126,"title":8127,"body":8128,"description":11,"extension":14,"meta":8132,"navigation":17,"path":8133,"seo":8134,"stem":8135,"__hash__":8136},"content/Software Engineering/methodologies/index.md","Development Methodologies",{"type":8,"value":8129,"toc":8130},[],{"title":11,"searchDepth":12,"depth":12,"links":8131},[],{"author":16},"/software-engineering/methodologies",{"title":8127,"description":11},"Software Engineering/methodologies/index","WhUs6FyLafhOiYF8TWlfhMFnTvDqv9lJQ2pZp5xUhcE",{"id":8138,"title":8139,"body":8140,"description":11,"extension":14,"meta":8144,"navigation":17,"path":8145,"seo":8146,"stem":8147,"__hash__":8148},"content/Software Testing/Unit/index.md","Software Testing - Unit",{"type":8,"value":8141,"toc":8142},[],{"title":11,"searchDepth":12,"depth":12,"links":8143},[],{"author":16},"/software-testing/unit",{"title":8139,"description":11},"Software Testing/Unit/index","aZ-0Ui7BLj_gWJ0m4omcNHxvE0bPVm9A5HEq-I9jmLo",{"id":8150,"title":8151,"body":8152,"description":11,"extension":14,"meta":8156,"navigation":17,"path":8157,"seo":8158,"stem":8159,"__hash__":8160},"content/Software Testing/e2e/index.md","Software Testing - e2e",{"type":8,"value":8153,"toc":8154},[],{"title":11,"searchDepth":12,"depth":12,"links":8155},[],{"author":16},"/software-testing/e2e",{"title":8151,"description":11},"Software Testing/e2e/index","4hkmCm5Dv79kIouCoeyRU3TRlNQ-IGzc3LQuH5sppDI",{"id":8162,"title":8163,"body":8164,"description":11,"extension":14,"meta":8168,"navigation":17,"path":8169,"seo":8170,"stem":8171,"__hash__":8172},"content/Software Testing/integration/index.md","Software Testing - Integration",{"type":8,"value":8165,"toc":8166},[],{"title":11,"searchDepth":12,"depth":12,"links":8167},[],{"author":16},"/software-testing/integration",{"title":8163,"description":11},"Software Testing/integration/index","gidAVqmdYo-GYr3YgDa2zBC_6fmUYH9yOnXh38sUsXY",{"id":8174,"title":8175,"body":8176,"description":11,"extension":14,"meta":8180,"navigation":17,"path":8181,"seo":8182,"stem":8183,"__hash__":8184},"content/Web Development/Back-end/DotNet/index.md","Back-end Development",{"type":8,"value":8177,"toc":8178},[],{"title":11,"searchDepth":12,"depth":12,"links":8179},[],{"author":16},"/web-development/back-end/dotnet",{"title":8175,"description":11},"Web Development/Back-end/DotNet/index","_fRYCVI04wfj8_qbXSlVZbrq-9Ir_vTRvweD2hUmDEQ",{"id":8186,"title":8187,"body":8188,"description":11,"extension":14,"meta":8192,"navigation":17,"path":8193,"seo":8194,"stem":8195,"__hash__":8196},"content/Web Development/Front-end/index.md","front-end Development",{"type":8,"value":8189,"toc":8190},[],{"title":11,"searchDepth":12,"depth":12,"links":8191},[],{"author":16},"/web-development/front-end",{"title":8187,"description":11},"Web 
Development/Front-end/index","YiCJmfS4LRdcQk45j521WkHHTElR3ZJdvl9EgDZU8mE",{"id":8198,"title":8199,"body":8200,"description":8213,"extension":14,"meta":8214,"navigation":17,"path":8219,"seo":8220,"stem":8221,"__hash__":8222},"content/Web Development/Front-end/web-components.md","Web Components",{"type":8,"value":8201,"toc":8211},[8202,8205],[33,8203,8204],{},"Embark on a journey through the world of web components in this comprehensive course!\nDiscover the essence of web components, how they came to be, and the reasons why major tech companies are adopting them over traditional JavaScript libraries. Following this introduction, we'll delve into the technical aspects, exploring the building blocks of web components and guiding you through creating your own.\nFor those ready to leap into this exciting topic, your adventure starts here:",[33,8206,8207],{},[356,8208,8210],{"href":8209},"/web-components/what-are-web-components","1. What are Web Components?",{"title":11,"searchDepth":12,"depth":12,"links":8212},[],"Embark on a journey through the world of web components in this comprehensive course!\\nDiscover the essence of web components, how they came to be, and the reasons why major tech companies are adopting them over traditional JavaScript libraries. Following this introduction, we'll delve into the technical aspects, exploring the building blocks of web components and guiding you through creating your own.\\nFor those ready to leap into this exciting topic, your adventure starts here:",{"author":8215,"github":8216,"tags":8217,"cover":8218},"Masoud Alemi","iMasoud","web components, front end","/media/web-components/images/web-components.png","/web-development/front-end/web-components",{"title":8199,"description":8204},"Web Development/Front-end/web-components","6WBr7nu8OVdR0EOFyWkMsBxG03kkSyksJo_7kOAcaZQ",{"id":8224,"title":8225,"body":8226,"description":8230,"extension":14,"meta":8314,"navigation":17,"path":8320,"seo":8321,"stem":8322,"__hash__":8323},"content/Web Development/Front-end/web-components/what-are-web-components.md","What are Web Components?",{"type":8,"value":8227,"toc":8307},[8228,8231,8235,8238,8241,8244,8248,8251,8254,8258,8261,8264,8267,8270,8274,8277,8284,8287,8290,8293,8295,8298,8301],[33,8229,8230],{},"In 2004, Gmail revolutionized the web platform — could we be on the cusp of another major shift? Perhaps this time, it will be a transformation that relies on less JavaScript.",[28,8232,8234],{"id":8233},"the-dawn-of-ajax-and-spas","The Dawn of AJAX and SPAs",[33,8236,8237],{},"The landscape of web development has undergone a seismic shift over the past two decades, with the advent of AJAX and the proliferation of JavaScript libraries fundamentally altering how we interact with the web. This evolution can be traced back to a pivotal moment in the early 2000s when Gmail was launched, utilizing AJAX to create a user experience that felt more like an application than a series of web pages. This marked the beginning of what we now refer to as Single Page Applications (SPAs).",[33,8239,8240],{},"Gmail's use of AJAX was revolutionary for its time. It allowed for asynchronous data fetching, which meant that the page didn't need to be reloaded to update the content. 
This approach not only improved the user experience by making it smoother and more responsive but also set the stage for the web 2.0 era, characterized by interactive, dynamic websites that could update content without a full page reload.",[33,8242,8243],{},"The success of Gmail's AJAX-based interface inspired a wave of innovation, leading to the development of more complex web applications. Developers began to see the potential of AJAX to create rich, engaging user experiences, and this led to the widespread adoption of the technology in web development.",[28,8245,8247],{"id":8246},"the-rise-of-javascript-libraries","The Rise of JavaScript Libraries",[33,8249,8250],{},"As web applications grew in complexity, so did the need for more structured ways to manage the growing codebase. This need gave rise to JavaScript libraries like React, which provided developers with a way to build user interfaces using reusable components. React, in particular, gained popularity for its virtual DOM, which optimized performance by minimizing the number of updates to the actual DOM.",[33,8252,8253],{},"The introduction of these libraries marked a significant shift in web development practices. They allowed for the creation of SPAs that were not only fast and efficient but also easier to maintain and scale. The component-based architecture of libraries like React made code more reusable and paved the way for a more modular approach to building web applications.",[28,8255,8257],{"id":8256},"the-case-for-web-components","The Case for Web Components",[33,8259,8260],{},"Despite the advantages of SPAs and JavaScript libraries, there are inherent issues that developers face, such as SEO challenges, increased complexity, and potential performance bottlenecks. These challenges have led some in the developer community to advocate for a \"correction of course\" towards web components.",[33,8262,8263],{},"Web components offer a standards-based way of creating reusable custom elements, independent of any specific JavaScript library or framework. They provide encapsulation and interoperability, which means they can work across different browsers and can be used with any JavaScript library or even with vanilla JavaScript.",[33,8265,8266],{},"One of the key advantages of web components over SPAs based on JavaScript libraries is their lightweight nature. Without the overhead of a framework, web components can lead to faster load times and better performance, especially on mobile devices where resources are more limited.",[33,8268,8269],{},"Moreover, web components align closely with the web's native APIs, making them a more future-proof choice as they are less likely to be affected by the shifting trends in JavaScript library popularity. They also offer better compatibility with SEO practices, as they can be rendered server-side, and their content is more easily indexed by search engines.",[28,8271,8273],{"id":8272},"adoption-by-industry-leaders","Adoption by Industry Leaders",[33,8275,8276],{},"Perhaps GitHub and Microsoft have been at the forefront of embracing Web Components to enhance performance and user experience. Microsoft's FAST initiative showcases this commitment.",[33,8278,8279,8280,8283],{},"The FAST library offers a lightweight solution for building performant, memory-efficient, and standards-compliant Web Components that function seamlessly across all major browsers. 
It enables developers to create reusable UI components with ",[594,8281,8282],{},"@microsoft/fast-element",", which can be integrated with any library or framework, or even used without one.",[33,8285,8286],{},"The 2023 State of Web Components report highlights the adoption of these standards by industry giants, including Microsoft, which has seen a 30%-50% performance improvement in MSN by switching from React to Fluent UI Web Components based on FAST. This is a testament to the potential of Web Components in delivering faster and more responsive web applications.",[33,8288,8289],{},"GitHub's adoption of Web Components, as detailed in their blog, further underscores the advantages they offer over traditional JavaScript behaviors. The encapsulation and portability provided by Web Components have allowed GitHub to create more modular and maintainable codebases.",[33,8291,8292],{},"For developers accustomed to libraries like React or Vue.js, the transition to Web Components may seem daunting. However, the benefits of improved performance, standards compliance, and the ability to work across different frameworks make it a worthwhile endeavor. As the web continues to mature, the shift towards Web Components is a clear step towards a more open and interoperable web ecosystem.",[28,8294,767],{"id":766},[33,8296,8297],{},"The journey from Gmail's AJAX-based interface to the modern landscape of web components highlights the constant evolution of web development. While SPAs and JavaScript libraries like React have played a crucial role in shaping the web, the move towards web components represents a shift towards more sustainable, standards-based development practices.",[33,8299,8300],{},"For developers, the choice between continuing with SPAs or embracing web components will depend on the specific needs of their projects. However, the trend towards web components suggests a growing recognition of the need for more lightweight, interoperable, and maintainable approaches to building web applications.",[33,8302,8303,8306],{},[391,8304,8305],{},"Intrigued? Then stay tuned!"," As in the next chapter we'll dive into the building blocks of a web component that empowers you to build your own!",{"title":11,"searchDepth":12,"depth":12,"links":8308},[8309,8310,8311,8312,8313],{"id":8233,"depth":109,"text":8234},{"id":8246,"depth":109,"text":8247},{"id":8256,"depth":109,"text":8257},{"id":8272,"depth":109,"text":8273},{"id":766,"depth":109,"text":767},{"author":8215,"github":8216,"tags":8217,"cover":8315,"excerpt":8316},"/media/web-components/images/what-are-web-components.jpeg",{"type":8,"value":8317},[8318],[33,8319,8230],{},"/web-development/front-end/web-components/what-are-web-components",{"title":8225,"description":8230},"Web Development/Front-end/web-components/what-are-web-components","chuOoSmdAmSqhpBc7I2Fq82cGeLgpnPqe7jf2Ct02Sw",{"id":8325,"title":8326,"body":8327,"description":8910,"extension":14,"meta":8911,"navigation":17,"path":8915,"seo":8916,"stem":8917,"__hash__":8918},"content/contribution-guide/index.md","Markdown Guide for Contributors",{"type":8,"value":8328,"toc":8887},[8329,8333,8336,8340,8421,8425,8465,8469,8479,8482,8513,8517,8527,8547,8551,8563,8578,8589,8604,8613,8622,8626,8630,8643,8668,8672,8675,8700,8704,8714,8723,8727,8733,8742,8744,8753,8757,8760,8764,8777,8802,8806,8810,8816,8848,8852,8855,8881,8884],[135,8330,8332],{"id":8331},"_1-front-matter","1. Front-matter",[33,8334,8335],{},"Every Markdown file should start with a Front-matter section. 
This is where you provide metadata for the page, such as the title, description, tags, author name, and GitHub username. The Front-matter is written in YAML syntax with key-value pairs.",[28,8337,8339],{"id":8338},"example","Example:",[588,8341,8345],{"className":8342,"code":8343,"language":8344,"meta":11,"style":11},"language-yaml shiki shiki-themes github-light github-dark","---\ntitle: Your Article Title\ndescription: A brief description of your article\ntags: [\"tag1\", \"tag2\"]\nauthor: Your Name\ngithub: YourGitHubUsername\n---\nMarkdown content here...\n","yaml",[594,8346,8347,8352,8363,8373,8392,8402,8412,8416],{"__ignoreMap":11},[597,8348,8349],{"class":599,"line":600},[597,8350,8351],{"class":972},"---\n",[597,8353,8354,8358,8360],{"class":599,"line":12},[597,8355,8357],{"class":8356},"s9eBZ","title",[597,8359,3100],{"class":2101},[597,8361,8362],{"class":976},"Your Article Title\n",[597,8364,8365,8368,8370],{"class":599,"line":109},[597,8366,8367],{"class":8356},"description",[597,8369,3100],{"class":2101},[597,8371,8372],{"class":976},"A brief description of your article\n",[597,8374,8375,8378,8381,8384,8386,8389],{"class":599,"line":616},[597,8376,8377],{"class":8356},"tags",[597,8379,8380],{"class":2101},": [",[597,8382,8383],{"class":976},"\"tag1\"",[597,8385,529],{"class":2101},[597,8387,8388],{"class":976},"\"tag2\"",[597,8390,8391],{"class":2101},"]\n",[597,8393,8394,8397,8399],{"class":599,"line":622},[597,8395,8396],{"class":8356},"author",[597,8398,3100],{"class":2101},[597,8400,8401],{"class":976},"Your Name\n",[597,8403,8404,8407,8409],{"class":599,"line":628},[597,8405,8406],{"class":8356},"github",[597,8408,3100],{"class":2101},[597,8410,8411],{"class":976},"YourGitHubUsername\n",[597,8413,8414],{"class":599,"line":634},[597,8415,8351],{"class":972},[597,8417,8418],{"class":599,"line":3147},[597,8419,8420],{"class":976},"Markdown content here...\n",[28,8422,8424],{"id":8423},"keys-explained","Keys Explained:",[150,8426,8427,8432,8437,8442,8447,8452],{},[153,8428,8429,8431],{},[391,8430,8357],{},": The title of your article.",[153,8433,8434,8436],{},[391,8435,8367],{},": A brief description of your article.",[153,8438,8439,8441],{},[391,8440,8377],{},": A string array of the tags relevant to the content.",[153,8443,8444,8446],{},[391,8445,8396],{},": The name of the author of the content.",[153,8448,8449,8451],{},[391,8450,8406],{},": The GitHub username of the author of the content.",[153,8453,8454,8457,8458,8461,8462,405],{},[391,8455,8456],{},"cover",": Path of an image you've placed in a subdirectory within ",[594,8459,8460],{},"public/media"," of the repository to use as the article cover photo. It should start with ",[594,8463,8464],{},"/media/",[135,8466,8468],{"id":8467},"_2-content-excerpt","2. Content Excerpt",[33,8470,8471,8472,8475,8476,8478],{},"It's recommended that your content includes an excerpt or summary. You can create an excerpt by using ",[594,8473,8474],{},"\u003C!--more-->"," as a divider within your content. 
The excerpt is the content before the ",[594,8477,8474],{}," tag.",[28,8480,8339],{"id":8481},"example-1",[588,8483,8487],{"className":8484,"code":8485,"language":8486,"meta":11,"style":11},"language-markdown shiki shiki-themes github-light github-dark","# Your Article Title\n\nLearn how to use...\n\u003C!--more-->\nFull amount of content beyond the more divider.\n","markdown",[594,8488,8489,8494,8498,8503,8508],{"__ignoreMap":11},[597,8490,8491],{"class":599,"line":600},[597,8492,8493],{},"# Your Article Title\n",[597,8495,8496],{"class":599,"line":12},[597,8497,1485],{"emptyLinePlaceholder":17},[597,8499,8500],{"class":599,"line":109},[597,8501,8502],{},"Learn how to use...\n",[597,8504,8505],{"class":599,"line":616},[597,8506,8507],{},"\u003C!--more-->\n",[597,8509,8510],{"class":599,"line":622},[597,8511,8512],{},"Full amount of content beyond the more divider.\n",[135,8514,8516],{"id":8515},"_3-headings","3. Headings",[33,8518,8519,8520,8523,8524,8526],{},"Use the ",[594,8521,8522],{},"#"," symbol to create headings. The number of ",[594,8525,8522],{}," symbols represents the heading level.",[588,8528,8530],{"className":8484,"code":8529,"language":8486,"meta":11,"style":11},"# Heading 1\n## Heading 2\n### Heading 3\n",[594,8531,8532,8537,8542],{"__ignoreMap":11},[597,8533,8534],{"class":599,"line":600},[597,8535,8536],{},"# Heading 1\n",[597,8538,8539],{"class":599,"line":12},[597,8540,8541],{},"## Heading 2\n",[597,8543,8544],{"class":599,"line":109},[597,8545,8546],{},"### Heading 3\n",[135,8548,8550],{"id":8549},"_4-bold-and-italic-text","4. Bold and Italic Text",[33,8552,8553,8554,8557,8558,954,8560,405],{},"To make text ",[391,8555,8556],{},"bold",", wrap it with ",[594,8559,2120],{},[594,8561,8562],{},"__",[588,8564,8566],{"className":8484,"code":8565,"language":8486,"meta":11,"style":11},"**Bold Text**\n__Bold Text__\n",[594,8567,8568,8573],{"__ignoreMap":11},[597,8569,8570],{"class":599,"line":600},[597,8571,8572],{},"**Bold Text**\n",[597,8574,8575],{"class":599,"line":12},[597,8576,8577],{},"__Bold Text__\n",[33,8579,8553,8580,8557,8583,954,8586,405],{},[368,8581,8582],{},"italic",[594,8584,8585],{},"*",[594,8587,8588],{},"_",[588,8590,8592],{"className":8484,"code":8591,"language":8486,"meta":11,"style":11},"*Italic Text*\n_Italic Text_\n",[594,8593,8594,8599],{"__ignoreMap":11},[597,8595,8596],{"class":599,"line":600},[597,8597,8598],{},"*Italic Text*\n",[597,8600,8601],{"class":599,"line":12},[597,8602,8603],{},"_Italic Text_\n",[33,8605,8606,8607,8612],{},"You can also combine both for ",[368,8608,8609],{},[391,8610,8611],{},"bold and italic"," text.",[588,8614,8616],{"className":8484,"code":8615,"language":8486,"meta":11,"style":11},"***Bold and Italic Text***\n",[594,8617,8618],{"__ignoreMap":11},[597,8619,8620],{"class":599,"line":600},[597,8621,8615],{},[135,8623,8625],{"id":8624},"_5-lists","5. 
Lists",[28,8627,8629],{"id":8628},"_51-unordered-lists","5.1 Unordered Lists",[33,8631,8632,8633,529,8635,8638,8639,8642],{},"Create unordered lists using ",[594,8634,8585],{},[594,8636,8637],{},"+",", or ",[594,8640,8641],{},"-"," followed by a space.",[588,8644,8646],{"className":8484,"code":8645,"language":8486,"meta":11,"style":11},"* Item 1\n* Item 2\n  * Subitem 2.1\n  * Subitem 2.2\n",[594,8647,8648,8653,8658,8663],{"__ignoreMap":11},[597,8649,8650],{"class":599,"line":600},[597,8651,8652],{},"* Item 1\n",[597,8654,8655],{"class":599,"line":12},[597,8656,8657],{},"* Item 2\n",[597,8659,8660],{"class":599,"line":109},[597,8661,8662],{},"  * Subitem 2.1\n",[597,8664,8665],{"class":599,"line":616},[597,8666,8667],{},"  * Subitem 2.2\n",[28,8669,8671],{"id":8670},"_52-ordered-lists","5.2 Ordered Lists",[33,8673,8674],{},"Create ordered lists using numbers followed by a period and a space.",[588,8676,8678],{"className":8484,"code":8677,"language":8486,"meta":11,"style":11},"1. First item\n2. Second item\n   1. Subitem 2.1\n   2. Subitem 2.2\n",[594,8679,8680,8685,8690,8695],{"__ignoreMap":11},[597,8681,8682],{"class":599,"line":600},[597,8683,8684],{},"1. First item\n",[597,8686,8687],{"class":599,"line":12},[597,8688,8689],{},"2. Second item\n",[597,8691,8692],{"class":599,"line":109},[597,8693,8694],{},"   1. Subitem 2.1\n",[597,8696,8697],{"class":599,"line":616},[597,8698,8699],{},"   2. Subitem 2.2\n",[135,8701,8703],{"id":8702},"_6-links","6. Links",[33,8705,8706,8707,8710,8711,405],{},"To create a link, wrap the link text in ",[594,8708,8709],{},"[ ]"," and the URL in ",[594,8712,8713],{},"( )",[588,8715,8717],{"className":8484,"code":8716,"language":8486,"meta":11,"style":11},"[Link Text](https://example.com)\n",[594,8718,8719],{"__ignoreMap":11},[597,8720,8721],{"class":599,"line":600},[597,8722,8716],{},[135,8724,8726],{"id":8725},"_7-images","7. Images",[33,8728,8729,8730,8732],{},"To add images, place them in a subdirectory within ",[594,8731,8460],{}," of the repository and use the following format:",[588,8734,8736],{"className":8484,"code":8735,"language":8486,"meta":11,"style":11},"![Alt Text](/path/to/image.png)\n",[594,8737,8738],{"__ignoreMap":11},[597,8739,8740],{"class":599,"line":600},[597,8741,8735],{},[33,8743,8339],{},[588,8745,8747],{"className":8484,"code":8746,"language":8486,"meta":11,"style":11},"![My Image](/media/course-title/my-image.png)\n",[594,8748,8749],{"__ignoreMap":11},[597,8750,8751],{"class":599,"line":600},[597,8752,8746],{},[135,8754,8756],{"id":8755},"_8-code-blocks","8. Code Blocks",[33,8758,8759],{},"To include code in your content, use triple backticks (```) before and after the code block. Specify the language for syntax highlighting.\nFor example, you should put ```javascript before your javascript code starts and put ``` after it ends.",[135,8761,8763],{"id":8762},"_9-tables","9. Tables",[33,8765,8766,8767,8770,8771,8773,8774,8776],{},"Create tables using pipes ",[594,8768,8769],{},"|"," and hyphens ",[594,8772,8641],{},". 
Align the text by placing colons ",[594,8775,1063],{}," in the delimiter row beneath the header.",[588,8778,8780],{"className":8484,"code":8779,"language":8486,"meta":11,"style":11},"| Header 1 | Header 2 | Header 3 |\n|:-------- |:--------:| --------:|\n| Left     | Center   | Right    |\n| Content  | Content  | Content  |\n",[594,8781,8782,8787,8792,8797],{"__ignoreMap":11},[597,8783,8784],{"class":599,"line":600},[597,8785,8786],{},"| Header 1 | Header 2 | Header 3 |\n",[597,8788,8789],{"class":599,"line":12},[597,8790,8791],{},"|:-------- |:--------:| --------:|\n",[597,8793,8794],{"class":599,"line":109},[597,8795,8796],{},"| Left     | Center   | Right    |\n",[597,8798,8799],{"class":599,"line":616},[597,8800,8801],{},"| Content  | Content  | Content  |\n",[135,8803,8805],{"id":8804},"_10-custom-components","10. Custom Components",[28,8807,8809],{"id":8808},"_101-video-player","10.1 Video Player",[33,8811,8812,8813,8815],{},"To include a video in your content, upload it to a subdirectory within ",[594,8814,8460],{}," and use the following component:",[588,8817,8821],{"className":8818,"code":8819,"language":8820,"meta":11,"style":11},"language-html shiki shiki-themes github-light github-dark","\u003Cvideo-player src=\"/media/course-title/my-lesson.mp4\">\u003C/video-player>\n","html",[594,8822,8823],{"__ignoreMap":11},[597,8824,8825,8828,8831,8834,8837,8840,8843,8845],{"class":599,"line":600},[597,8826,8827],{"class":2101},"\u003C",[597,8829,8830],{"class":8356},"video-player",[597,8832,8833],{"class":972}," src",[597,8835,8836],{"class":2101},"=",[597,8838,8839],{"class":976},"\"/media/course-title/my-lesson.mp4\"",[597,8841,8842],{"class":2101},">\u003C/",[597,8844,8830],{"class":8356},[597,8846,8847],{"class":2101},">\n",[28,8849,8851],{"id":8850},"_102-youtube-videos","10.2 YouTube Videos",[33,8853,8854],{},"To embed a YouTube video, use the following component:",[588,8856,8858],{"className":8818,"code":8857,"language":8820,"meta":11,"style":11},"\u003Cyoutube src=\"https://www.youtube.com/watch?v=Z0dvAy1puIE\">\u003C/youtube>\n",[594,8859,8860],{"__ignoreMap":11},[597,8861,8862,8864,8868,8870,8872,8875,8877,8879],{"class":599,"line":600},[597,8863,8827],{"class":2101},[597,8865,8867],{"class":8866},"s7hpK","youtube",[597,8869,8833],{"class":972},[597,8871,8836],{"class":2101},[597,8873,8874],{"class":976},"\"https://www.youtube.com/watch?v=Z0dvAy1puIE\"",[597,8876,8842],{"class":2101},[597,8878,8867],{"class":8866},[597,8880,8847],{"class":2101},[33,8882,8883],{},"Feel free to refer to this guide whenever you need help formatting your content. 
Happy contributing!",[789,8885,8886],{},"html .default .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .shiki span {color: var(--shiki-default);background: var(--shiki-default-bg);font-style: var(--shiki-default-font-style);font-weight: var(--shiki-default-font-weight);text-decoration: var(--shiki-default-text-decoration);}html .dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html.dark .shiki span {color: var(--shiki-dark);background: var(--shiki-dark-bg);font-style: var(--shiki-dark-font-style);font-weight: var(--shiki-dark-font-weight);text-decoration: var(--shiki-dark-text-decoration);}html pre.shiki code .sScJk, html code.shiki .sScJk{--shiki-default:#6F42C1;--shiki-dark:#B392F0}html pre.shiki code .s9eBZ, html code.shiki .s9eBZ{--shiki-default:#22863A;--shiki-dark:#85E89D}html pre.shiki code .sVt8B, html code.shiki .sVt8B{--shiki-default:#24292E;--shiki-dark:#E1E4E8}html pre.shiki code .sZZnC, html code.shiki .sZZnC{--shiki-default:#032F62;--shiki-dark:#9ECBFF}html pre.shiki code .s7hpK, html code.shiki .s7hpK{--shiki-default:#B31D28;--shiki-default-font-style:italic;--shiki-dark:#FDAEB7;--shiki-dark-font-style:italic}",{"title":11,"searchDepth":12,"depth":12,"links":8888},[8889,8893,8896,8897,8898,8902,8903,8904,8905,8906],{"id":8331,"depth":12,"text":8332,"children":8890},[8891,8892],{"id":8338,"depth":109,"text":8339},{"id":8423,"depth":109,"text":8424},{"id":8467,"depth":12,"text":8468,"children":8894},[8895],{"id":8481,"depth":109,"text":8339},{"id":8515,"depth":12,"text":8516},{"id":8549,"depth":12,"text":8550},{"id":8624,"depth":12,"text":8625,"children":8899},[8900,8901],{"id":8628,"depth":109,"text":8629},{"id":8670,"depth":109,"text":8671},{"id":8702,"depth":12,"text":8703},{"id":8725,"depth":12,"text":8726},{"id":8755,"depth":12,"text":8756},{"id":8762,"depth":12,"text":8763},{"id":8804,"depth":12,"text":8805,"children":8907},[8908,8909],{"id":8808,"depth":109,"text":8809},{"id":8850,"depth":109,"text":8851},"A markdown and content authoring guide for people who want to contribute to CLAN UNI!",{"tags":8912,"author":8215,"github":8216},[8486,8913,8914],"CLAN UNI","Intenral","/contribution-guide",{"title":8326,"description":8910},"contribution-guide/index","qjP4KWdN0PhmIz1ISavv5EWwvjQb_t8YpMWZwjfCi8s",{"id":8920,"title":8921,"body":8922,"description":8929,"extension":14,"meta":9246,"navigation":17,"path":9249,"seo":9250,"stem":9251,"__hash__":9252},"content/index.md","Welcome to CODE CLAN University",{"type":8,"value":8923,"toc":9230},[8924,8927,8930,8933,8937,8940,8944,8947,8951,8966,8970,8973,8989,8993,8999,9024,9028,9031,9039,9046,9050,9056,9060,9063,9106,9110,9113,9140,9144,9147,9164,9168,9179,9183,9186,9197,9200,9204,9207,9211,9222,9225,9228],[343,8925,8921],{"id":8926},"welcome-to-code-clan-university",[33,8928,8929],{},"CODE CLAN University is a dynamic community forum that provides free technical training courses for software developers. Our mission is to empower individuals with the knowledge and skills they need to succeed in the tech industry. Our community is driven by a shared passion for learning and a commitment to helping one another grow. 
Join us today and become part of a supportive network of like-minded individuals!",[33,8931,8932],{},"If you want to share educational material that is related to programming by any means, we encourage and appreciate your contribution to this project.",[135,8934,8936],{"id":8935},"what-type-of-material-is-suitable","What type of material is suitable",[33,8938,8939],{},"Any educational material that is somehow related to the world of computer software or hardware.",[135,8941,8943],{"id":8942},"how-to-contribute","How to Contribute",[33,8945,8946],{},"We welcome contributions in the form of articles, tutorials, and more. Follow the steps below to get started:",[28,8948,8950],{"id":8949},"_1-fork-the-repository","1. Fork the Repository",[1053,8952,8953,8961],{},[153,8954,8955,8956,405],{},"Navigate to our ",[356,8957,8960],{"href":8958,"rel":8959},"https://github.com/CODE-CLAN-AUS/clan-uni",[360],"GitHub repository",[153,8962,1191,8963,8965],{},[391,8964,1187],{}," button in the top right corner to create your own copy of the repository.",[28,8967,8969],{"id":8968},"_2-clone-your-fork","2. Clone Your Fork",[33,8971,8972],{},"Clone the repository to your local machine using the following command:",[588,8974,8976],{"className":963,"code":8975,"language":965,"meta":11,"style":11},"git clone https://github.com/your-username/your-repo-name.git\n",[594,8977,8978],{"__ignoreMap":11},[597,8979,8980,8983,8986],{"class":599,"line":600},[597,8981,8982],{"class":972},"git",[597,8984,8985],{"class":976}," clone",[597,8987,8988],{"class":976}," https://github.com/your-username/your-repo-name.git\n",[28,8990,8992],{"id":8991},"_3-create-a-new-branch","3. Create a New Branch",[33,8994,8995,8996,405],{},"Create a new branch for your content. It’s good practice to name your branch based on the content you are adding, e.g., ",[594,8997,8998],{},"add-python-course",[588,9000,9002],{"className":963,"code":9001,"language":965,"meta":11,"style":11},"cd your-repo-name\ngit checkout -b add-python-course\n",[594,9003,9004,9011],{"__ignoreMap":11},[597,9005,9006,9008],{"class":599,"line":600},[597,9007,1297],{"class":1296},[597,9009,9010],{"class":976}," your-repo-name\n",[597,9012,9013,9015,9018,9021],{"class":599,"line":12},[597,9014,8982],{"class":972},[597,9016,9017],{"class":976}," checkout",[597,9019,9020],{"class":1296}," -b",[597,9022,9023],{"class":976}," add-python-course\n",[28,9025,9027],{"id":9026},"_4-write-your-content","4. Write Your Content",[33,9029,9030],{},"Add your content in the appropriate directory. Our content is organized using the Nuxt Content module. Here’s a basic structure:",[588,9032,9037],{"className":9033,"code":9035,"language":9036},[9034],"language-text","content/\n└── course-name/\n    └── index.md\n    └── lesson-one-name.md\n    └── lesson-two-name.md\n","text",[594,9038,9035],{"__ignoreMap":11},[33,9040,9041,9042,9045],{},"Create a new directory or new Markdown files anywhere in the ",[594,9043,9044],{},"content/"," directory.",[459,9047,9049],{"id":9048},"markdown-guide","Markdown Guide",[33,9051,9052,9053,405],{},"For a comprehensive guide on markdown and content authoring, please refer to our ",[356,9054,9055],{"href":8915},"extensive markdown/content authoring guide",[28,9057,9059],{"id":9058},"_5-preview-your-changes","5. 
Preview Your Changes",[33,9061,9062],{},"To preview your changes locally, follow these steps:",[1053,9064,9065,9081,9099],{},[153,9066,9067,9068],{},"Install dependencies if you haven't already:",[588,9069,9071],{"className":963,"code":9070,"language":965,"meta":11,"style":11},"npm install\n",[594,9072,9073],{"__ignoreMap":11},[597,9074,9075,9078],{"class":599,"line":600},[597,9076,9077],{"class":972},"npm",[597,9079,9080],{"class":976}," install\n",[153,9082,9083,9084],{},"Start the development server:",[588,9085,9087],{"className":963,"code":9086,"language":965,"meta":11,"style":11},"npm run dev\n",[594,9088,9089],{"__ignoreMap":11},[597,9090,9091,9093,9096],{"class":599,"line":600},[597,9092,9077],{"class":972},[597,9094,9095],{"class":976}," run",[597,9097,9098],{"class":976}," dev\n",[153,9100,9101,9102,9105],{},"Open your browser and go to ",[594,9103,9104],{},"http://localhost:3000"," to see your changes.",[28,9107,9109],{"id":9108},"_6-commit-your-changes","6. Commit Your Changes",[33,9111,9112],{},"Once you are satisfied with your content, commit your changes:",[588,9114,9116],{"className":963,"code":9115,"language":965,"meta":11,"style":11},"git add .\ngit commit -m \"Add article on [your topic]\"\n",[594,9117,9118,9127],{"__ignoreMap":11},[597,9119,9120,9122,9124],{"class":599,"line":600},[597,9121,8982],{"class":972},[597,9123,1938],{"class":976},[597,9125,9126],{"class":976}," .\n",[597,9128,9129,9131,9134,9137],{"class":599,"line":12},[597,9130,8982],{"class":972},[597,9132,9133],{"class":976}," commit",[597,9135,9136],{"class":1296}," -m",[597,9138,9139],{"class":976}," \"Add article on [your topic]\"\n",[28,9141,9143],{"id":9142},"_7-push-your-changes","7. Push Your Changes",[33,9145,9146],{},"Push your changes to your forked repository:",[588,9148,9150],{"className":963,"code":9149,"language":965,"meta":11,"style":11},"git push origin add-python-course\n",[594,9151,9152],{"__ignoreMap":11},[597,9153,9154,9156,9159,9162],{"class":599,"line":600},[597,9155,8982],{"class":972},[597,9157,9158],{"class":976}," push",[597,9160,9161],{"class":976}," origin",[597,9163,9023],{"class":976},[28,9165,9167],{"id":9166},"_8-create-a-pull-request","8. Create a Pull Request",[33,9169,9170,9171,9174,9175,9178],{},"Navigate to your forked repository on GitHub and click the ",[391,9172,9173],{},"New pull request"," button. Compare your branch with the original repository's ",[594,9176,9177],{},"main"," branch. Submit the pull request with a descriptive message about your content.",[28,9180,9182],{"id":9181},"_9-netlify-deployment-preview","9. Netlify Deployment Preview",[33,9184,9185],{},"Once you submit your pull request, our CI/CD pipeline managed by Netlify will automatically kick in. Netlify bot will:",[150,9187,9188,9191,9194],{},[153,9189,9190],{},"Check the proposed changes.",[153,9192,9193],{},"If everything is fine, deploy the changes to a preview environment.",[153,9195,9196],{},"Post a link to the preview deployment in the pull request.",[33,9198,9199],{},"You can use this preview link to review your changes. If you are not satisfied, you can make additional commits to your branch. Netlify will keep checking and redeploying the changes until you are happy with the results.",[28,9201,9203],{"id":9202},"_10-review-and-merge","10. Review and Merge",[33,9205,9206],{},"After you are satisfied with your changes in the preview deployment, our team will review your pull request. We may suggest some changes or improvements. 
Once everything is approved, we will merge your changes into the main branch and they will be live on the website.",[135,9208,9210],{"id":9209},"community-guidelines","Community Guidelines",[150,9212,9213,9216,9219],{},[153,9214,9215],{},"Ensure your content is original and well-written.",[153,9217,9218],{},"Provide clear and concise information.",[153,9220,9221],{},"Respect all contributors and community members.",[33,9223,9224],{},"Thank you for your contribution! Your effort helps create a valuable resource for the community.",[33,9226,9227],{},"Happy contributing!",[789,9229,1559],{},{"title":11,"searchDepth":12,"depth":12,"links":9231},[9232,9233,9245],{"id":8935,"depth":12,"text":8936},{"id":8942,"depth":12,"text":8943,"children":9234},[9235,9236,9237,9238,9239,9240,9241,9242,9243,9244],{"id":8949,"depth":109,"text":8950},{"id":8968,"depth":109,"text":8969},{"id":8991,"depth":109,"text":8992},{"id":9026,"depth":109,"text":9027},{"id":9058,"depth":109,"text":9059},{"id":9108,"depth":109,"text":9109},{"id":9142,"depth":109,"text":9143},{"id":9166,"depth":109,"text":9167},{"id":9181,"depth":109,"text":9182},{"id":9202,"depth":109,"text":9203},{"id":9209,"depth":12,"text":9210},{"author":9247,"tags":9248},"Pouria Rabeti","Internal","/",{"title":8921,"description":8929},"index","PTia-ZIQ36_7SvxMZmnhnDJhjN2Q-9U6fIOLTFz-DRo",1757590620551]